diff --git a/.gitignore b/.gitignore
index 34671d3..efebe60 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,14 @@
+# frontend
**/*.bundle.js
**/*.js.map
**/coverage
**/files
**/node_modules/*
-**/dist
-**/vendor
**/yarn-error
-**/public/static/*/*.js
-**/public/static/**/*.js
\ No newline at end of file
+**/public/static/**/*.js
+**/public/index.html
+
+# backend
+**/*/quickshare.db
+**/*/files/
+**/*/uploadings/
\ No newline at end of file
diff --git a/.goreleaser.yml b/.goreleaser.yml
deleted file mode 100644
index fa9ad2d..0000000
--- a/.goreleaser.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-builds:
- -
- main: server.go
- env:
- - CGO_ENABLED=0
- binary: quickshare
- goos:
- - windows
- - darwin
- - linux
- goarch:
- - amd64
-archive:
- name_template: "quickshare_{{ .Tag }}_{{ .Os }}_{{ .Arch }}"
- format: zip
- wrap_in_directory: true
- replacements:
- darwin: macos
- linux: linux
- windows: windows
- amd64: x86_64
- files:
- - LICENSE
- - README.md
- - config.json
- - public/*
- - public/dist/*
- - docs/*
-checksum:
- name_template: 'checksums.txt'
-snapshot:
- name_template: "{{ .Tag }}-next"
-changelog:
- sort: asc
- filters:
- exclude:
- - '^docs:'
- - '^test:'
diff --git a/Gopkg.lock b/Gopkg.lock
deleted file mode 100644
index 0d53970..0000000
--- a/Gopkg.lock
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
- name = "github.com/pkg/errors"
- packages = ["."]
- revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
- version = "v0.8.0"
-
-[[projects]]
- name = "github.com/robbert229/jwt"
- packages = ["."]
- revision = "81ddea8e91eecffef557c5e4ce8e78a1d472d7d7"
- version = "v2.0.0"
-
-[[projects]]
- branch = "master"
- name = "github.com/skratchdot/open-golang"
- packages = ["open"]
- revision = "75fb7ed4208cf72d323d7d02fd1a5964a7a9073c"
-
-[solve-meta]
- analyzer-name = "dep"
- analyzer-version = 1
- inputs-digest = "6452415dc9b42f64749516a240d3050c2d22527deeabbe1d438d6974345cd042"
- solver-name = "gps-cdcl"
- solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
deleted file mode 100644
index 979f33a..0000000
--- a/Gopkg.toml
+++ /dev/null
@@ -1,34 +0,0 @@
-# Gopkg.toml example
-#
-# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
-# for detailed Gopkg.toml documentation.
-#
-# required = ["github.com/user/thing/cmd/thing"]
-# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
-#
-# [[constraint]]
-# name = "github.com/user/project"
-# version = "1.0.0"
-#
-# [[constraint]]
-# name = "github.com/user/project2"
-# branch = "dev"
-# source = "github.com/myfork/project2"
-#
-# [[override]]
-# name = "github.com/x/y"
-# version = "2.4.0"
-#
-# [prune]
-# non-go = false
-# go-tests = true
-# unused-packages = true
-
-
-[prune]
- go-tests = true
- unused-packages = true
-
-[[constraint]]
- name = "github.com/robbert229/jwt"
- version = "2.0.0"
diff --git a/README.md b/README.md
index a54ff3c..e278e00 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,12 @@
- Quickshare
+ [WORK IN PROGRESS!!!] Quickshare
- A succinct file sharing server
+ A simple file sharing server built with Go, TypeScript, Gin, React, and BoltDB.
-
+
diff --git a/client/.babelrc b/client/.babelrc
deleted file mode 100644
index de40a79..0000000
--- a/client/.babelrc
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "presets": ["env", "react", "stage-0", "stage-2"]
-}
diff --git a/client/components/composite/auth_pane.jsx b/client/components/composite/auth_pane.jsx
deleted file mode 100644
index 0ba99df..0000000
--- a/client/components/composite/auth_pane.jsx
+++ /dev/null
@@ -1,145 +0,0 @@
-import React from "react";
-import { Button } from "../control/button";
-import { Input } from "../control/input";
-
-import { config } from "../../config";
-import { getIcon } from "../display/icon";
-import { makePostBody } from "../../libs/utils";
-import { styleButtonLabel } from "./info_bar";
-
-export const classLogin = "auth-pane-login";
-export const classLogout = "auth-pane-logout";
-const IconSignIn = getIcon("signIn");
-const IconSignOut = getIcon("signOut");
-const IconAngRight = getIcon("angRight");
-
-export class AuthPane extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- adminId: "",
- adminPwd: ""
- };
- }
-
- onLogin = e => {
- e.preventDefault();
- this.props.onLogin(
- this.props.serverAddr,
- this.state.adminId,
- this.state.adminPwd
- );
- };
-
- onLogout = () => {
- this.props.onLogout(this.props.serverAddr);
- };
-
- onChangeAdminId = adminId => {
- this.setState({ adminId });
- };
-
- onChangeAdminPwd = adminPwd => {
- this.setState({ adminPwd });
- };
-
- render() {
- if (this.props.isLogin) {
- return (
-
- }
- label={"Logout"}
- styleLabel={styleButtonLabel}
- styleDefault={{ color: "#666" }}
- styleContainer={{ backgroundColor: "#ccc" }}
- />
-
- );
- } else {
- if (this.props.compact) {
- return (
-
- );
- } else {
- return (
-
- );
- }
- }
- }
-}
-
-AuthPane.defaultProps = {
- onLogin: () => console.error("undefined"),
- onLogout: () => console.error("undefined"),
- compact: false,
- isLogin: false,
- serverAddr: "",
- styleContainer: {},
- styleStr: ""
-};
diff --git a/client/components/composite/file_box.jsx b/client/components/composite/file_box.jsx
deleted file mode 100644
index a19f513..0000000
--- a/client/components/composite/file_box.jsx
+++ /dev/null
@@ -1,249 +0,0 @@
-import React from "react";
-import { FileBoxDetail } from "./file_box_detail";
-import { Button } from "../control/button";
-
-import { config } from "../../config";
-import { getIcon, getIconColor } from "../display/icon";
-import { getFileExt } from "../../libs/file_type";
-import { del, publishId, shadowId, setDownLimit } from "../../libs/api_share";
-
-const msgUploadOk = "Uploading is stopped and file is deleted";
-const msgUploadNok = "Fail to delete file";
-
-const styleLeft = {
- float: "left",
- padding: "1rem 0 1rem 1rem"
-};
-
-const styleRight = {
- float: "right",
- textAlign: "right",
- padding: "0rem"
-};
-
-const clear = ;
-
-const iconDesStyle = {
- display: "inline-block",
- fontSize: "0.875rem",
- lineHeight: "1rem",
- marginBottom: "0.25rem",
- maxWidth: "12rem",
- overflow: "hidden",
- textOverflow: "ellipsis",
- textDecoration: "none",
- verticalAlign: "middle",
- whiteSpace: "nowrap"
-};
-
-const descStyle = {
- fontSize: "0.75rem",
- padding: "0.75rem"
-};
-
-const otherStyle = `
-.main-pane {
- background-color: rgba(255, 255, 255, 1);
- transition: background-color 0.1s;
-}
-.main-pane:hover {
- background-color: rgba(255, 255, 255, 0.85);
- transition: background-color 0.1s;
-}
-
-.show-detail {
- opacity: 1;
- height: auto;
- transition: opacity 0.15s, height 0.5s;
-}
-
-.hide-detail {
- opacity: 0;
- height: 0;
- overflow: hidden;
- transition: opacity 0.15s, height 0.5s;
-}
-
-.main-pane a {
- color: #333;
- transition: color 1s;
-}
-.main-pane a:hover {
- color: #3498db;
- transition: color 1s;
-}
-`;
-
-const IconMore = getIcon("bars");
-const IconTimesCir = getIcon("timesCir");
-const styleIconTimesCir = {
- color: getIconColor("timesCir")
-};
-
-const iconMoreStyleStr = `
-.file-box-more {
- color: #333;
- background-color: #fff;
- transition: color 0.4s, background-color 0.4s;
-}
-
-.file-box-more:hover {
- color: #000;
- background-color: #ccc;
- transition: color 0.4s, background-color 0.4s;
-}
-`;
-
-let styleFileBox = {
- textAlign: "left",
- margin: "1px 0px",
- fontSize: "0.75rem"
-};
-
-const styleButtonContainer = {
- width: "1rem",
- height: "1rem",
- padding: "1.5rem 1rem"
-};
-
-const styleButtonIcon = {
- lineHeight: "1rem",
- height: "1rem",
- margin: "0"
-};
-
-export class FileBox extends React.PureComponent {
- constructor(props) {
- super(props);
- }
-
- onToggleDetail = () => {
- this.props.onToggleDetail(this.props.id);
- };
-
- onDelete = () => {
- del(this.props.id).then(ok => {
- if (ok) {
- this.props.onOk(msgUploadOk);
- this.props.onRefresh();
- } else {
- this.props.onError(msgUploadNok);
- }
- });
- };
-
- render() {
- const ext = getFileExt(this.props.name);
- const IconFile = getIcon(ext);
- const IconSpinner = getIcon("spinner");
-
- const styleIcon = {
- color: this.props.isLoading ? "#34495e" : getIconColor(ext)
- };
-
- styleFileBox = {
- ...styleFileBox,
- width: this.props.width
- };
-
- const fileIcon = this.props.isLoading ? (
-
- ) : (
-
- );
-
- const opIcon = this.props.isLoading ? (
- }
- label=""
- styleContainer={styleButtonContainer}
- styleIcon={styleButtonIcon}
- onClick={this.onDelete}
- />
- ) : (
- }
- className={"file-box-more"}
- label=""
- styleContainer={styleButtonContainer}
- styleIcon={styleButtonIcon}
- styleStr={iconMoreStyleStr}
- onClick={this.onToggleDetail}
- />
- );
-
- const downloadLink = (
-
- {this.props.name}
-
- );
-
- const classDetailPane =
- this.props.showDetailId === this.props.id &&
- this.props.uploadState === "done"
- ? "show-detail"
- : "hide-detail";
-
- return (
-
-
-
-            {fileIcon}
-
- {downloadLink}
-
-            {`${this.props.size} ${this.props.modTime}`}
-
-
-          {opIcon}
- {clear}
-
-
-
-
-
-
-
- );
- }
-}
-
-FileBox.defaultProps = {
- id: "",
- name: "",
- isLoading: false,
- modTime: "unknown",
- uploadState: "",
- href: "",
- width: "320px",
- showDetailId: "",
- downLimit: -3,
- size: "unknown",
- onToggleDetail: () => console.error("undefined"),
- onRefresh: () => console.error("undefined"),
- onError: () => console.error("undefined"),
- onOk: () => console.error("undefined")
-};
diff --git a/client/components/composite/file_box_detail.jsx b/client/components/composite/file_box_detail.jsx
deleted file mode 100644
index 76a95ee..0000000
--- a/client/components/composite/file_box_detail.jsx
+++ /dev/null
@@ -1,254 +0,0 @@
-import React from "react";
-import { CopyToClipboard } from "react-copy-to-clipboard";
-import { Button } from "../control/button";
-import { Input } from "../control/input";
-
-export const classDelBtn = "file-box-pane-btn-del";
-export const classDelYes = "del-no";
-export const classDelNo = "del-yes";
-const msgUrlCopied = "URL is copied.";
-
-let styleDetailPane = {
- color: "#666",
- backgroundColor: "#fff",
- position: "absolute",
- marginBottom: "5rem",
- zIndex: "10"
-};
-
-const styleDetailContainer = {
- padding: "1em",
- borderBottom: "solid 1rem #ccc"
-};
-
-const styleDetailHeader = {
- color: "#999",
- fontSize: "0.75rem",
- fontWeight: "bold",
- margin: "1.5rem 0 0.5rem 0",
- padding: 0,
- textTransform: "uppercase"
-};
-
-const styleDesc = {
- overflow: "hidden",
- whiteSpace: "nowrap",
- textOverflow: "ellipsis",
- lineHeight: "1.5rem",
- fontSize: "0.875rem"
-};
-
-export class FileBoxDetail extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- downLimit: this.props.downLimit,
- showDelComfirm: false
- };
-
- styleDetailPane = {
- ...styleDetailPane,
- width: this.props.width
- };
- }
-
- onResetLink = () => {
- return this.props.onPublishId(this.props.id).then(resettedId => {
- if (resettedId == null) {
- this.props.onError("Resetting link failed");
- } else {
- this.props.onOk("Link is reset");
- this.props.onRefresh();
- }
- });
- };
-
- onShadowLink = () => {
- return this.props.onShadowId(this.props.id).then(shadowId => {
- if (shadowId == null) {
- this.props.onError("Shadowing link failed");
- } else {
- this.props.onOk("Link is shadowed");
- this.props.onRefresh();
- }
- });
- };
-
- onSetDownLimit = newValue => {
- this.setState({ downLimit: newValue });
- };
-
- onComfirmDel = () => {
- this.setState({ showDelComfirm: true });
- };
-
- onCancelDel = () => {
- this.setState({ showDelComfirm: false });
- };
-
- onUpdateDownLimit = () => {
- return this.props
- .onSetDownLimit(this.props.id, this.state.downLimit)
- .then(ok => {
- if (ok) {
- this.props.onOk("Download limit updated");
- this.props.onRefresh();
- } else {
- this.props.onError("Setting download limit failed");
- }
- });
- };
-
- onDelete = () => {
- return this.props.onDel(this.props.id).then(ok => {
- if (ok) {
- this.props.onOk("File deleted");
- this.props.onRefresh();
- } else {
- this.props.onError("Fail to delete file");
- }
- });
- };
-
- onCopy = () => {
- this.props.onOk(msgUrlCopied);
- };
-
- render() {
- const delComfirmButtons = (
-
-
-
-
- );
-
- const downloadURL = `${window.location.protocol}//${window.location.host}${
- this.props.href
- }`;
-
- return (
-
-
-
-
-            File Information
-
-
- Name {this.props.name}
-
-
- Size {this.props.size}
-
-
- Time {this.props.modTime}
-
-
-
-
-
-            Download Link
-
- {/* */}
-
-
-
-
-
-
-
-
-
- Download Limit (-1 means unlimited)
-
-
-
-
-
-
-
-            Delete
- {this.state.showDelComfirm ? (
- delComfirmButtons
- ) : (
-
- )}
-
-
-
- );
- }
-}
-
-FileBoxDetail.defaultProps = {
- id: "n/a",
- name: "n/a",
- size: "n/a",
- modTime: 0,
- href: "n/a",
- downLimit: -3,
- width: -1,
- className: "",
- onRefresh: () => console.error("undefined"),
- onError: () => console.error("undefined"),
- onOk: () => console.error("undefined"),
- onDel: () => console.error("undefined"),
- onPublishId: () => console.error("undefined"),
- onShadowId: () => console.error("undefined"),
- onSetDownLimit: () => console.error("undefined")
-};
diff --git a/client/components/composite/file_pane.jsx b/client/components/composite/file_pane.jsx
deleted file mode 100644
index 04cda00..0000000
--- a/client/components/composite/file_pane.jsx
+++ /dev/null
@@ -1,154 +0,0 @@
-import axios from "axios";
-import byteSize from "byte-size";
-import React from "react";
-import ReactDOM from "react-dom";
-import throttle from "lodash.throttle";
-import { Grids } from "../../components/layout/grids";
-import { Uploader } from "../../components/composite/uploader";
-import { FileBox } from "./file_box";
-import { TimeGrids } from "./time_grids";
-
-import { config } from "../../config";
-
-const msgSynced = "Synced";
-const msgSyncFailed = "Syncing failed";
-const interval = 250;
-
-export class FilePane extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- infos: [],
- showDetailId: -1
- };
- this.onRefresh = throttle(this.onRefreshImp, interval);
- this.onUpdateProgress = throttle(this.onUpdateProgressImp, interval);
- }
-
- componentWillMount() {
- return this.onRefreshImp();
- }
-
- onRefreshImp = () => {
- return this.props
- .onList()
- .then(infos => {
- if (infos != null) {
- this.setState({ infos });
- this.props.onOk(msgSynced);
- } else {
- this.props.onError(msgSyncFailed);
- }
- })
- .catch(err => {
- console.error(err);
- this.props.onError(msgSyncFailed);
- });
- };
-
- onUpdateProgressImp = (shareId, progress) => {
- const updatedInfos = this.state.infos.map(shareInfo => {
- return shareInfo.Id === shareId ? { ...shareInfo, progress } : shareInfo;
- });
-
- this.setState({ infos: updatedInfos });
- };
-
- onToggleDetail = id => {
- this.setState({
- showDetailId: this.state.showDetailId === id ? -1 : id
- });
- };
-
- getByteSize = size => {
- const sizeObj = byteSize(size);
- return `${sizeObj.value} ${sizeObj.unit}`;
- };
-
- getInfos = filterName => {
- const filteredInfos = this.state.infos.filter(shareInfo => {
- return shareInfo.PathLocal.includes(filterName);
- });
-
- return filteredInfos.map(shareInfo => {
- const isLoading = shareInfo.State === "uploading";
- const timestamp = shareInfo.ModTime / 1000000;
- const modTime = new Date(timestamp).toLocaleString();
- const href = `${config.serverAddr}/download?shareid=${shareInfo.Id}`;
- const progress = isNaN(shareInfo.progress) ? 0 : shareInfo.progress;
- const name = isLoading
- ? `${Math.floor(progress * 100)}% ${shareInfo.PathLocal}`
- : shareInfo.PathLocal;
-
- return {
- key: shareInfo.Id,
- timestamp,
- component: (
-
- )
- };
- });
- };
-
- render() {
- const styleUploaderContainer = {
- width: `${this.props.colWidth}rem`,
- margin: "auto"
- };
-
- const containerStyle = {
- width: this.props.width,
- margin: "auto",
- marginTop: "0",
- marginBottom: "10rem"
- };
-
- return (
-
- );
- }
-}
-
-FilePane.defaultProps = {
- width: "100%",
- colWidth: 20,
- filterName: "",
- onList: () => console.error("undefined"),
- onOk: () => console.error("undefined"),
- onError: () => console.error("undefined")
-};
diff --git a/client/components/composite/info_bar.jsx b/client/components/composite/info_bar.jsx
deleted file mode 100644
index 17083f4..0000000
--- a/client/components/composite/info_bar.jsx
+++ /dev/null
@@ -1,250 +0,0 @@
-import React from "react";
-
-import { Button } from "../control/button";
-import { Input } from "../control/input";
-import { getIcon, getIconColor } from "../display/icon";
-import { AuthPane } from "./auth_pane";
-import { rootSize } from "../../config";
-
-let styleInfoBar = {
- textAlign: "left",
- color: "#999",
- marginBottom: "1rem",
- margin: "auto"
-};
-
-const styleContainer = {
- padding: "0.5rem",
- backgroundColor: "rgba(255, 255, 255, 0.5)"
-};
-
-const styleLeft = {
- float: "left",
- width: "50%",
- heigth: "2rem"
-};
-
-const styleRight = {
- float: "right",
- width: "50%",
- textAlign: "right",
- heigth: "2rem"
-};
-
-const styleButtonLabel = {
- verticalAlign: "middle"
-};
-
-const IconPlusCir = getIcon("pluscir");
-const IconSearch = getIcon("search");
-const clear = ;
-
-export class InfoBar extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- filterFileName: "",
- fold: this.props.compact
- };
- }
-
- onLogin = (serverAddr, adminId, adminPwd) => {
- this.props.onLogin(serverAddr, adminId, adminPwd);
- };
-
- onLogout = serverAddr => {
- this.props.onLogout(serverAddr);
- };
-
- onSearch = value => {
- // TODO: need debounce
- this.props.onSearch(value);
- this.setState({ filterFileName: value });
- };
-
- onAddLocalFiles = () => {
- return this.props.onAddLocalFiles().then(ok => {
- if (ok) {
- // TODO: need to add refresh
- this.props.onOk("Local files are added, please refresh.");
- } else {
- this.props.onError("Fail to add local files");
- }
- });
- };
-
- onToggle = () => {
- this.setState({ fold: !this.state.fold });
- };
-
- render() {
- styleInfoBar = { ...styleInfoBar, width: this.props.width };
-
- if (this.props.compact) {
- const IconMore = getIcon("bars");
-
- const menuIcon = (
-
-
-
- }
- />
-
-
- }
- />
-
-
-
-
- );
- const menuList = !this.state.fold ? (
-
- ) : (
-
- );
-
- const menu = (
-
- {menuIcon}
- {menuList}
-
- );
- return (
-
- {this.props.isLogin ? (
- menu
- ) : (
-
- )}
-
-          {this.props.children}
-
- );
- }
-
- const visitorPane = (
-
- );
-
- const memberPane = (
-
-
-
- }
- />
-
- {clear}
-
- );
-
- return (
-
-
- {this.props.isLogin ? memberPane : visitorPane}
-
-
-        {this.props.children}
-
- );
- }
-}
-
-InfoBar.defaultProps = {
- compact: false,
- width: "-1",
- isLogin: false,
- serverAddr: "",
- onLogin: () => console.error("undefined"),
- onLogout: () => console.error("undefined"),
- onAddLocalFiles: () => console.error("undefined"),
- onSearch: () => console.error("undefined"),
- onOk: () => console.error("undefined"),
- onError: () => console.error("undefined")
-};
diff --git a/client/components/composite/log.jsx b/client/components/composite/log.jsx
deleted file mode 100644
index d91b5bc..0000000
--- a/client/components/composite/log.jsx
+++ /dev/null
@@ -1,214 +0,0 @@
-import React from "react";
-import { getIcon, getIconColor } from "../display/icon";
-
-const statusNull = "null";
-const statusInfo = "info";
-const statusWarn = "warn";
-const statusError = "error";
-const statusOk = "ok";
-const statusStart = "start";
-const statusEnd = "end";
-
-const IconInfo = getIcon("infoCir");
-const IconWarn = getIcon("exTri");
-const IconError = getIcon("timesCir");
-const IconOk = getIcon("checkCir");
-const IconStart = getIcon("refresh");
-
-const colorInfo = getIconColor("infoCir");
-const colorWarn = getIconColor("exTri");
-const colorError = getIconColor("timesCir");
-const colorOk = getIconColor("checkCir");
-const colorStart = getIconColor("refresh");
-
-const classFadeIn = "log-fade-in";
-const classHidden = "log-hidden";
-const styleStr = `
- .log .${classFadeIn} {
- opacity: 1;
- margin-left: 0.5rem;
- padding: 0.25rem 0.5rem;
- transition: opacity 0.3s, margin-left 0.3s, padding 0.3s;
- }
-
- .log .${classHidden} {
- opacity: 0;
- margin-left: 0rem;
- padding: 0;
- transition: opacity 0.3s, margin-left 0.3s, padding 0.3s;
- }
-
- .log a {
- color: #2980b9;
- transition: color 0.3s;
- text-decoration: none;
- }
-
- .log a:hover {
- color: #3498db;
- transition: color 0.3s;
- text-decoration: none;
- }
-`;
-
-const wait = 5000;
-const logSlotLen = 2;
-const getEmptyLog = () => ({
- className: classHidden,
- msg: "",
- status: statusNull
-});
-
-const getLogIcon = status => {
- switch (status) {
- case statusInfo:
- return (
-
- );
- case statusWarn:
- return (
-
- );
- case statusError:
- return (
-
- );
- case statusOk:
- return (
-
- );
- case statusStart:
- return (
-
- );
- case statusEnd:
- return (
-
- );
- default:
- return ;
- }
-};
-
-export class Log extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- logs: Array(logSlotLen).fill(getEmptyLog())
- };
- this.id = 0;
- }
-
- genId = () => {
- return this.id++ % logSlotLen;
- };
-
- addLog = (status, msg) => {
- const id = this.genId();
- const nextLogs = [
- ...this.state.logs.slice(0, id),
- {
- className: classFadeIn,
- msg,
- status
- },
- ...this.state.logs.slice(id + 1)
- ];
-
- this.setState({ logs: nextLogs });
- this.delayClearLog(id);
- return id;
- };
-
- delayClearLog = idToDel => {
- setTimeout(this.clearLog, wait, idToDel);
- };
-
- clearLog = idToDel => {
- // TODO: there may be race condition here
- const nextLogs = [
- ...this.state.logs.slice(0, idToDel),
- getEmptyLog(),
- ...this.state.logs.slice(idToDel + 1)
- ];
- this.setState({ logs: nextLogs });
- };
-
- info = msg => {
- this.addLog(statusInfo, msg);
- };
-
- warn = msg => {
- this.addLog(statusWarn, msg);
- };
-
- error = msg => {
- this.addLog(statusError, msg);
- };
-
- ok = msg => {
- this.addLog(statusOk, msg);
- };
-
- start = msg => {
- const id = this.genId();
- const nextLogs = [
- ...this.state.logs.slice(0, id),
- {
- className: classFadeIn,
- msg,
- status: statusStart
- },
- ...this.state.logs.slice(id + 1)
- ];
-
- this.setState({ logs: nextLogs });
- return id;
- };
-
- end = (startId, msg) => {
- // remove start log
- this.clearLog(startId);
- this.addLog(statusEnd, msg);
- };
-
- render() {
- const logList = Object.keys(this.state.logs).map(logId => {
- return (
-
- {getLogIcon(this.state.logs[logId].status)}
- {this.state.logs[logId].msg}
-
- );
- });
-
- return (
-
- {logList}
-
-
- );
- }
-}
-
-Log.defaultProps = {
- style: {},
- styleLog: {}
-};
diff --git a/client/components/composite/tests/auth_pane.test.jsx b/client/components/composite/tests/auth_pane.test.jsx
deleted file mode 100644
index e8c00a2..0000000
--- a/client/components/composite/tests/auth_pane.test.jsx
+++ /dev/null
@@ -1,32 +0,0 @@
-import React from "react";
-import { AuthPane, classLogin, classLogout } from "../auth_pane";
-
-describe("AuthPane", () => {
- test("AuthPane should show login pane if isLogin === true, or show logout pane", () => {
- const tests = [
- {
- input: {
- onLogin: jest.fn,
- onLogout: jest.fn,
- isLogin: false,
- serverAddr: ""
- },
- output: classLogin
- },
- {
- input: {
- onLogin: jest.fn,
- onLogout: jest.fn,
- isLogin: true,
- serverAddr: ""
- },
- output: classLogout
- }
- ];
-
- tests.forEach(testCase => {
- const pane = new AuthPane(testCase.input);
- expect(pane.render().props.className).toBe(testCase.output);
- });
- });
-});
diff --git a/client/components/composite/tests/file_box_detail.test.jsx b/client/components/composite/tests/file_box_detail.test.jsx
deleted file mode 100644
index 4389082..0000000
--- a/client/components/composite/tests/file_box_detail.test.jsx
+++ /dev/null
@@ -1,297 +0,0 @@
-jest.mock("../../../libs/api_share");
-import React from "react";
-import { mount } from "enzyme";
-import { FileBoxDetail, classDelYes, classDelNo } from "../file_box_detail";
-import { execFuncs, getDesc, verifyCalls } from "../../../tests/test_helper";
-import valueEqual from "value-equal";
-import {
- del,
- publishId,
- shadowId,
- setDownLimit
-} from "../../../libs/api_share";
-
-describe("FileBoxDetail", () => {
- test("FileBoxDetail should show delete button by default, toggle using onComfirmDel and onCancelDel", () => {
- const box = mount();
- expect(box.instance().state.showDelComfirm).toBe(false);
- box.instance().onComfirmDel();
- expect(box.instance().state.showDelComfirm).toBe(true);
- box.instance().onCancelDel();
- expect(box.instance().state.showDelComfirm).toBe(false);
- });
-});
-
-describe("FileBoxDetail", () => {
- const tests = [
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onSetDownLimit",
- args: [3]
- }
- ],
- state: {
- downLimit: 3,
- showDelComfirm: false
- }
- },
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onComfirmDel",
- args: []
- }
- ],
- state: {
- downLimit: -1,
- showDelComfirm: true
- }
- },
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onComfirmDel",
- args: []
- },
- {
- func: "onCancelDel",
- args: []
- }
- ],
- state: {
- downLimit: -1,
- showDelComfirm: false
- }
- },
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onResetLink",
- args: []
- }
- ],
- state: {
- downLimit: -1,
- showDelComfirm: false
- },
- calls: [
- {
- func: "onPublishId",
- count: 1
- },
- {
- func: "onOk",
- count: 1
- },
- {
- func: "onRefresh",
- count: 1
- }
- ]
- },
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onShadowLink",
- args: []
- }
- ],
- state: {
- downLimit: -1,
- showDelComfirm: false
- },
- calls: [
- {
- func: "onShadowId",
- count: 1
- },
- {
- func: "onOk",
- count: 1
- },
- {
- func: "onRefresh",
- count: 1
- }
- ]
- },
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onUpdateDownLimit",
- args: []
- }
- ],
- state: {
- downLimit: -1,
- showDelComfirm: false
- },
- calls: [
- {
- func: "onSetDownLimit",
- count: 1
- },
- {
- func: "onOk",
- count: 1
- },
- {
- func: "onRefresh",
- count: 1
- }
- ]
- },
- {
- init: {
- id: "0",
- name: "filename",
- size: "1B",
- modTime: 0,
- href: "href",
- downLimit: -1
- },
- execs: [
- {
- func: "onDelete",
- args: []
- }
- ],
- state: {
- downLimit: -1,
- showDelComfirm: false
- },
- calls: [
- {
- func: "onDel",
- count: 1
- },
- {
- func: "onOk",
- count: 1
- },
- {
- func: "onRefresh",
- count: 1
- }
- ]
- }
- ];
-
- tests.forEach(testCase => {
- test(getDesc("FileBoxDetail", testCase), () => {
- const stubs = {
- onOk: jest.fn(),
- onError: jest.fn(),
- onRefresh: jest.fn(),
- onDel: jest.fn(),
- onPublishId: jest.fn(),
- onShadowId: jest.fn(),
- onSetDownLimit: jest.fn()
- };
-
- const stubWraps = {
- onDel: () => {
- stubs.onDel();
- return del();
- },
- onPublishId: () => {
- stubs.onPublishId();
- return publishId();
- },
- onShadowId: () => {
- stubs.onShadowId();
- return shadowId();
- },
- onSetDownLimit: () => {
- stubs.onSetDownLimit();
- return setDownLimit();
- }
- };
-
- return new Promise((resolve, reject) => {
- const pane = mount(
-
- );
-
- execFuncs(pane.instance(), testCase.execs).then(() => {
- pane.update();
- if (!valueEqual(pane.instance().state, testCase.state)) {
- return reject("FileBoxDetail: state not identical");
- }
-
- if (testCase.calls != null) {
- const err = verifyCalls(testCase.calls, stubs);
- if (err != null) {
- return reject("FileBoxDetail: state not identical");
- }
- }
-
- resolve();
- });
- });
- });
- });
-});
diff --git a/client/components/composite/tests/file_pane.test.jsx b/client/components/composite/tests/file_pane.test.jsx
deleted file mode 100644
index 90f0b75..0000000
--- a/client/components/composite/tests/file_pane.test.jsx
+++ /dev/null
@@ -1,144 +0,0 @@
-jest.mock("../../../libs/api_share");
-import React from "react";
-import { FilePane } from "../file_pane";
-import { mount } from "enzyme";
-import * as mockApiShare from "../../../libs/api_share";
-import { execFuncs, getDesc, verifyCalls } from "../../../tests/test_helper";
-import valueEqual from "value-equal";
-
-describe("FilePane", () => {
- const tests = [
- {
- init: {
- list: [{ Id: 0, PathLocal: "" }]
- },
- execs: [
- {
- func: "componentWillMount",
- args: []
- }
- ],
- state: {
- infos: [{ Id: 0, PathLocal: "" }],
- showDetailId: -1
- },
- calls: [
- {
- func: "onList",
- count: 2 // because componentWillMount will be callled twice
- },
- {
- func: "onOk",
- count: 2 // because componentWillMount will be callled twice
- }
- ]
- },
- {
- init: {
- list: [{ Id: 0, PathLocal: "" }, { Id: 1, PathLocal: "" }]
- },
- execs: [
- {
- func: "componentWillMount",
- args: []
- },
- {
- func: "onUpdateProgressImp",
- args: [0, "100%"]
- }
- ],
- state: {
- infos: [
- { Id: 0, PathLocal: "", progress: "100%" },
- { Id: 1, PathLocal: "" }
- ],
- showDetailId: -1
- }
- },
- {
- init: {
- list: []
- },
- execs: [
- {
- func: "componentWillMount",
- args: []
- },
- {
- func: "onToggleDetail",
- args: [0]
- }
- ],
- state: {
- infos: [],
- showDetailId: 0
- }
- },
- {
- init: {
- list: []
- },
- execs: [
- {
- func: "onToggleDetail",
- args: [0]
- },
- {
- func: "onToggleDetail",
- args: [0]
- }
- ],
- state: {
- infos: [],
- showDetailId: -1
- }
- }
- ];
-
- tests.forEach(testCase => {
- test(getDesc("FilePane", testCase), () => {
- // mock list()
- mockApiShare.__truncInfos();
- mockApiShare.__addInfos(testCase.init.list);
-
- const stubs = {
- onList: jest.fn(),
- onOk: jest.fn(),
- onError: jest.fn()
- };
-
- const stubWraps = {
- onListWrap: () => {
- stubs.onList();
- return mockApiShare.list();
- }
- };
-
- return new Promise((resolve, reject) => {
- const pane = mount(
-
- );
-
- execFuncs(pane.instance(), testCase.execs).then(() => {
- pane.update();
- if (!valueEqual(pane.instance().state, testCase.state)) {
- return reject("FilePane: state not identical");
- }
-
- if (testCase.calls != null) {
- const err = verifyCalls(testCase.calls, stubs);
- if (err != null) {
- return reject(err);
- }
- }
-
- resolve();
- });
- });
- });
- });
-});
diff --git a/client/components/composite/tests/info_bar.test.jsx b/client/components/composite/tests/info_bar.test.jsx
deleted file mode 100644
index 2c611fa..0000000
--- a/client/components/composite/tests/info_bar.test.jsx
+++ /dev/null
@@ -1,136 +0,0 @@
-jest.mock("../../../libs/api_share");
-jest.mock("../../../libs/api_auth");
-import React from "react";
-import { InfoBar } from "../info_bar";
-import { mount } from "enzyme";
-import * as mockApiShare from "../../../libs/api_share";
-import { execFuncs, getDesc, verifyCalls } from "../../../tests/test_helper";
-import valueEqual from "value-equal";
-
-describe("InfoBar", () => {
- const tests = [
- {
- execs: [
- {
- func: "onSearch",
- args: ["searchFileName"]
- }
- ],
- state: {
- filterFileName: "searchFileName",
- fold: false
- },
- calls: [
- {
- func: "onSearch",
- count: 1
- }
- ]
- },
- {
- execs: [
- {
- func: "onLogin",
- args: ["serverAddr", "adminId", "adminPwd"]
- }
- ],
- state: {
- filterFileName: "",
- fold: false
- },
- calls: [
- {
- func: "onLogin",
- count: 1
- }
- ]
- },
- {
- execs: [
- {
- func: "onLogout",
- args: ["serverAddr"]
- }
- ],
- state: {
- filterFileName: "",
- fold: false
- },
- calls: [
- {
- func: "onLogout",
- count: 1
- }
- ]
- },
- {
- execs: [
- {
- func: "onAddLocalFiles",
- args: []
- }
- ],
- state: {
- filterFileName: "",
- fold: false
- },
- calls: [
- {
- func: "onAddLocalFiles",
- count: 1
- }
- ]
- }
- ];
-
- tests.forEach(testCase => {
- test(getDesc("InfoBar", testCase), () => {
- const stubs = {
- onLogin: jest.fn(),
- onLogout: jest.fn(),
- onAddLocalFiles: jest.fn(),
- onSearch: jest.fn(),
- onOk: jest.fn(),
- onError: jest.fn()
- };
-
- const onAddLocalFilesWrap = () => {
- stubs.onAddLocalFiles();
- return Promise.resolve(true);
- };
-
- return new Promise((resolve, reject) => {
- const infoBar = mount(
-
- );
-
- execFuncs(infoBar.instance(), testCase.execs)
- .then(() => {
- infoBar.update();
-
- if (!valueEqual(infoBar.instance().state, testCase.state)) {
- return reject("state not identical");
- }
- if (testCase.calls != null) {
- const err = verifyCalls(testCase.calls, stubs);
- if (err !== null) {
- return reject(err);
- }
- }
- resolve();
- })
- .catch(err => console.error(err));
- });
- });
- });
-});
diff --git a/client/components/composite/tests/uploader.test.jsx b/client/components/composite/tests/uploader.test.jsx
deleted file mode 100644
index 3922490..0000000
--- a/client/components/composite/tests/uploader.test.jsx
+++ /dev/null
@@ -1,51 +0,0 @@
-import React from "react";
-import { mount } from "enzyme";
-import { checkQueueCycle, Uploader } from "../uploader";
-
-const testTimeout = 4000;
-
-describe("Uploader", () => {
- test(
- "Uploader will upload files in uploadQueue by interval",
- () => {
- // TODO: could be refactored using timer mocks
- // https://facebook.github.io/jest/docs/en/timer-mocks.html
- const tests = [
- {
- input: { target: { files: ["task1", "task2", "task3"] } },
- uploadCalled: 3
- }
- ];
-
- let promises = [];
-
- const uploader = mount();
- tests.forEach(testCase => {
- // mock
- const uploadSpy = jest.fn();
- const uploadStub = () => {
- uploadSpy();
- return Promise.resolve();
- };
- uploader.instance().upload = uploadStub;
- uploader.update();
-
- // upload and verify
- uploader.instance().onUpload(testCase.input);
- const wait = testCase.input.target.files.length * 1000 + 100;
- const promise = new Promise(resolve => {
- setTimeout(() => {
- expect(uploader.instance().state.uploadQueue.length).toBe(0);
- expect(uploadSpy.mock.calls.length).toBe(testCase.uploadCalled);
- resolve();
- }, wait);
- });
-
- promises = [...promises, promise];
- });
-
- return Promise.all(promises);
- },
- testTimeout
- );
-});
diff --git a/client/components/composite/time_grids.jsx b/client/components/composite/time_grids.jsx
deleted file mode 100644
index 7663ffb..0000000
--- a/client/components/composite/time_grids.jsx
+++ /dev/null
@@ -1,69 +0,0 @@
-import React from "react";
-import { config } from "../../config";
-import { Grids } from "../layout/grids";
-
-const styleTitle = {
- color: "#fff",
- backgroundColor: "rgba(0, 0, 0, 0.4)",
- display: "inline-block",
- padding: "0.5rem 1rem",
- fontSize: "1rem",
- margin: "2rem 0 0.5rem 0",
- lineHeight: "1rem",
- height: "1rem"
-};
-
-export class TimeGrids extends React.PureComponent {
- render() {
- const groups = new Map();
-
- this.props.items.forEach(item => {
- const date = new Date(item.timestamp);
- const key = `${date.getFullYear()}-${date.getMonth() +
- 1}-${date.getDate()}`;
-
- if (groups.has(key)) {
- groups.set(key, [...groups.get(key), item]);
- } else {
- groups.set(key, [item]);
- }
- });
-
- var timeGrids = [];
- groups.forEach((gridGroup, groupKey) => {
- const year = parseInt(groupKey.split("-")[0]);
- const month = parseInt(groupKey.split("-")[1]);
- const date = parseInt(groupKey.split("-")[2]);
-
- const sortedGroup = gridGroup.sort((item1, item2) => {
- return item2.timestamp - item1.timestamp;
- });
-
- timeGrids = [
- ...timeGrids,
-
- ];
- });
-
- const sortedGroups = timeGrids.sort((group1, group2) => {
- return group2.key - group1.key;
- });
-    return {sortedGroups};
- }
-}
-
-TimeGrids.defaultProps = {
- items: [
- {
- key: "",
- timestamp: -1,
- component: no grid found
- }
- ],
- styleContainer: {}
-};
diff --git a/client/components/composite/uploader.jsx b/client/components/composite/uploader.jsx
deleted file mode 100644
index 5e393ec..0000000
--- a/client/components/composite/uploader.jsx
+++ /dev/null
@@ -1,215 +0,0 @@
-import React from "react";
-import ReactDOM from "react-dom";
-
-import { config } from "../../config";
-import { Button } from "../control/button";
-import { getIcon } from "../display/icon";
-import { FileUploader } from "../../libs/api_upload";
-
-const msgFileNotFound = "File not found";
-const msgFileUploadOk = "is uploaded";
-const msgChromeLink = "https://www.google.com/chrome/";
-const msgFirefoxLink = "https://www.mozilla.org/";
-
-export const checkQueueCycle = 1000;
-
-const IconPlus = getIcon("cirUp");
-const IconThiList = getIcon("thList");
-
-const styleContainer = {
- position: "fixed",
- bottom: "0.5rem",
- margin: "auto",
- zIndex: 1
-};
-
-const styleButtonContainer = {
- backgroundColor: "#2ecc71",
- width: "20rem",
- height: "auto",
- textAlign: "center"
-};
-
-const styleDefault = {
- color: "#fff"
-};
-
-const styleLabel = {
- display: "inline-block",
- verticalAlign: "middle",
- marginLeft: "0.5rem"
-};
-
-const styleUploadQueue = {
- backgroundColor: "#000",
- opacity: 0.85,
- color: "#fff",
- fontSize: "0.75rem",
- lineHeight: "1.25rem"
-};
-
-const styleUploadItem = {
- width: "18rem",
- overflow: "hidden",
- textOverflow: "ellipsis",
- whiteSpace: "nowrap",
- padding: "0.5rem 1rem"
-};
-
-const styleUnsupported = {
- backgroundColor: "#e74c3c",
- color: "#fff",
- overflow: "hidden",
- padding: "0.5rem 1rem",
- width: "18rem",
- textAlign: "center"
-};
-
-const styleStr = `
- a {
- color: white;
- margin: auto 0.5rem auto 0.5rem;
- }
-`;
-
-export class Uploader extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- uploadQueue: [],
- uploadValue: ""
- };
-
- this.input = undefined;
- this.assignInput = input => {
- this.input = ReactDOM.findDOMNode(input);
- };
- }
-
- componentDidMount() {
- // will polling uploadQueue like a worker
- this.checkQueue();
- }
-
- checkQueue = () => {
- // TODO: using web worker to avoid lagging UI
- if (this.state.uploadQueue.length > 0) {
- this.upload(this.state.uploadQueue[0]).then(() => {
- this.setState({ uploadQueue: this.state.uploadQueue.slice(1) });
- setTimeout(this.checkQueue, checkQueueCycle);
- });
- } else {
- setTimeout(this.checkQueue, checkQueueCycle);
- }
- };
-
- upload = file => {
- const fileUploader = new FileUploader(
- this.onStart,
- this.onProgress,
- this.onFinish,
- this.onError
- );
-
- return fileUploader.uploadFile(file);
- };
-
- onStart = () => {
- this.props.onRefresh();
- };
-
- onProgress = (shareId, progress) => {
- this.props.onUpdateProgress(shareId, progress);
- };
-
- onFinish = () => {
- this.props.onRefresh();
- };
-
- onError = err => {
- this.props.onError(err);
- };
-
- onUpload = event => {
- if (event.target.files == null || event.target.files.length === 0) {
- this.props.onError(msgFileNotFound);
- this.setState({ uploadValue: "" });
- } else {
- this.setState({
- uploadQueue: [...this.state.uploadQueue, ...event.target.files],
- uploadValue: ""
- });
- }
- };
-
- onChooseFile = () => {
- this.input.click();
- };
-
- render() {
- if (
- window.FormData == null ||
- window.FileReader == null ||
- window.Blob == null
- ) {
- return (
-
- );
- }
-
- const hiddenInput = (
-
- );
-
- const uploadQueue = this.state.uploadQueue.map(file => {
- return (
-
-
- {file.name}
-
- );
- });
-
- return (
-
-
-        {uploadQueue}
-
-          }
- styleDefault={styleDefault}
- styleContainer={styleButtonContainer}
- styleLabel={styleLabel}
- />
- {hiddenInput}
-
- );
- }
-}
-
-Uploader.defaultProps = {
- onRefresh: () => console.error("undefined"),
- onUpdateProgress: () => console.error("undefined"),
- onOk: () => console.error("undefined"),
- onError: () => console.error("undefined")
-};
diff --git a/client/components/control/button.jsx b/client/components/control/button.jsx
deleted file mode 100644
index 8318669..0000000
--- a/client/components/control/button.jsx
+++ /dev/null
@@ -1,102 +0,0 @@
-import React from "react";
-
-const buttonClassName = "btn";
-
-const styleContainer = {
- display: "inline-block",
- height: "2.5rem"
-};
-
-const styleIcon = {
- lineHeight: "2.5rem",
- height: "2.5rem",
- margin: "0 -0.25rem 0 0.5rem"
-};
-
-const styleBase = {
- background: "transparent",
- lineHeight: "2.5rem",
- fontSize: "0.875rem",
- border: "none",
- outline: "none",
- padding: "0 0.75rem",
- textAlign: "center"
-};
-
-const styleDefault = {
- ...styleBase
-};
-
-const styleStr = `
- .${buttonClassName}:hover {
- opacity: 0.7;
- transition: opacity 0.25s;
- }
-
- .${buttonClassName}:active {
- opacity: 0.7;
- transition: opacity 0.25s;
- }
-
- .${buttonClassName}:disabled {
- opacity: 0.2;
- transition: opacity 0.25s;
- }
-
- button::-moz-focus-inner {
- border: 0;
- }
-`;
-
-export class Button extends React.PureComponent {
- constructor(props) {
- super(props);
- this.styleDefault = { ...styleDefault, ...this.props.styleDefault };
- this.styleStr = this.props.styleStr ? this.props.styleStr : styleStr;
- }
-
- onClick = e => {
- if (this.props.onClick && this.props.isEnabled) {
- this.props.onClick(e);
- }
- };
-
- render() {
- const style = this.props.isEnabled ? this.styleDefault : this.styleDisabled;
- const icon =
- this.props.icon != null ? (
-
- {this.props.icon}
-
- ) : (
-
- );
-
- return (
-
- {icon}
-
-
- );
- }
-}
-
-Button.defaultProps = {
- className: "btn",
- isEnabled: true,
- icon: null,
- onClick: () => true,
- styleContainer: {},
- styleDefault: {},
- styleDisabled: {},
- styleLabel: {},
- styleIcon: {},
- styleStr: undefined
-};
diff --git a/client/components/control/input.jsx b/client/components/control/input.jsx
deleted file mode 100644
index f69e38d..0000000
--- a/client/components/control/input.jsx
+++ /dev/null
@@ -1,128 +0,0 @@
-import React from "react";
-
-const styleContainer = {
- backgroundColor: "#ccc",
- display: "inline-block",
- height: "2.5rem"
-};
-
-const styleIcon = {
- lineHeight: "2.5rem",
- height: "2.5rem",
- margin: "0 0.25rem 0 0.5rem"
-};
-
-const styleInputBase = {
- backgroundColor: "transparent",
- border: "none",
- display: "inline-block",
- fontSize: "0.875rem",
- height: "2.5rem",
- lineHeight: "2.5rem",
- outline: "none",
- overflowY: "hidden",
- padding: "0 0.75rem",
- verticalAlign: "middle"
-};
-
-const styleDefault = {
- ...styleInputBase,
- color: "#333"
-};
-
-const styleInvalid = {
- ...styleInputBase,
- color: "#e74c3c"
-};
-
-const inputClassName = "qs-input";
-const styleStr = `
-.${inputClassName}:hover {
- // box-shadow: 0px 0px -5px rgba(0, 0, 0, 1);
- opacity: 0.7;
- transition: opacity 0.25s;
-}
-
-.${inputClassName}:active {
- // box-shadow: 0px 0px -5px rgba(0, 0, 0, 1);
- opacity: 0.7;
- transition: opacity 0.25s;
-}
-
-.${inputClassName}:disabled {
- color: #ccc;
-}
-`;
-
-export class Input extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = { isValid: true };
- this.inputRef = undefined;
- }
-
- onChange = e => {
- this.props.onChange(e.target.value);
- this.props.onChangeEvent(e);
- this.props.onChangeTarget(e.target);
- this.setState({ isValid: this.props.validate(e.target.value) });
- };
-
- getRef = input => {
- this.inputRef = input;
- this.props.inputRef(this.inputRef);
- };
-
- render() {
- const style = this.state.isValid ? styleDefault : styleInvalid;
- const icon =
- this.props.icon != null ? (
- {this.props.icon}
- ) : (
-
- );
-
- return (
-
- {icon}
-
-
-
- );
- }
-}
-
-Input.defaultProps = {
- className: "input",
- maxLength: "32",
- placeholder: "placeholder",
- readOnly: false,
- style: {},
- styleContainer: {},
- styleInvalid: {},
- type: "text",
- disabled: false,
- width: "auto",
- value: "",
- icon: null,
- onChange: () => true,
- onChangeEvent: () => true,
- onChangeTarget: () => true,
- validate: () => true,
- inputRef: () => true
-};
diff --git a/client/components/display/icon.js b/client/components/display/icon.js
deleted file mode 100644
index 2190eb0..0000000
--- a/client/components/display/icon.js
+++ /dev/null
@@ -1,177 +0,0 @@
-const IconFile = require("react-icons/lib/fa/file-o");
-const IconImg = require("react-icons/lib/md/image");
-const IconZip = require("react-icons/lib/md/archive");
-const IconVideo = require("react-icons/lib/md/ondemand-video");
-const IconAudio = require("react-icons/lib/md/music-video");
-const IconText = require("react-icons/lib/md/description");
-const IconExcel = require("react-icons/lib/fa/file-excel-o");
-const IconPPT = require("react-icons/lib/fa/file-powerpoint-o");
-const IconPdf = require("react-icons/lib/md/picture-as-pdf");
-const IconWord = require("react-icons/lib/fa/file-word-o");
-const IconCode = require("react-icons/lib/md/code");
-const IconApk = require("react-icons/lib/md/android");
-const IconExe = require("react-icons/lib/fa/cog");
-
-const IconBars = require("react-icons/lib/fa/bars");
-const IconSpinner = require("react-icons/lib/md/autorenew");
-const IconCirUp = require("react-icons/lib/fa/arrow-circle-up");
-const IconSignIn = require("react-icons/lib/fa/sign-in");
-const IconSignOut = require("react-icons/lib/fa/sign-out");
-const IconAngUp = require("react-icons/lib/fa/angle-up");
-const IconAngRight = require("react-icons/lib/fa/angle-right");
-const IconAngDown = require("react-icons/lib/fa/angle-down");
-const IconAngLeft = require("react-icons/lib/fa/angle-left");
-const IconTimesCir = require("react-icons/lib/md/cancel");
-const IconPlusSqu = require("react-icons/lib/md/add-box");
-const IconPlusCir = require("react-icons/lib/fa/plus-circle");
-const IconPlus = require("react-icons/lib/md/add");
-const IconSearch = require("react-icons/lib/fa/search");
-const IconThList = require("react-icons/lib/fa/th-list");
-const IconCalendar = require("react-icons/lib/fa/calendar-o");
-
-const IconCheckCir = require("react-icons/lib/fa/check-circle");
-const IconExTri = require("react-icons/lib/fa/exclamation-triangle");
-const IconInfoCir = require("react-icons/lib/fa/info-circle");
-const IconRefresh = require("react-icons/lib/fa/refresh");
-
-const fileTypeIconMap = {
- // text
- txt: { icon: IconText, color: "#333" },
- rtf: { icon: IconText, color: "#333" },
- htm: { icon: IconText, color: "#333" },
- html: { icon: IconText, color: "#333" },
- xml: { icon: IconText, color: "#333" },
- yml: { icon: IconText, color: "#333" },
- json: { icon: IconText, color: "#333" },
- toml: { icon: IconText, color: "#333" },
- md: { icon: IconText, color: "#333" },
- // office
- ppt: { icon: IconPPT, color: "#e67e22" },
- pptx: { icon: IconPPT, color: "#e67e22" },
- xls: { icon: IconExcel, color: "#16a085" },
- xlsx: { icon: IconExcel, color: "#16a085" },
- xlsm: { icon: IconExcel, color: "#16a085" },
- doc: { icon: IconWord, color: "#2980b9" },
- docx: { icon: IconWord, color: "#2980b9" },
- docx: { icon: IconWord, color: "#2980b9" },
- pdf: { icon: IconPdf, color: "#c0392b" },
- // code
- c: { icon: IconCode, color: "#666" },
- cpp: { icon: IconCode, color: "#666" },
- java: { icon: IconCode, color: "#666" },
- js: { icon: IconCode, color: "#666" },
- py: { icon: IconCode, color: "#666" },
- pyc: { icon: IconCode, color: "#666" },
- rb: { icon: IconCode, color: "#666" },
- php: { icon: IconCode, color: "#666" },
- go: { icon: IconCode, color: "#666" },
- sh: { icon: IconCode, color: "#666" },
- vb: { icon: IconCode, color: "#666" },
- sql: { icon: IconCode, color: "#666" },
- r: { icon: IconCode, color: "#666" },
- swift: { icon: IconCode, color: "#666" },
- oc: { icon: IconCode, color: "#666" },
- // misc
- apk: { icon: IconApk, color: "#2ecc71" },
- exe: { icon: IconExe, color: "#333" },
- deb: { icon: IconExe, color: "#333" },
- rpm: { icon: IconExe, color: "#333" },
- // img
- bmp: { icon: IconImg, color: "#1abc9c" },
- gif: { icon: IconImg, color: "#1abc9c" },
- jpg: { icon: IconImg, color: "#1abc9c" },
- jpeg: { icon: IconImg, color: "#1abc9c" },
- tiff: { icon: IconImg, color: "#1abc9c" },
- psd: { icon: IconImg, color: "#1abc9c" },
- png: { icon: IconImg, color: "#1abc9c" },
- svg: { icon: IconImg, color: "#1abc9c" },
- pcx: { icon: IconImg, color: "#1abc9c" },
- dxf: { icon: IconImg, color: "#1abc9c" },
- wmf: { icon: IconImg, color: "#1abc9c" },
- emf: { icon: IconImg, color: "#1abc9c" },
- eps: { icon: IconImg, color: "#1abc9c" },
- tga: { icon: IconImg, color: "#1abc9c" },
- // compress
- gz: { icon: IconZip, color: "#34495e" },
- zip: { icon: IconZip, color: "#34495e" },
- "7z": { icon: IconZip, color: "#34495e" },
- rar: { icon: IconZip, color: "#34495e" },
- tar: { icon: IconZip, color: "#34495e" },
- gzip: { icon: IconZip, color: "#34495e" },
- cab: { icon: IconZip, color: "#34495e" },
- uue: { icon: IconZip, color: "#34495e" },
- arj: { icon: IconZip, color: "#34495e" },
- bz2: { icon: IconZip, color: "#34495e" },
- lzh: { icon: IconZip, color: "#34495e" },
- jar: { icon: IconZip, color: "#34495e" },
- ace: { icon: IconZip, color: "#34495e" },
- iso: { icon: IconZip, color: "#34495e" },
- z: { icon: IconZip, color: "#34495e" },
- // video
- asf: { icon: IconVideo, color: "#f39c12" },
- avi: { icon: IconVideo, color: "#f39c12" },
- flv: { icon: IconVideo, color: "#f39c12" },
- mkv: { icon: IconVideo, color: "#f39c12" },
- mov: { icon: IconVideo, color: "#f39c12" },
- mp4: { icon: IconVideo, color: "#f39c12" },
- mpeg: { icon: IconVideo, color: "#f39c12" },
- mpg: { icon: IconVideo, color: "#f39c12" },
- ram: { icon: IconVideo, color: "#f39c12" },
- rmvb: { icon: IconVideo, color: "#f39c12" },
- qt: { icon: IconVideo, color: "#f39c12" },
- wmv: { icon: IconVideo, color: "#f39c12" },
- // audio
- cda: { icon: IconAudio, color: "#d35400" },
- cmf: { icon: IconAudio, color: "#d35400" },
- mid: { icon: IconAudio, color: "#d35400" },
- mp1: { icon: IconAudio, color: "#d35400" },
- mp2: { icon: IconAudio, color: "#d35400" },
- mp3: { icon: IconAudio, color: "#d35400" },
- rm: { icon: IconAudio, color: "#d35400" },
- rmi: { icon: IconAudio, color: "#d35400" },
- vqf: { icon: IconAudio, color: "#d35400" },
- wav: { icon: IconAudio, color: "#d35400" }
-};
-
-const fileIconMap = {
- ...fileTypeIconMap,
- // other
- spinner: { icon: IconSpinner, color: "#1abc9c" },
- cirup: { icon: IconCirUp, color: "#fff" },
- signin: { icon: IconSignIn, color: "#fff" },
- signout: { icon: IconSignOut, color: "#fff" },
- angup: { icon: IconAngUp, color: "#2c3e50" },
- angright: { icon: IconAngRight, color: "#2c3e50" },
- angdown: { icon: IconAngDown, color: "#2c3e50" },
- angleft: { icon: IconAngLeft, color: "#2c3e50" },
- timescir: { icon: IconTimesCir, color: "#c0392b" },
- plussqu: { icon: IconPlusSqu, color: "#2ecc71" },
- pluscir: { icon: IconPlusCir, color: "#2ecc71" },
- plus: { icon: IconPlus, color: "#2ecc71" },
- search: { icon: IconSearch, color: "#ccc" },
- checkcir: { icon: IconCheckCir, color: "#27ae60" },
- extri: { icon: IconExTri, color: "#f39c12" },
- infocir: { icon: IconInfoCir, color: "#2c3e50" },
- refresh: { icon: IconRefresh, color: "#8e44ad" },
- thlist: { icon: IconThList, color: "#fff" },
- bars: { icon: IconBars, color: "#666" },
- calendar: { icon: IconCalendar, color: "#333" }
-};
-
-export const getIcon = extend => {
- if (fileIconMap[extend.toUpperCase()]) {
- return fileIconMap[extend.toUpperCase()].icon;
- } else if (fileIconMap[extend.toLowerCase()]) {
- return fileIconMap[extend.toLowerCase()].icon;
- }
- return IconFile;
-};
-
-export const getIconColor = extend => {
- if (fileIconMap[extend.toUpperCase()]) {
- return fileIconMap[extend.toUpperCase()].color;
- } else if (fileIconMap[extend.toLowerCase()]) {
- return fileIconMap[extend.toLowerCase()].color;
- }
- return "#333";
-};
diff --git a/client/components/layout/grids.jsx b/client/components/layout/grids.jsx
deleted file mode 100644
index d99ecdb..0000000
--- a/client/components/layout/grids.jsx
+++ /dev/null
@@ -1,27 +0,0 @@
-import React from "react";
-
-const styleGridBase = {
- float: "left",
- margin: 0
-};
-
-export const Grids = props => (
-
- {props.nodes.map(node => (
-
- {node.component}
-
- ))}
-
-
-);
-
-Grids.defaultProps = {
- nodes: [{ key: "key", component: , style: {} }],
- gridStyle: styleGridBase,
- containerStyle: {}
-};
diff --git a/client/config.js b/client/config.js
deleted file mode 100644
index e04b9a3..0000000
--- a/client/config.js
+++ /dev/null
@@ -1,7 +0,0 @@
-export const config = {
- serverAddr: "",
- testId: "admin",
- testPwd: "quicksh@re",
- rootSize: 16,
- colWidth: 20
-};
diff --git a/client/libs/__mocks__/api_auth.js b/client/libs/__mocks__/api_auth.js
deleted file mode 100644
index 7c5f3aa..0000000
--- a/client/libs/__mocks__/api_auth.js
+++ /dev/null
@@ -1,7 +0,0 @@
-export function login(serverAddr, adminId, adminPwd, axiosConfig) {
- return Promise.resolve(true);
-}
-
-export function logout(serverAddr, axiosConfig) {
- return Promise.resolve(true);
-}
diff --git a/client/libs/__mocks__/api_share.js b/client/libs/__mocks__/api_share.js
deleted file mode 100644
index 98db7d8..0000000
--- a/client/libs/__mocks__/api_share.js
+++ /dev/null
@@ -1,41 +0,0 @@
-let _infos = [];
-const shadowedId = "shadowedId";
-const publicId = "publicId";
-
-export function __addInfos(infos) {
- _infos = [..._infos, ...infos];
-}
-
-export function __truncInfos(info) {
- _infos = [];
-}
-
-export const del = shareId => {
- _infos = _infos.filter(info => {
- return !info.shareId == shareId;
- });
- return Promise.resolve(true);
-};
-
-export const list = () => {
- return Promise.resolve(_infos);
-};
-
-export const shadowId = shareId => {
- return Promise.resolve(shadowedId);
-};
-
-export const publishId = shareId => {
- return Promise.resolve(publicId);
-};
-
-export const setDownLimit = (shareId, downLimit) => {
- _infos = _infos.map(info => {
- return info.shareId == shareId ? { ...info, downLimit } : info;
- });
- return Promise.resolve(true);
-};
-
-export const addLocalFiles = () => {
- return Promise.resolve(true);
-};
diff --git a/client/libs/api_auth.js b/client/libs/api_auth.js
deleted file mode 100644
index eee72bc..0000000
--- a/client/libs/api_auth.js
+++ /dev/null
@@ -1,29 +0,0 @@
-import axios from "axios";
-import { config } from "../config";
-import { makePostBody } from "./utils";
-
-export function login(serverAddr, adminId, adminPwd, axiosConfig) {
- return axios
- .post(
- `${serverAddr}/login`,
- makePostBody(
- {
- act: "login",
- adminid: adminId,
- adminpwd: adminPwd
- },
- axiosConfig
- )
- )
- .then(response => {
- return response.data.Code === 200;
- });
-}
-
-export function logout(serverAddr, axiosConfig) {
- return axios
- .post(`${serverAddr}/login`, makePostBody({ act: "logout" }), axiosConfig)
- .then(response => {
- return response.data.Code === 200;
- });
-}
diff --git a/client/libs/api_share.js b/client/libs/api_share.js
deleted file mode 100644
index 88164c4..0000000
--- a/client/libs/api_share.js
+++ /dev/null
@@ -1,51 +0,0 @@
-import axios from "axios";
-import { config } from "../config";
-
-export const del = shareId => {
- return axios
- .delete(`${config.serverAddr}/fileinfo?shareid=${shareId}`)
- .then(response => response.data.Code === 200);
-};
-
-export const list = () => {
- return axios.get(`${config.serverAddr}/fileinfo`).then(response => {
- // TODO check status code
- return response.data.List;
- });
-};
-
-export const shadowId = shareId => {
- const act = "shadowid";
- return axios
- .patch(`${config.serverAddr}/fileinfo?act=${act}&shareid=${shareId}`)
- .then(response => {
- return response.data.ShareId;
- });
-};
-
-export const publishId = shareId => {
- const act = "publishid";
- return axios
- .patch(`${config.serverAddr}/fileinfo?act=${act}&shareid=${shareId}`)
- .then(response => {
- return response.data.ShareId;
- });
-};
-
-export const setDownLimit = (shareId, downLimit) => {
- const act = "setdownlimit";
- return axios
- .patch(
- `${
- config.serverAddr
- }/fileinfo?act=${act}&shareid=${shareId}&downlimit=${downLimit}`
- )
- .then(response => response.data.Code === 200);
-};
-
-export const addLocalFiles = () => {
- const act = "addlocalfiles";
- return axios
- .patch(`${config.serverAddr}/fileinfo?act=${act}`)
- .then(response => response.data.Code === 200);
-};
diff --git a/client/libs/api_upload.js b/client/libs/api_upload.js
deleted file mode 100644
index d8f122e..0000000
--- a/client/libs/api_upload.js
+++ /dev/null
@@ -1,202 +0,0 @@
-import axios from "axios";
-import { config } from "../config";
-import { makePostBody } from "./utils";
-
-const wait = 5000; // TODO: should tune according to backend
-const retryMax = 100000;
-const maxUploadLen = 20 * 1024 * 1024;
-
-// TODO: add to react-intl
-const msgUploadFailed = "Fail to upload, upload is stopped.";
-const msgUploadFailedAndRetry = "Fail to upload, retrying...";
-const msgFileExists = "File exists.";
-const msgTooBigChunk = "Too big chunk.";
-const msgFileNotFound = "File not found, upload stopped.";
-
-function randomWait() {
- return Math.random() * wait;
-}
-
-function isKnownErr(res) {
- return res != null && res.Code != null && res.Msg != null;
-}
-
-export class FileUploader {
- constructor(onStart, onProgress, onFinish, onError) {
- this.onStart = onStart;
- this.onProgress = onProgress;
- this.onFinish = onFinish;
- this.onError = onError;
- this.retry = retryMax;
- this.reader = new FileReader();
-
- this.uploadFile = file => {
- return this.startUpload(file);
- };
-
- this.startUpload = file => {
- return axios
- .post(
- `${config.serverAddr}/startupload`,
- makePostBody({
- fname: file.name
- })
- )
- .then(response => {
- if (
- response.data.ShareId == null ||
- response.data.Start == null ||
- response.data.Length == null
- ) {
- throw response;
- } else {
- this.onStart(response.data.ShareId, file.name);
- return this.upload(
- {
- shareId: response.data.ShareId,
- start: response.data.Start,
- length: response.data.Length
- },
- file
- );
- }
- })
- .catch(response => {
- // TODO: the caught value may be an Error rather than a response; handle both shapes
- if (isKnownErr(response.data) && response.data.Code === 429) {
- setTimeout(this.startUpload, randomWait(), file);
- } else if (isKnownErr(response.data) && response.data.Code === 412) {
- this.onError(msgFileExists);
- } else if (isKnownErr(response.data) && response.data.Code === 404) {
- this.onError(msgFileNotFound);
- } else if (this.retry > 0) {
- this.retry--;
- this.onError(msgUploadFailedAndRetry);
- console.trace(response);
- setTimeout(this.startUpload, randomWait(), file);
- } else {
- this.onError(msgUploadFailed);
- console.trace(response);
- }
- });
- };
-
- this.prepareReader = (shareInfo, end, resolve, reject) => {
- this.reader.onerror = err => {
- reject(err);
- };
-
- this.reader.onloadend = event => {
- const formData = new FormData();
- formData.append("shareid", shareInfo.shareId);
- formData.append("start", shareInfo.start);
- formData.append("len", end - shareInfo.start);
- formData.append("chunk", new Blob([event.target.result]));
-
- const url = `${config.serverAddr}/upload`;
- const headers = {
- "Content-Type": "multipart/form-data"
- };
-
- try {
- axios
- .post(url, formData, { headers })
- .then(response => resolve(response))
- .catch(err => {
- reject(err);
- });
- } catch (err) {
- reject(err);
- }
- };
- };
-
- this.upload = (shareInfo, file) => {
- const uploaded = shareInfo.start + shareInfo.length;
- const end = uploaded < file.size ? uploaded : file.size;
-
- return new Promise((resolve, reject) => {
- if (
- end == null ||
- shareInfo.start == null ||
- end - shareInfo.start >= maxUploadLen
- ) {
- throw new Error(msgTooBigChunk);
- }
-
- const chunk = file.slice(shareInfo.start, end);
- this.prepareReader(shareInfo, end, resolve, reject);
- this.reader.readAsArrayBuffer(chunk);
- })
- .then(response => {
- if (
- response.data.ShareId == null ||
- response.data.Start == null ||
- response.data.Length == null ||
- response.data.Start !== end
- ) {
- throw response;
- } else {
- if (end < file.size) {
- this.onProgress(shareInfo.shareId, end / file.size);
- return this.upload(
- {
- shareId: shareInfo.shareId,
- start: shareInfo.start + shareInfo.length,
- length: shareInfo.length
- },
- file
- );
- } else {
- return this.finishUpload(shareInfo);
- }
- }
- })
- .catch(response => {
- // possible error: response.data.Start == null || response.data.Start !== end
- if (isKnownErr(response.data) && response.data.Code === 429) {
- setTimeout(this.upload, randomWait(), shareInfo, file);
- } else if (isKnownErr(response.data) && response.data.Code === 404) {
- this.onError(msgFileNotFound);
- } else if (this.retry > 0) {
- this.retry--;
- setTimeout(this.upload, randomWait(), shareInfo, file);
- this.onError(msgUploadFailedAndRetry);
- console.trace(response);
- } else {
- this.onError(msgUploadFailed);
- console.trace(response);
- }
- });
- };
-
- this.finishUpload = shareInfo => {
- return axios
- .post(`${config.serverAddr}/finishupload?shareid=${shareInfo.shareId}`)
- .then(response => {
- // TODO: should check Code instead of the presence of ShareId/Start
- if (response.data.ShareId != null && response.data.Start == null) {
- this.onFinish();
- return response.data.ShareId;
- } else {
- throw response;
- }
- })
- .catch(response => {
- if (isKnownErr(response.data) && response.data.Code === 429) {
- setTimeout(this.finishUpload, randomWait(), shareInfo);
- } else if (isKnownErr(response.data) && response.data.Code === 404) {
- this.onError(msgFileNotFound);
- } else if (this.retry > 0) {
- this.retry--;
- setTimeout(this.finishUpload, randomWait(), shareInfo);
- this.onError(msgUploadFailedAndRetry);
- console.trace(response);
- } else {
- this.onError(msgUploadFailed);
- console.trace(response);
- }
- });
- };
- }
-}
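
The uploader above encodes the chunked-upload protocol: POST /startupload with the file name returns ShareId plus the next Start/Length window, each chunk goes to /upload as a multipart form (shareid, start, len, chunk) kept under 20 MB, /finishupload closes the share, and 429 responses trigger a retry after a random wait. A rough Go sketch of one /upload round under those assumptions; the multipart field names come from this client code, since the server-side upload handler is not part of this hunk.

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "mime/multipart"
        "net/http"
    )

    type chunkWindow struct {
        ShareId string
        Start   int64
        Length  int64
    }

    // uploadChunk posts one chunk to /upload as multipart form data and returns the
    // next window (Start/Length) the server expects, mirroring FileUploader.upload.
    func uploadChunk(serverAddr string, w chunkWindow, chunk []byte) (chunkWindow, error) {
        var buf bytes.Buffer
        mw := multipart.NewWriter(&buf)
        mw.WriteField("shareid", w.ShareId)
        mw.WriteField("start", fmt.Sprint(w.Start))
        mw.WriteField("len", fmt.Sprint(len(chunk)))
        part, err := mw.CreateFormFile("chunk", "chunk")
        if err != nil {
            return w, err
        }
        part.Write(chunk)
        mw.Close()

        resp, err := http.Post(serverAddr+"/upload", mw.FormDataContentType(), &buf)
        if err != nil {
            return w, err
        }
        defer resp.Body.Close()

        var next chunkWindow
        err = json.NewDecoder(resp.Body).Decode(&next)
        return next, err
    }

    func main() {
        // placeholder window; /startupload normally supplies ShareId, Start and Length
        next, err := uploadChunk("http://127.0.0.1:8888", chunkWindow{ShareId: "placeholder", Start: 0, Length: 3}, []byte("foo"))
        fmt.Println(next, err)
    }

As in the client above, a 429 reply would mean retrying the same window after a random wait, and the returned Start must equal the end of the chunk just sent before the next round proceeds.
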
diff --git a/client/libs/file_type.js b/client/libs/file_type.js
deleted file mode 100644
index 0e89473..0000000
--- a/client/libs/file_type.js
+++ /dev/null
@@ -1,18 +0,0 @@
-const fileTypeMap = {
- jpg: "image",
- jpeg: "image",
- png: "image",
- bmp: "image",
- gz: "archive",
- mov: "video",
- mp4: "video",
- mov: "video",
- avi: "video"
-};
-
-export const getFileExt = fileName => fileName.split(".").pop();
-
-export const getFileType = fileName => {
- const ext = getFileExt(fileName);
- return fileTypeMap[ext] != null ? fileTypeMap[ext] : "file";
-};
diff --git a/client/libs/test/api_auth_test.js b/client/libs/test/api_auth_test.js
deleted file mode 100644
index 29cd955..0000000
--- a/client/libs/test/api_auth_test.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { login, logout } from "../api_auth";
-import { config } from "../../config";
-
-const serverAddr = config.serverAddr;
-const testId = config.testId;
-const testPwd = config.testPwd;
-
-export function testAuth() {
- return testLogin()
- .then(testLogout)
- .catch(err => {
- console.error("auth: fail", err);
- });
-}
-
-export function testLogin() {
- return login(serverAddr, testId, testPwd).then(ok => {
- if (ok === true) {
- console.log("login api: ok");
- } else {
- throw new Error("login api: failed");
- }
- });
-}
-
-export function testLogout() {
- return logout(serverAddr).then(ok => {
- if (ok === true) {
- console.log("logout api: ok");
- } else {
- throw new Error("logout api: failed");
- }
- });
-}
diff --git a/client/libs/test/api_share_test.js b/client/libs/test/api_share_test.js
deleted file mode 100644
index 99ad2a1..0000000
--- a/client/libs/test/api_share_test.js
+++ /dev/null
@@ -1,167 +0,0 @@
-import { FileUploader } from "../api_upload";
-import {
- del,
- list,
- shadowId,
- publishId,
- setDownLimit,
- addLocalFiles
-} from "../api_share";
-import { testLogin, testLogout } from "./api_auth_test";
-
-const fileName = "filename";
-
-function upload(fileName) {
- return new Promise(resolve => {
- const onStart = () => true;
- const onProgress = () => true;
- const onFinish = () => resolve();
- const onError = err => {
- throw new Error(JSON.stringify(err));
- };
- const file = new File(["foo"], fileName, {
- type: "text/plain"
- });
-
- const uploader = new FileUploader(onStart, onProgress, onFinish, onError);
- uploader.uploadFile(file);
- });
-}
-
-function getIdFromList(list, fileName) {
- if (list == null) {
- throw new Error("list: list fail");
- }
-
- // TODO: should verify file name
- const filterInfo = list.find(info => {
- return info.PathLocal.includes(fileName);
- });
-
- if (filterInfo == null) {
- console.error(list);
- throw new Error("list: file name not found");
- } else {
- return filterInfo.Id;
- }
-}
-
-function delWithName(fileName) {
- return list().then(infoList => {
- const infoToDel = infoList.find(info => {
- return info.PathLocal.includes(fileName);
- });
-
- if (infoToDel == null) {
- console.warn("delWithName: name not found");
- } else {
- return del(infoToDel.Id);
- }
- });
-}
-
-export function testShadowPublishId() {
- return testLogin()
- .then(() => upload(fileName))
- .then(list)
- .then(infoList => {
- return getIdFromList(infoList, fileName);
- })
- .then(shareId => {
- return shadowId(shareId).then(secretId => {
- if (shareId === secretId) {
- throw new Error("shadowId: id not changed");
- } else {
- return secretId;
- }
- });
- })
- .then(secretId => {
- return list().then(infoList => {
- const info = infoList.find(info => {
- return info.Id === secretId;
- });
-
- if (info.PathLocal.includes(fileName)) {
- console.log("shadowId api: ok", secretId);
- return secretId;
- } else {
- throw new Error("shadowId pai: file not found", infoList, fileName);
- }
- });
- })
- .then(secretId => {
- return publishId(secretId).then(publicId => {
- if (publicId === secretId) {
- // TODO: it is not enough to check they are not equal
- throw new Error("publicId: id not changed");
- } else {
- console.log("publishId api: ok", publicId);
- return publicId;
- }
- });
- })
- .then(shareId => del(shareId))
- .then(testLogout)
- .catch(err => {
- console.error(err);
- delWithName(fileName);
- });
-}
-
-export function testSetDownLimit() {
- const downLimit = 777;
-
- return testLogin()
- .then(() => upload(fileName))
- .then(list)
- .then(infoList => {
- return getIdFromList(infoList, fileName);
- })
- .then(shareId => {
- return setDownLimit(shareId, downLimit).then(ok => {
- if (!ok) {
- throw new Error("setDownLimit: failed");
- } else {
- return shareId;
- }
- });
- })
- .then(shareId => {
- return list().then(infoList => {
- const info = infoList.find(info => {
- return info.Id == shareId;
- });
-
- if (info.DownLimit === downLimit) {
- console.log("setDownLimit api: ok");
- return shareId;
- } else {
- throw new Error("setDownLimit api: limit unchanged");
- }
- });
- })
- .then(shareId => del(shareId))
- .then(testLogout)
- .catch(err => {
- console.error(err);
- delWithName(fileName);
- });
-}
-
-// TODO: need to add local file and test
-export function testAddLocalFiles() {
- return testLogin()
- .then(() => addLocalFiles())
- .then(ok => {
- if (ok) {
- console.log("addLocalFiles api: ok");
- } else {
- throw new Error("addLocalFiles api: failed");
- }
- })
- .then(() => testLogout())
- .catch(err => {
- console.error(err);
- });
-}
diff --git a/client/libs/test/api_test.js b/client/libs/test/api_test.js
deleted file mode 100644
index 5ff75cf..0000000
--- a/client/libs/test/api_test.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import { testAuth } from "./api_auth_test";
-import { testUploadOneFile } from "./api_upload_test";
-import {
- testAddLocalFiles,
- testSetDownLimit,
- testShadowPublishId
-} from "./api_share_test";
-import { testUpDownBatch } from "./api_up_down_batch_test";
-
-console.log("Test started");
-
-const fileName = `test_filename${Date.now()}`;
-const file = new File(["foo"], fileName, {
- type: "text/plain"
-});
-
-testAuth()
- .then(testShadowPublishId)
- .then(() => testUploadOneFile(file, fileName))
- .then(testSetDownLimit)
- .then(testAddLocalFiles)
- .then(testUpDownBatch)
- .then(() => {
- console.log("Tests are finished");
- });
diff --git a/client/libs/test/api_up_down_batch_test.js b/client/libs/test/api_up_down_batch_test.js
deleted file mode 100644
index 6011b1d..0000000
--- a/client/libs/test/api_up_down_batch_test.js
+++ /dev/null
@@ -1,97 +0,0 @@
-import axios from "axios";
-import md5 from "md5";
-
-import { config } from "../../config";
-import { testUpload } from "./api_upload_test";
-import { list, del } from "../api_share";
-import { testLogin, testLogout } from "./api_auth_test";
-
-export function testUpDownBatch() {
- const fileInfos = [
- {
- fileName: "test_2MB_1",
- content: new Array(1024 * 1024 * 2).join("x")
- },
- {
- fileName: "test_1MB_1",
- content: new Array(1024 * 1024 * 1).join("x")
- },
- {
- fileName: "test_2MB_2",
- content: new Array(1024 * 1024 * 2).join("x")
- },
- {
- fileName: "test_1B",
- content: `${new Array(3).join("o")}${new Array(3).join("x")}`
- }
- ];
-
- return testLogin()
- .then(() => {
- const promises = fileInfos.map(info => {
- const file = new File([info.content], info.fileName, {
- type: "text/plain"
- });
-
- return testUpAndDownOneFile(file, info.fileName);
- });
-
- return Promise.all(promises);
- })
- .then(() => {
- testLogout();
- })
- .catch(err => console.error(err));
-}
-
-export function testUpAndDownOneFile(file, fileName) {
- return delTestFile(fileName)
- .then(() => testUpload(file))
- .then(shareId => testDownload(shareId, file))
- .catch(err => console.error(err));
-}
-
-function delTestFile(fileName) {
- return list().then(infos => {
- const info = infos.find(info => {
- return info.PathLocal === fileName;
- });
-
- if (info == null) {
- console.log("up-down: file not found", fileName);
- } else {
- return del(info.Id);
- }
- });
-}
-
-function testDownload(shareId, file) {
- return axios
- .get(`${config.serverAddr}/download?shareid=${shareId}`)
- .then(response => {
- return new Promise((resolve, reject) => {
- const reader = new FileReader();
- reader.onload = event => {
- const upHash = md5(event.target.result);
- const downHash = md5(response.data);
- if (upHash !== downHash) {
- console.error(
- "up&down: hash unmatch",
- file.name,
- upHash,
- downHash,
- upHash.length,
- downHash.length
- );
- } else {
- console.log("up&down: ok: hash match", file.name, upHash, downHash);
- resolve();
- }
- };
-
- reader.onerror = err => reject(err);
-
- reader.readAsText(file);
- });
- });
-}
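
The round-trip test above verifies integrity by comparing the md5 of the uploaded content with the md5 of the /download response body. The same check in Go, as a small sketch:

    package main

    import (
        "crypto/md5"
        "encoding/hex"
        "fmt"
    )

    // sameContent hashes what was uploaded and what came back and compares the
    // digests, mirroring the up&down hash check in the test above.
    func sameContent(uploaded, downloaded []byte) bool {
        up := md5.Sum(uploaded)
        down := md5.Sum(downloaded)
        return hex.EncodeToString(up[:]) == hex.EncodeToString(down[:])
    }

    func main() {
        fmt.Println(sameContent([]byte("ooxx"), []byte("ooxx"))) // true
        fmt.Println(sameContent([]byte("ooxx"), []byte("oxx")))  // false
    }
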
diff --git a/client/libs/test/api_upload_test.js b/client/libs/test/api_upload_test.js
deleted file mode 100644
index bbd3d99..0000000
--- a/client/libs/test/api_upload_test.js
+++ /dev/null
@@ -1,63 +0,0 @@
-import { FileUploader } from "../api_upload";
-import { list, del } from "../api_share";
-import { testLogin, testLogout } from "./api_auth_test";
-
-function verify(fileName) {
- return list()
- .then(list => {
- if (list == null) {
- throw new Error("upload: list fail");
- }
-
- // TODO: should verify file name
- const filterInfo = list.find(info => {
- return info.PathLocal.includes(fileName);
- });
-
- if (filterInfo == null) {
- console.error(list);
- throw new Error("upload: file name not found");
- } else {
- return filterInfo.Id;
- }
- })
- .then(shareId => {
- console.log("upload api: ok");
- del(shareId);
- })
- .then(testLogout)
- .catch(err => {
- throw err;
- });
-}
-
-export function testUpload(file) {
- const onStart = () => true;
- const onProgress = () => true;
- const onFinish = () => true;
- const onError = err => {
- throw new Error(JSON.stringify(err));
- };
- const uploader = new FileUploader(onStart, onProgress, onFinish, onError);
-
- return uploader.uploadFile(file).catch(err => {
- console.error(err);
- });
-}
-
-export function testUploadOneFile(file, fileName) {
- const onStart = () => true;
- const onProgress = () => true;
- const onFinish = () => true;
- const onError = err => {
- throw new Error(JSON.stringify(err));
- };
- const uploader = new FileUploader(onStart, onProgress, onFinish, onError);
-
- return testLogin()
- .then(() => uploader.uploadFile(file))
- .then(() => verify(fileName))
- .catch(err => {
- console.error(err);
- });
-}
diff --git a/client/libs/utils.js b/client/libs/utils.js
deleted file mode 100644
index dddbc26..0000000
--- a/client/libs/utils.js
+++ /dev/null
@@ -1,5 +0,0 @@
-export function makePostBody(paramMap) {
- // encode keys and values so special characters survive form encoding
- return Object.keys(paramMap)
- .map(key => `${encodeURIComponent(key)}=${encodeURIComponent(paramMap[key])}`)
- .join("&");
-}
diff --git a/client/panels/admin.jsx b/client/panels/admin.jsx
deleted file mode 100644
index 8cb6d88..0000000
--- a/client/panels/admin.jsx
+++ /dev/null
@@ -1,151 +0,0 @@
-import axios from "axios";
-import React from "react";
-import ReactDOM from "react-dom";
-
-import { config } from "../config";
-import { addLocalFiles, list } from "../libs/api_share";
-import { login, logout } from "../libs/api_auth";
-import { FilePane } from "../components/composite/file_pane";
-import { InfoBar } from "../components/composite/info_bar";
-import { Log } from "../components/composite/log";
-
-function getWidth() {
- if (window.innerWidth >= window.innerHeight) {
- return `${Math.floor(
- (window.innerWidth * 0.95) / config.rootSize / config.colWidth
- ) * config.colWidth}rem`;
- }
- return "auto";
-}
-
-const styleLogContainer = {
- paddingTop: "1rem",
- textAlign: "center",
- height: "2rem",
- overflowX: "hidden" // TODO: should no hidden
-};
-
-const styleLogContent = {
- color: "#333",
- fontSize: "0.875rem",
- opacity: 0.6,
- backgroundColor: "#fff",
- borderRadius: "1rem",
- whiteSpace: "nowrap"
-};
-
-class AdminPanel extends React.PureComponent {
- constructor(props) {
- super(props);
- this.state = {
- isLogin: false,
- filterName: "",
- serverAddr: `${window.location.protocol}//${window.location.hostname}:${
- window.location.port
- }`,
- width: getWidth()
- };
- this.log = {
- ok: msg => console.log(msg),
- warning: msg => console.log(msg),
- info: msg => console.log(msg),
- error: msg => console.log(msg),
- start: msg => console.log(msg),
- end: msg => console.log(msg)
- };
- this.logComponent = ;
- }
-
- componentWillMount() {
- list().then(infos => {
- if (infos != null) {
- this.setState({ isLogin: true });
- }
- });
- }
-
- setWidth = () => {
- this.setState({ width: getWidth() });
- };
-
- // componentDidMount() {
- // window.addEventListener("resize", this.setWidth);
- // }
-
- // componentWillUnmount() {
- // window.removeEventListener("resize", this.setWidth);
- // }
-
- onLogin = (serverAddr, adminId, adminPwd) => {
- login(serverAddr, adminId, adminPwd).then(ok => {
- if (ok === true) {
- this.setState({ isLogin: true });
- } else {
- this.log.error("Fail to login");
- this.setState({ isLogin: false });
- }
- });
- };
-
- onLogout = serverAddr => {
- logout(serverAddr).then(ok => {
- if (ok === false) {
- this.log.error("Fail to log out");
- } else {
- this.log.ok("You are logged out");
- }
- this.setState({ isLogin: false });
- });
- };
-
- onSearch = fileName => {
- this.setState({ filterName: fileName });
- };
-
- assignLog = logRef => {
- this.log = logRef;
- this.log.info(
-
- Know more about{" "}
- Quickshare
-
- );
- };
-
- render() {
- const width = this.state.width;
-
- return (
-
-
- {this.logComponent}
-
- {this.state.isLogin ? (
-
- ) : (
-
- )}
-
- );
- }
-}
-
-ReactDOM.render(<AdminPanel />, document.getElementById("app"));
diff --git a/client/tests/enzyme_setup.js b/client/tests/enzyme_setup.js
deleted file mode 100644
index 814fd50..0000000
--- a/client/tests/enzyme_setup.js
+++ /dev/null
@@ -1,4 +0,0 @@
-import { configure } from "enzyme";
-import Adapter from "enzyme-adapter-react-15";
-
-configure({ adapter: new Adapter() });
diff --git a/client/tests/test_helper.js b/client/tests/test_helper.js
deleted file mode 100644
index 32d76ac..0000000
--- a/client/tests/test_helper.js
+++ /dev/null
@@ -1,73 +0,0 @@
-// function should be called after async operation is finished
-export function execFuncs(instance, execs) {
- // instance: enzyme mounted component
- // const execs = [
- // {
- // func: "componentWillMount",
- // args: []
- // }
- // ];
- return execs.reduce((prePromise, nextFunc) => {
- return prePromise.then(() => instance[nextFunc.func](...nextFunc.args));
- }, Promise.resolve());
-}
-
-export function execsToStr(execs) {
- // const execs = [
- // {
- // func: "componentWillMount",
- // args: []
- // }
- // ];
- const execList = execs.map(
- funcInfo => `${funcInfo.func}(${funcInfo.args.join(", ")})`
- );
-
- return execList.join(", ");
-}
-
-export function getDesc(componentName, testCase) {
- // const testCase = {
- // execs: [
- // {
- // func: "onAddLocalFiles",
- // args: []
- // }
- // ],
- // state: {
- // filterFileName: ""
- // },
- // calls: [
- // {
- // func: "onAddLocalFiles",
- // count: 1
- // }
- // ]
- // }
- return `${componentName} should satisfy following by exec ${execsToStr(
- testCase.execs
- )}
- state=${JSON.stringify(testCase.state)}
- calls=${JSON.stringify(testCase.calls)} `;
-}
-
-export function verifyCalls(calls, stubs) {
- // const calls: [
- // {
- // func: "funcName",
- // count: 1
- // }
- // ];
- // const stubs = {
- // funcName: jest.fn(),
- // };
- let err = null;
- calls.forEach(called => {
- if (stubs[called.func].mock.calls.length != called.count) {
- err = `${called.func} should be called ${called.count} times but was called ${
- stubs[called.func].mock.calls.length
- } times`;
- }
- });
- return err;
-}
diff --git a/client/webpack.config.common.js b/client/webpack.config.common.js
deleted file mode 100644
index 51ea9cd..0000000
--- a/client/webpack.config.common.js
+++ /dev/null
@@ -1,60 +0,0 @@
-const webpack = require("webpack");
-const CleanWebpackPlugin = require("clean-webpack-plugin");
-// const HtmlWebpackPlugin = require("html-webpack-plugin");
-
-const outputPath = `${__dirname}/../public/dist`;
-
-module.exports = {
- context: __dirname,
- entry: {
- assets: ["axios", "immutable", "react", "react-dom"],
- admin: "./panels/admin"
- },
- output: {
- path: outputPath,
- filename: "[name].bundle.js"
- },
- module: {
- rules: [
- {
- test: /\.js|jsx$/,
- use: [
- {
- loader: "babel-loader",
- options: {
- presets: ["es2015", "react", "stage-2"]
- }
- }
- ]
- },
- {
- test: /\.css$/,
- use: ["style-loader", "css-loader"]
- },
- {
- test: /\.(png|jpg|gif)$/,
- use: [
- {
- loader: "file-loader",
- options: {}
- }
- ]
- }
- ]
- },
- resolve: {
- extensions: [".js", ".json", ".jsx", ".css"]
- },
- plugins: [
- new webpack.optimize.CommonsChunkPlugin({
- name: "assets",
- // filename: "vendor.js"
- // (Give the chunk a different name)
- minChunks: Infinity
- // (with more entries, this ensures that no other module
- // goes into the vendor chunk)
- }),
- // new HtmlWebpackPlugin(),
- new CleanWebpackPlugin([outputPath])
- ]
-};
diff --git a/client/webpack.config.dev.js b/client/webpack.config.dev.js
deleted file mode 100644
index 45be7e1..0000000
--- a/client/webpack.config.dev.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const merge = require("webpack-merge");
-const common = require("./webpack.config.common.js");
-
-module.exports = merge(common, {
- entry: {
- api_test: "./libs/test/api_test"
- },
- devtool: "inline-source-map",
- devServer: {
- contentBase: "./dist"
- },
- watchOptions: {
- aggregateTimeout: 1000,
- poll: 1000,
- ignored: /node_modules/
- }
-});
diff --git a/client/webpack.config.prod.js b/client/webpack.config.prod.js
deleted file mode 100644
index f7ffae0..0000000
--- a/client/webpack.config.prod.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const common = require("./webpack.config.common.js");
-const merge = require("webpack-merge");
-const UglifyJS = require("uglifyjs-webpack-plugin");
-const webpack = require("webpack");
-
-module.exports = merge(common, {
- devtool: "source-map",
- plugins: [
- new UglifyJS({
- sourceMap: true
- }),
- new webpack.DefinePlugin({
- "process.env.NODE_ENV": JSON.stringify("production")
- })
- ]
-});
diff --git a/docs/README_zh-cn.md b/docs/README_zh-cn.md
index 1f86991..622851e 100644
--- a/docs/README_zh-cn.md
+++ b/docs/README_zh-cn.md
@@ -1,12 +1,12 @@
- Quickshare
+ [未完成!!!] Quickshare
- 一个小而美的文件共享服务器
+ 简约的文件共享服务, 使用Go/Golang, Typescript, Gin, React, Boltdb等构建.
-
+
diff --git a/package.json b/package.json
index 09834ab..b48bf65 100644
--- a/package.json
+++ b/package.json
@@ -1,4 +1,16 @@
{
"private": true,
- "workspaces": ["src/client/web"]
+ "workspaces": [
+ "src/client/web"
+ ],
+ "scripts": {
+ "dev:setup": "yarn && yarn dev:copy-immutable && yarn dev:copy-react-dom && yarn dev:copy-react",
+ "dev:copy-immutable": "cp node_modules/immutable/dist/immutable.min.js public/static/js",
+ "dev:copy-react-dom": "cp node_modules/react-dom/umd/react-dom.development.js public/static/js",
+ "dev:copy-react": "cp node_modules/react/umd/react.development.js public/static/js",
+ "prod:setup": "yarn && yarn prod:copy-immutable && yarn prod:copy-react-dom && yarn prod:copy-react",
+ "prod:copy-immutable": "cp node_modules/immutable/dist/immutable.min.js public/static/js",
+ "prod:copy-react-dom": "cp node_modules/react-dom/umd/react-dom.production.min.js public/static/js",
+ "prod:copy-react": "cp node_modules/react/umd/react.production.min.js public/static/js"
+ }
}
diff --git a/public/index.html b/public/index.html
deleted file mode 100644
index 076dc63..0000000
--- a/public/index.html
+++ /dev/null
@@ -1,94 +0,0 @@
-
-
-
-
- Quickshare
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/server.go b/server.go
deleted file mode 100644
index 5093eef..0000000
--- a/server.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package main
-
-import (
- "fmt"
- "log"
- "net/http"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/apis"
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/skratchdot/open-golang/open"
-)
-
-func main() {
- config := cfg.NewConfigFrom("config.json")
- srvShare := apis.NewSrvShare(config)
-
- // TODO: use httprouter instead
- mux := http.NewServeMux()
- mux.HandleFunc(config.PathLogin, srvShare.LoginHandler)
- mux.HandleFunc(config.PathStartUpload, srvShare.StartUploadHandler)
- mux.HandleFunc(config.PathUpload, srvShare.UploadHandler)
- mux.HandleFunc(config.PathFinishUpload, srvShare.FinishUploadHandler)
- mux.HandleFunc(config.PathDownload, srvShare.DownloadHandler)
- mux.HandleFunc(config.PathFileInfo, srvShare.FileInfoHandler)
- mux.HandleFunc(config.PathClient, srvShare.ClientHandler)
-
- server := &http.Server{
- Addr: fmt.Sprintf("%s:%d", config.HostName, config.Port),
- Handler: mux,
- MaxHeaderBytes: config.MaxHeaderBytes,
- ReadTimeout: time.Duration(config.ReadTimeout) * time.Millisecond,
- WriteTimeout: time.Duration(config.WriteTimeout) * time.Millisecond,
- IdleTimeout: time.Duration(config.IdleTimeout) * time.Millisecond,
- }
-
- log.Printf("quickshare starts @ %s:%d", config.HostName, config.Port)
- err := open.Start(fmt.Sprintf("http://%s:%d", config.HostName, config.Port))
- if err != nil {
- log.Println(err)
- }
- log.Fatal(server.ListenAndServe())
-}
diff --git a/server/apis/auth.go b/server/apis/auth.go
deleted file mode 100644
index 1377403..0000000
--- a/server/apis/auth.go
+++ /dev/null
@@ -1,105 +0,0 @@
-package apis
-
-import (
- "net/http"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
-)
-
-func (srv *SrvShare) LoginHandler(res http.ResponseWriter, req *http.Request) {
- if req.Method != http.MethodPost {
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- act := req.FormValue(srv.Conf.KeyAct)
- todo := func(res http.ResponseWriter, req *http.Request) interface{} { return httputil.Err404 }
- switch act {
- case srv.Conf.ActLogin:
- todo = srv.Login
- case srv.Conf.ActLogout:
- todo = srv.Logout
- default:
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(todo),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.Timeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-func (srv *SrvShare) Login(res http.ResponseWriter, req *http.Request) interface{} {
- // all users must pass the same wall to log in
- if !srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr)) ||
- !srv.Walls.PassOpLimit(srv.Conf.AllUsers, srv.Conf.OpIdLogin) {
- return httputil.Err504
- }
-
- return srv.login(
- req.FormValue(srv.Conf.KeyAdminId),
- req.FormValue(srv.Conf.KeyAdminPwd),
- res,
- )
-}
-
-func (srv *SrvShare) login(adminId string, adminPwd string, res http.ResponseWriter) interface{} {
- if adminId != srv.Conf.AdminId ||
- adminPwd != srv.Conf.AdminPwd {
- return httputil.Err401
- }
-
- token := srv.Walls.MakeLoginToken(srv.Conf.AdminId)
- if token == "" {
- return httputil.Err500
- }
-
- srv.Http.SetCookie(res, srv.Conf.KeyToken, token)
- return httputil.Ok200
-}
-
-func (srv *SrvShare) Logout(res http.ResponseWriter, req *http.Request) interface{} {
- srv.Http.SetCookie(res, srv.Conf.KeyToken, "-")
- return httputil.Ok200
-}
-
-func (srv *SrvShare) IsValidLength(length int64) bool {
- return length > 0 && length <= srv.Conf.MaxUpBytesPerSec
-}
-
-func (srv *SrvShare) IsValidStart(start, expectStart int64) bool {
- return start == expectStart
-}
-
-func (srv *SrvShare) IsValidShareId(shareId string) bool {
- // id could be 0 for dev environment
- if srv.Conf.Production {
- return len(shareId) == 64
- }
- return true
-}
-
-func (srv *SrvShare) IsValidDownLimit(limit int) bool {
- return limit >= -1
-}
-
-func IsValidFileName(fileName string) bool {
- return fileName != "" && len(fileName) < 240
-}
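
Every handler in this package follows the same shape: the real work is wrapped into an httpworker.Task with a buffered ack channel, queued on the worker pool (503 if the queue is full), and then the handler waits on IsInTime for the configured timeout (500 if the worker misses it). The httpworker internals are outside this hunk, but the waiting half reduces to a plain select over the ack channel and a timer, roughly:

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // waitInTime waits for the worker's acknowledgement or gives up after timeout,
    // which is the idea behind WorkerPool.IsInTime used by the handlers above.
    func waitInTime(ack <-chan error, timeout time.Duration) error {
        select {
        case err := <-ack:
            return err
        case <-time.After(timeout):
            return errors.New("worker did not finish in time")
        }
    }

    func main() {
        ack := make(chan error, 1) // buffered, so the worker never blocks on send
        go func() {
            time.Sleep(10 * time.Millisecond) // stand-in for the wrapped handler work
            ack <- nil
        }()
        fmt.Println(waitInTime(ack, 500*time.Millisecond)) // <nil>
    }
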
diff --git a/server/apis/auth_test.go b/server/apis/auth_test.go
deleted file mode 100644
index 1a84f78..0000000
--- a/server/apis/auth_test.go
+++ /dev/null
@@ -1,78 +0,0 @@
-package apis
-
-import (
- "fmt"
- "strings"
- "testing"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/encrypt"
- "github.com/ihexxa/quickshare/server/libs/httputil"
-)
-
-func TestLogin(t *testing.T) {
- conf := cfg.NewConfig()
-
- type testCase struct {
- Desc string
- AdminId string
- AdminPwd string
- Result interface{}
- VerifyToken bool
- }
-
- testCases := []testCase{
- testCase{
- Desc: "invalid input",
- AdminId: "",
- AdminPwd: "",
- Result: httputil.Err401,
- VerifyToken: false,
- },
- testCase{
- Desc: "account not match",
- AdminId: "unknown",
- AdminPwd: "unknown",
- Result: httputil.Err401,
- VerifyToken: false,
- },
- testCase{
- Desc: "succeed to login",
- AdminId: conf.AdminId,
- AdminPwd: conf.AdminPwd,
- Result: httputil.Ok200,
- VerifyToken: true,
- },
- }
-
- for _, testCase := range testCases {
- srv := NewSrvShare(conf)
- res := &stubWriter{Headers: map[string][]string{}}
- ret := srv.login(testCase.AdminId, testCase.AdminPwd, res)
-
- if ret != testCase.Result {
- t.Fatalf("login: response=%v testCase=%v", ret, testCase.Result)
- }
-
- // verify the cookie (only the token.adminid part)
- if testCase.VerifyToken {
- cookieVal := strings.Replace(
- res.Header().Get("Set-Cookie"),
- fmt.Sprintf("%s=", conf.KeyToken),
- "",
- 1,
- )
-
- gotTokenStr := strings.Split(cookieVal, ";")[0]
- token := encrypt.JwtEncrypterMaker(conf.SecretKey)
- token.FromStr(gotTokenStr)
- gotToken, found := token.Get(conf.KeyAdminId)
- if !found || conf.AdminId != gotToken {
- t.Fatalf("login: token admin id unmatch got=%v expect=%v", gotToken, conf.AdminId)
- }
- }
-
- }
-}
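
The test above extracts the token by trimming the "token=" prefix and splitting on ";" by hand. A sketch of the same extraction using the standard cookie parser, assuming only that the cookie carries the name configured as conf.KeyToken (a literal "token" placeholder here):

    package main

    import (
        "fmt"
        "net/http"
    )

    // tokenFromSetCookie parses Set-Cookie headers with the standard library and
    // returns the value of the named cookie, instead of trimming prefixes manually.
    func tokenFromSetCookie(header http.Header, name string) (string, bool) {
        resp := http.Response{Header: header}
        for _, c := range resp.Cookies() {
            if c.Name == name {
                return c.Value, true
            }
        }
        return "", false
    }

    func main() {
        h := http.Header{}
        h.Add("Set-Cookie", "token=header.payload.signature; Path=/; HttpOnly")
        fmt.Println(tokenFromSetCookie(h, "token"))
    }
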
diff --git a/server/apis/client.go b/server/apis/client.go
deleted file mode 100644
index 222dad4..0000000
--- a/server/apis/client.go
+++ /dev/null
@@ -1,66 +0,0 @@
-package apis
-
-import (
- "net/http"
- "path/filepath"
- "strings"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
-)
-
-func (srv *SrvShare) ClientHandler(res http.ResponseWriter, req *http.Request) {
- if req.Method != http.MethodGet {
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(srv.GetClient),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.Timeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-func (srv *SrvShare) GetClient(res http.ResponseWriter, req *http.Request) interface{} {
- if !srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr)) {
- return httputil.Err504
- }
-
- return srv.getClient(res, req, req.URL.EscapedPath())
-}
-
-func (srv *SrvShare) getClient(res http.ResponseWriter, req *http.Request, relPath string) interface{} {
- if strings.HasSuffix(relPath, "/") {
- relPath = relPath + "index.html"
- }
- if !IsValidClientPath(relPath) {
- return httputil.Err400
- }
-
- fullPath := filepath.Clean(filepath.Join("./public", relPath))
- http.ServeFile(res, req, fullPath)
- return 0
-}
-
-func IsValidClientPath(fullPath string) bool {
- if strings.Contains(fullPath, "..") {
- return false
- }
-
- return true
-}
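
getClient serves anything under ./public, appending index.html to directory paths and rejecting requests whose escaped path contains "..". A slightly stricter variant of that check, sketched below, cleans the joined path first and confirms the result still lies under the public root; the "./public" root is the same literal used above.

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    // insidePublic joins and cleans the requested path, then checks that the result
    // is still under the public root before it may be served.
    func insidePublic(root, relPath string) (string, bool) {
        full := filepath.Clean(filepath.Join(root, relPath))
        rootClean := filepath.Clean(root)
        ok := full == rootClean || strings.HasPrefix(full, rootClean+string(filepath.Separator))
        return full, ok
    }

    func main() {
        fmt.Println(insidePublic("./public", "/index.html"))     // public/index.html true
        fmt.Println(insidePublic("./public", "/../config.json")) // config.json false
    }
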
diff --git a/server/apis/download.go b/server/apis/download.go
deleted file mode 100644
index f217601..0000000
--- a/server/apis/download.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package apis
-
-import (
- "net/http"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
-)
-
-func (srv *SrvShare) DownloadHandler(res http.ResponseWriter, req *http.Request) {
- if req.Method != http.MethodGet {
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(srv.Download),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- // use WriteTimeout instead of Timeout:
- // after the write timeout the connection is closed, so the worker fails to write and returns
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.WriteTimeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-func (srv *SrvShare) Download(res http.ResponseWriter, req *http.Request) interface{} {
- shareId := req.FormValue(srv.Conf.KeyShareId)
- if !srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr)) ||
- !srv.Walls.PassOpLimit(shareId, srv.Conf.OpIdDownload) {
- return httputil.Err429
- }
-
- return srv.download(shareId, res, req)
-}
-
-func (srv *SrvShare) download(shareId string, res http.ResponseWriter, req *http.Request) interface{} {
- if !srv.IsValidShareId(shareId) {
- return httputil.Err400
- }
-
- fileInfo, found := srv.Index.Get(shareId)
- switch {
- case !found || fileInfo.State != fileidx.StateDone:
- return httputil.Err404
- case fileInfo.DownLimit == 0:
- return httputil.Err412
- default:
- updated, _ := srv.Index.DecrDownLimit(shareId)
- if updated != 1 {
- return httputil.Err500
- }
- }
-
- err := srv.Downloader.ServeFile(res, req, fileInfo)
- srv.Err.IsErr(err)
- return 0
-}
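
The download path gives DownLimit three meanings, which the tests below also rely on: -1 is unlimited, 0 means the share is exhausted (412), and a positive value is decremented once per served download. A small sketch of that rule in isolation; DecrDownLimit itself lives in the fileidx package, outside this hunk.

    package main

    import "fmt"

    // decrDownLimit applies the download-limit semantics used above: -1 stays -1
    // (unlimited), 0 blocks the download, positive limits are decremented by one.
    func decrDownLimit(limit int) (newLimit int, allowed bool) {
        switch {
        case limit == -1:
            return -1, true
        case limit == 0:
            return 0, false
        default:
            return limit - 1, true
        }
    }

    func main() {
        fmt.Println(decrDownLimit(-1)) // -1 true
        fmt.Println(decrDownLimit(0))  // 0 false
        fmt.Println(decrDownLimit(2))  // 1 true
    }
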
diff --git a/server/apis/download_test.go b/server/apis/download_test.go
deleted file mode 100644
index f8887d9..0000000
--- a/server/apis/download_test.go
+++ /dev/null
@@ -1,271 +0,0 @@
-package apis
-
-import (
- "net/http"
- "os"
- "testing"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/errutil"
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/logutil"
- "github.com/ihexxa/quickshare/server/libs/qtube"
-)
-
-func initServiceForDownloadTest(config *cfg.Config, indexMap map[string]*fileidx.FileInfo, content string) *SrvShare {
- setDownloader := func(srv *SrvShare) {
- srv.Downloader = stubDownloader{Content: content}
- }
-
- setIndex := func(srv *SrvShare) {
- srv.Index = fileidx.NewMemFileIndexWithMap(len(indexMap), indexMap)
- }
-
- setFs := func(srv *SrvShare) {
- srv.Fs = &stubFsUtil{
- MockFile: &qtube.StubFile{
- Content: content,
- Offset: 0,
- },
- }
- }
-
- logger := logutil.NewSlog(os.Stdout, config.AppName)
- setLog := func(srv *SrvShare) {
- srv.Log = logger
- }
-
- setErr := func(srv *SrvShare) {
- srv.Err = errutil.NewErrChecker(!config.Production, logger)
- }
-
- return InitSrvShare(config, setDownloader, setIndex, setFs, setLog, setErr)
-}
-
-func TestDownload(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- Content string
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response interface{}
- Body string
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "empty file index",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err404,
- },
- },
- testCase{
- Desc: "file info not found",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "1": &fileidx.FileInfo{},
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "1": &fileidx.FileInfo{},
- },
- Response: httputil.Err404,
- },
- },
- testCase{
- Desc: "file not found because of state=uploading",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: 1,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateUploading,
- Uploaded: 1,
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: 1,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateUploading,
- Uploaded: 1,
- },
- },
- Response: httputil.Err404,
- },
- },
- testCase{
- Desc: "download failed because download limit = 0",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: 0,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: 0,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- Response: httputil.Err412,
- },
- },
- testCase{
- Desc: "succeed to download",
- Init: Init{
- Content: "content",
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: 1,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: 0,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- Response: 0,
- Body: "content",
- },
- },
- testCase{
- Desc: "succeed to download DownLimit == -1",
- Init: Init{
- Content: "content",
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: -1,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: -1,
- ModTime: time.Now().UnixNano(),
- PathLocal: "path",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- Response: 0,
- Body: "content",
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForDownloadTest(conf, testCase.Init.IndexMap, testCase.Content)
- writer := &stubWriter{Headers: map[string][]string{}}
- response := srv.download(
- testCase.ShareId,
- writer,
- &http.Request{},
- )
-
- // verify downlimit
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- info, _ := srv.Index.Get(testCase.ShareId)
- t.Fatalf(
- "download: index incorrect got=%v want=%v",
- info,
- testCase.Output.IndexMap[testCase.ShareId],
- )
- }
-
- // verify response
- if response != testCase.Output.Response {
- t.Fatalf(
- "download: response incorrect response=%v testCase=%v",
- response,
- testCase.Output.Response,
- )
- }
-
- // verify writerContent
- if string(writer.Response) != testCase.Output.Body {
- t.Fatalf(
- "download: body incorrect got=%v want=%v",
- string(writer.Response),
- testCase.Output.Body,
- )
- }
-
- }
-}
diff --git a/server/apis/file_info.go b/server/apis/file_info.go
deleted file mode 100644
index 4a7df47..0000000
--- a/server/apis/file_info.go
+++ /dev/null
@@ -1,233 +0,0 @@
-package apis
-
-import (
- "fmt"
- "math/rand"
- "net/http"
- "path/filepath"
- "strconv"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
-)
-
-func (srv *SrvShare) FileInfoHandler(res http.ResponseWriter, req *http.Request) {
- tokenStr := srv.Http.GetCookie(req.Cookies(), srv.Conf.KeyToken)
- if !srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr)) ||
- !srv.Walls.PassLoginCheck(tokenStr, req) {
- srv.Http.Fill(httputil.Err429, res)
- return
- }
-
- todo := func(res http.ResponseWriter, req *http.Request) interface{} { return httputil.Err404 }
- switch req.Method {
- case http.MethodGet:
- todo = srv.List
- case http.MethodDelete:
- todo = srv.Del
- case http.MethodPatch:
- act := req.FormValue(srv.Conf.KeyAct)
- switch act {
- case srv.Conf.ActShadowId:
- todo = srv.ShadowId
- case srv.Conf.ActPublishId:
- todo = srv.PublishId
- case srv.Conf.ActSetDownLimit:
- todo = srv.SetDownLimit
- case srv.Conf.ActAddLocalFiles:
- todo = srv.AddLocalFiles
- default:
- srv.Http.Fill(httputil.Err404, res)
- return
- }
- default:
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(todo),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.Timeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-type ResInfos struct {
- List []*fileidx.FileInfo
-}
-
-func (srv *SrvShare) List(res http.ResponseWriter, req *http.Request) interface{} {
- if !srv.Walls.PassOpLimit(srv.Conf.AllUsers, srv.Conf.OpIdGetFInfo) {
- return httputil.Err429
- }
-
- return srv.list()
-}
-
-func (srv *SrvShare) list() interface{} {
- infos := make([]*fileidx.FileInfo, 0)
- for _, info := range srv.Index.List() {
- infos = append(infos, info)
- }
-
- return &ResInfos{List: infos}
-}
-
-func (srv *SrvShare) Del(res http.ResponseWriter, req *http.Request) interface{} {
- shareId := req.FormValue(srv.Conf.KeyShareId)
- if !srv.Walls.PassOpLimit(shareId, srv.Conf.OpIdDelFInfo) {
- return httputil.Err504
- }
-
- return srv.del(shareId)
-}
-
-func (srv *SrvShare) del(shareId string) interface{} {
- if !srv.IsValidShareId(shareId) {
- return httputil.Err400
- }
-
- fileInfo, found := srv.Index.Get(shareId)
- if !found {
- return httputil.Err404
- }
-
- srv.Index.Del(shareId)
- fullPath := filepath.Join(srv.Conf.PathLocal, fileInfo.PathLocal)
- if !srv.Fs.DelFile(fullPath) {
- // TODO: log the file name; the file may not exist or deletion may not be permitted
- return httputil.Err500
- }
-
- return httputil.Ok200
-}
-
-func (srv *SrvShare) ShadowId(res http.ResponseWriter, req *http.Request) interface{} {
- if !srv.Walls.PassOpLimit(srv.Conf.AllUsers, srv.Conf.OpIdOpFInfo) {
- return httputil.Err429
- }
-
- shareId := req.FormValue(srv.Conf.KeyShareId)
- return srv.shadowId(shareId)
-}
-
-func (srv *SrvShare) shadowId(shareId string) interface{} {
- if !srv.IsValidShareId(shareId) {
- return httputil.Err400
- }
-
- info, found := srv.Index.Get(shareId)
- if !found {
- return httputil.Err404
- }
-
- secretId := srv.Encryptor.Encrypt(
- []byte(fmt.Sprintf("%s%s", info.PathLocal, genPwd())),
- )
- if !srv.Index.SetId(info.Id, secretId) {
- return httputil.Err412
- }
-
- return &ShareInfo{ShareId: secretId}
-}
-
-func (srv *SrvShare) PublishId(res http.ResponseWriter, req *http.Request) interface{} {
- if !srv.Walls.PassOpLimit(srv.Conf.AllUsers, srv.Conf.OpIdOpFInfo) {
- return httputil.Err429
- }
-
- shareId := req.FormValue(srv.Conf.KeyShareId)
- return srv.publishId(shareId)
-}
-
-func (srv *SrvShare) publishId(shareId string) interface{} {
- if !srv.IsValidShareId(shareId) {
- return httputil.Err400
- }
-
- info, found := srv.Index.Get(shareId)
- if !found {
- return httputil.Err404
- }
-
- publicId := srv.Encryptor.Encrypt([]byte(info.PathLocal))
- if !srv.Index.SetId(info.Id, publicId) {
- return httputil.Err412
- }
-
- return &ShareInfo{ShareId: publicId}
-}
-
-func (srv *SrvShare) SetDownLimit(res http.ResponseWriter, req *http.Request) interface{} {
- if !srv.Walls.PassOpLimit(srv.Conf.AllUsers, srv.Conf.OpIdOpFInfo) {
- return httputil.Err429
- }
-
- shareId := req.FormValue(srv.Conf.KeyShareId)
- downLimit64, downLimitParseErr := strconv.ParseInt(req.FormValue(srv.Conf.KeyDownLimit), 10, 32)
- downLimit := int(downLimit64)
- if srv.Err.IsErr(downLimitParseErr) {
- return httputil.Err400
- }
-
- return srv.setDownLimit(shareId, downLimit)
-}
-
-func (srv *SrvShare) setDownLimit(shareId string, downLimit int) interface{} {
- if !srv.IsValidShareId(shareId) || !srv.IsValidDownLimit(downLimit) {
- return httputil.Err400
- }
-
- if !srv.Index.SetDownLimit(shareId, downLimit) {
- return httputil.Err404
- }
- return httputil.Ok200
-}
-
-func (srv *SrvShare) AddLocalFiles(res http.ResponseWriter, req *http.Request) interface{} {
- return srv.AddLocalFilesImp()
-}
-
-func (srv *SrvShare) AddLocalFilesImp() interface{} {
- infos, err := srv.Fs.Readdir(srv.Conf.PathLocal, srv.Conf.LocalFileLimit)
- if srv.Err.IsErr(err) {
- panic(fmt.Sprintf("fail to readdir: %v", err))
- }
-
- for _, info := range infos {
- info.DownLimit = srv.Conf.DownLimit
- info.State = fileidx.StateDone
- info.Id = srv.Encryptor.Encrypt([]byte(info.PathLocal))
-
- addRet := srv.Index.Add(info)
- switch {
- case addRet == 0 || addRet == -1:
- // TODO: return files not added
- continue
- case addRet == 1:
- break
- default:
- return httputil.Err500
- }
- }
-
- return httputil.Ok200
-}
-
-func genPwd() string {
- return fmt.Sprintf("%d%d%d%d", rand.Intn(10), rand.Intn(10), rand.Intn(10), rand.Intn(10))
-}
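
shadowId derives a new id from the local path plus a fresh 4-digit suffix (so the link rotates), while publishId derives it from the path alone (so the public id is stable); both go through srv.Encryptor, which NewSrvShare below wires to an HMAC encryptor keyed with the configured secret. A hedged sketch of such an id derivation using hex-encoded HMAC-SHA256, which also lines up with the 64-character check in IsValidShareId; the actual HmacEncryptor implementation is not part of this hunk, and the paths and secret here are placeholders.

    package main

    import (
        "crypto/hmac"
        "crypto/sha256"
        "encoding/hex"
        "fmt"
    )

    // hmacID derives a share id from the local path (plus an optional random
    // suffix for shadowed ids) as hex-encoded HMAC-SHA256, i.e. 64 characters.
    func hmacID(secret []byte, pathLocal, suffix string) string {
        mac := hmac.New(sha256.New, secret)
        mac.Write([]byte(pathLocal + suffix))
        return hex.EncodeToString(mac.Sum(nil))
    }

    func main() {
        secret := []byte("placeholder-secret")
        fmt.Println(hmacID(secret, "files/report.pdf", ""))     // stable public id
        fmt.Println(hmacID(secret, "files/report.pdf", "4821")) // rotated shadow id
    }
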
diff --git a/server/apis/file_info_test.go b/server/apis/file_info_test.go
deleted file mode 100644
index b6bf1b5..0000000
--- a/server/apis/file_info_test.go
+++ /dev/null
@@ -1,584 +0,0 @@
-package apis
-
-import (
- "os"
- "path/filepath"
- "testing"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/errutil"
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/logutil"
-)
-
-const mockShadowId = "shadowId"
-const mockPublicId = "publicId"
-
-func initServiceForFileInfoTest(
- config *cfg.Config,
- indexMap map[string]*fileidx.FileInfo,
- useShadowEnc bool,
- localFileInfos []*fileidx.FileInfo,
-) *SrvShare {
- setIndex := func(srv *SrvShare) {
- srv.Index = fileidx.NewMemFileIndexWithMap(len(indexMap), indexMap)
- }
-
- setFs := func(srv *SrvShare) {
- srv.Fs = &stubFsUtil{MockLocalFileInfos: localFileInfos}
- }
-
- logger := logutil.NewSlog(os.Stdout, config.AppName)
- setLog := func(srv *SrvShare) {
- srv.Log = logger
- }
-
- errChecker := errutil.NewErrChecker(!config.Production, logger)
- setErr := func(srv *SrvShare) {
- srv.Err = errChecker
- }
-
- var setEncryptor AddDep
- if useShadowEnc {
- setEncryptor = func(srv *SrvShare) {
- srv.Encryptor = &stubEncryptor{MockResult: mockShadowId}
- }
- } else {
- setEncryptor = func(srv *SrvShare) {
- srv.Encryptor = &stubEncryptor{MockResult: mockPublicId}
- }
- }
-
- return InitSrvShare(config, setIndex, setFs, setEncryptor, setLog, setErr)
-}
-
-func TestList(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type TestCase struct {
- Desc string
- Output
- }
-
- testCases := []TestCase{
- TestCase{
- Desc: "success",
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- },
- "1": &fileidx.FileInfo{
- Id: "1",
- },
- },
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForFileInfoTest(conf, testCase.Output.IndexMap, true, []*fileidx.FileInfo{})
- response := srv.list()
- resInfos := response.(*ResInfos)
-
- for _, info := range resInfos.List {
- infoFromSrv, found := srv.Index.Get(info.Id)
- if !found || infoFromSrv.Id != info.Id {
- t.Fatalf("list: file infos are not identical")
- }
- }
-
- if len(resInfos.List) != len(srv.Index.List()) {
- t.Fatalf("list: file infos are not identical")
- }
- }
-}
-
-func TestDel(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response httputil.MsgRes
- }
- type TestCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []TestCase{
- TestCase{
- Desc: "success",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- },
- "1": &fileidx.FileInfo{
- Id: "1",
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "1": &fileidx.FileInfo{
- Id: "1",
- },
- },
- Response: httputil.Ok200,
- },
- },
- TestCase{
- Desc: "not found",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "1": &fileidx.FileInfo{
- Id: "1",
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "1": &fileidx.FileInfo{
- Id: "1",
- },
- },
- Response: httputil.Err404,
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForFileInfoTest(conf, testCase.Init.IndexMap, true, []*fileidx.FileInfo{})
- response := srv.del(testCase.ShareId)
- res := response.(httputil.MsgRes)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf("del: index incorrect")
- }
-
- if res != testCase.Output.Response {
- t.Fatalf("del: response incorrect got: %v, want: %v", res, testCase.Output.Response)
- }
- }
-}
-
-func TestShadowId(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response interface{}
- }
- type TestCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []TestCase{
- TestCase{
- Desc: "success",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- mockShadowId: &fileidx.FileInfo{
- Id: mockShadowId,
- },
- },
- Response: &ShareInfo{
- ShareId: mockShadowId,
- },
- },
- },
- TestCase{
- Desc: "original id not exists",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err404,
- },
- },
- TestCase{
- Desc: "dest id exists",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- },
- mockShadowId: &fileidx.FileInfo{
- Id: mockShadowId,
- },
- },
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- },
- mockShadowId: &fileidx.FileInfo{
- Id: mockShadowId,
- },
- },
- Response: httputil.Err412,
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForFileInfoTest(conf, testCase.Init.IndexMap, true, []*fileidx.FileInfo{})
- response := srv.shadowId(testCase.ShareId)
-
- switch response.(type) {
- case *ShareInfo:
- res := response.(*ShareInfo)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- info, found := srv.Index.Get(mockShadowId)
- t.Fatalf(
- "shadowId: index incorrect got %v found: %v want %v",
- info,
- found,
- testCase.Output.IndexMap[mockShadowId],
- )
- }
-
- if res.ShareId != mockShadowId {
- t.Fatalf("shadowId: mockId incorrect")
- }
-
- case httputil.MsgRes:
- res := response.(httputil.MsgRes)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf("shadowId: map not identical")
- }
-
- if res != testCase.Output.Response {
- t.Fatalf("shadowId: response incorrect")
- }
- default:
- t.Fatalf("shadowId: return type not found")
- }
- }
-}
-
-func TestPublishId(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response interface{}
- }
- type TestCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []TestCase{
- TestCase{
- Desc: "success",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- mockShadowId: &fileidx.FileInfo{
- Id: mockShadowId,
- },
- },
- },
- Input: Input{
- ShareId: mockShadowId,
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- mockPublicId: &fileidx.FileInfo{
- Id: mockPublicId,
- },
- },
- Response: &ShareInfo{
- ShareId: mockPublicId,
- },
- },
- },
- TestCase{
- Desc: "original id not exists",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err404,
- },
- },
- TestCase{
- Desc: "dest id exists",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- mockShadowId: &fileidx.FileInfo{
- Id: mockShadowId,
- },
- mockPublicId: &fileidx.FileInfo{
- Id: mockPublicId,
- },
- },
- },
- Input: Input{
- ShareId: mockShadowId,
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- mockShadowId: &fileidx.FileInfo{
- Id: mockShadowId,
- },
- mockPublicId: &fileidx.FileInfo{
- Id: mockPublicId,
- },
- },
- Response: httputil.Err412,
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForFileInfoTest(conf, testCase.Init.IndexMap, false, []*fileidx.FileInfo{})
- response := srv.publishId(testCase.ShareId)
-
- switch response.(type) {
- case *ShareInfo:
- res := response.(*ShareInfo)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- info, found := srv.Index.Get(mockPublicId)
- t.Fatalf(
- "shadowId: index incorrect got %v found: %v want %v",
- info,
- found,
- testCase.Output.IndexMap[mockPublicId],
- )
- }
-
- if res.ShareId != mockPublicId {
- t.Fatalf("shadowId: mockId incorrect %v %v", res.ShareId, mockPublicId)
- }
-
- case httputil.MsgRes:
- res := response.(httputil.MsgRes)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf("shadowId: map not identical")
- }
-
- if res != testCase.Output.Response {
- t.Fatalf("shadowId: response incorrect got: %v want: %v", res, testCase.Output.Response)
- }
- default:
- t.Fatalf("shadowId: return type not found")
- }
- }
-}
-
-func TestSetDownLimit(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
- mockDownLimit := 100
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- DownLimit int
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response httputil.MsgRes
- }
- type TestCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []TestCase{
- TestCase{
- Desc: "success",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- },
- },
- },
- Input: Input{
- ShareId: "0",
- DownLimit: mockDownLimit,
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- "0": &fileidx.FileInfo{
- Id: "0",
- DownLimit: mockDownLimit,
- },
- },
- Response: httputil.Ok200,
- },
- },
- TestCase{
- Desc: "not found",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- ShareId: "0",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err404,
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForFileInfoTest(conf, testCase.Init.IndexMap, true, []*fileidx.FileInfo{})
- response := srv.setDownLimit(testCase.ShareId, mockDownLimit)
- res := response.(httputil.MsgRes)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- info, _ := srv.Index.Get(testCase.ShareId)
- t.Fatalf(
- "setDownLimit: index incorrect got: %v want: %v",
- info,
- testCase.Output.IndexMap[testCase.ShareId],
- )
- }
-
- if res != testCase.Output.Response {
- t.Fatalf("setDownLimit: response incorrect got: %v, want: %v", res, testCase.Output.Response)
- }
- }
-}
-
-func TestAddLocalFiles(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- Infos []*fileidx.FileInfo
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response httputil.MsgRes
- }
- type TestCase struct {
- Desc string
- Init
- Output
- }
-
- testCases := []TestCase{
- TestCase{
- Desc: "success",
- Init: Init{
- Infos: []*fileidx.FileInfo{
- &fileidx.FileInfo{
- Id: "",
- DownLimit: 0,
- ModTime: 13,
- PathLocal: "filename1",
- State: "",
- Uploaded: 13,
- },
- },
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- mockPublicId: &fileidx.FileInfo{
- Id: mockPublicId,
- DownLimit: conf.DownLimit,
- ModTime: 13,
- PathLocal: filepath.Join(conf.PathLocal, "filename1"),
- State: fileidx.StateDone,
- Uploaded: 13,
- },
- },
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForFileInfoTest(conf, testCase.Output.IndexMap, false, testCase.Init.Infos)
- response := srv.AddLocalFilesImp()
- res := response.(httputil.MsgRes)
-
- if res.Code != 200 {
- t.Fatalf("addLocalFiles: code not correct")
- }
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf(
- "addLocalFiles: indexes not identical got: %v want: %v",
- srv.Index.List(),
- testCase.Output.IndexMap,
- )
- }
- }
-}
diff --git a/server/apis/service.go b/server/apis/service.go
deleted file mode 100644
index 912b50b..0000000
--- a/server/apis/service.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package apis
-
-import (
- "log"
- "net/http"
- "os"
- "strings"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/encrypt"
- "github.com/ihexxa/quickshare/server/libs/errutil"
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/fsutil"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
- "github.com/ihexxa/quickshare/server/libs/limiter"
- "github.com/ihexxa/quickshare/server/libs/logutil"
- "github.com/ihexxa/quickshare/server/libs/qtube"
- "github.com/ihexxa/quickshare/server/libs/walls"
-)
-
-type AddDep func(*SrvShare)
-
-func NewSrvShare(config *cfg.Config) *SrvShare {
- logger := logutil.NewSlog(os.Stdout, config.AppName)
- setLog := func(srv *SrvShare) {
- srv.Log = logger
- }
-
- errChecker := errutil.NewErrChecker(!config.Production, logger)
- setErr := func(srv *SrvShare) {
- srv.Err = errChecker
- }
-
- setWorkerPool := func(srv *SrvShare) {
- workerPoolSize := config.WorkerPoolSize
- taskQueueSize := config.TaskQueueSize
- srv.WorkerPool = httpworker.NewWorkerPool(workerPoolSize, taskQueueSize, logger)
- }
-
- setWalls := func(srv *SrvShare) {
- encrypterMaker := encrypt.JwtEncrypterMaker
- ipLimiter := limiter.NewRateLimiter(
- config.LimiterCap,
- config.LimiterTtl,
- config.LimiterCyc,
- config.BucketCap,
- config.SpecialCaps,
- )
- opLimiter := limiter.NewRateLimiter(
- config.LimiterCap,
- config.LimiterTtl,
- config.LimiterCyc,
- config.BucketCap,
- config.SpecialCaps,
- )
- srv.Walls = walls.NewAccessWalls(config, ipLimiter, opLimiter, encrypterMaker)
- }
-
- setIndex := func(srv *SrvShare) {
- srv.Index = fileidx.NewMemFileIndex(config.MaxShares)
- }
-
- fs := fsutil.NewSimpleFs(errChecker)
- setFs := func(srv *SrvShare) {
- srv.Fs = fs
- }
-
- setDownloader := func(srv *SrvShare) {
- srv.Downloader = qtube.NewQTube(
- config.PathLocal,
- config.MaxDownBytesPerSec,
- config.MaxRangeLength,
- fs,
- )
- }
-
- setEncryptor := func(srv *SrvShare) {
- srv.Encryptor = &encrypt.HmacEncryptor{Key: config.SecretKeyByte}
- }
-
- setHttp := func(srv *SrvShare) {
- srv.Http = &httputil.QHttpUtil{
- CookieDomain: config.CookieDomain,
- CookieHttpOnly: config.CookieHttpOnly,
- CookieMaxAge: config.CookieMaxAge,
- CookiePath: config.CookiePath,
- CookieSecure: config.CookieSecure,
- Err: errChecker,
- }
- }
-
- return InitSrvShare(config, setIndex, setWalls, setWorkerPool, setFs, setDownloader, setEncryptor, setLog, setErr, setHttp)
-}
-
-func InitSrvShare(config *cfg.Config, addDeps ...AddDep) *SrvShare {
- srv := &SrvShare{}
- srv.Conf = config
- for _, addDep := range addDeps {
- addDep(srv)
- }
-
- if !srv.Fs.MkdirAll(srv.Conf.PathLocal, os.FileMode(0775)) {
- panic("fail to make ./files/ folder")
- }
-
- if res := srv.AddLocalFilesImp(); res != httputil.Ok200 {
- panic("fail to add local files")
- }
-
- return srv
-}
-
-type SrvShare struct {
- Conf *cfg.Config
- Encryptor encrypt.Encryptor
- Err errutil.ErrUtil
- Downloader qtube.Downloader
- Http httputil.HttpUtil
- Index fileidx.FileIndex
- Fs fsutil.FsUtil
- Log logutil.LogUtil
- Walls walls.Walls
- WorkerPool httpworker.Workers
-}
-
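-// Wrap adapts a ServiceFunc into a DoFunc: it runs serviceFunc and writes any
-// non-nil result to the response via Http.Fill, logging when the write fails.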
-func (srv *SrvShare) Wrap(serviceFunc httpworker.ServiceFunc) httpworker.DoFunc {
- return func(res http.ResponseWriter, req *http.Request) {
- body := serviceFunc(res, req)
-
- if body != nil && body != 0 && srv.Http.Fill(body, res) <= 0 {
- log.Println("Wrap: fail to fill body", body, res)
- }
- }
-}
-
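-// GetRemoteIp extracts the IP part from a "host:port" remote address.
-// TODO: splitting on ":" does not handle IPv6 literals; net.SplitHostPort
-// could be used instead.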
-func GetRemoteIp(addr string) string {
- addrParts := strings.Split(addr, ":")
- if len(addrParts) > 0 {
- return addrParts[0]
- }
- return "unknown ip"
-}
diff --git a/server/apis/test_helper.go b/server/apis/test_helper.go
deleted file mode 100644
index a23c372..0000000
--- a/server/apis/test_helper.go
+++ /dev/null
@@ -1,117 +0,0 @@
-package apis
-
-import (
- "fmt"
- "io"
- "net/http"
- "os"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/qtube"
-)
-
-type stubFsUtil struct {
- MockLocalFileInfos []*fileidx.FileInfo
- MockFile *qtube.StubFile
-}
-
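-// expectCreateFileName is set by tests so that the stub's CreateFile can
-// assert the path it is called with.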
-var expectCreateFileName = ""
-
-func (fs *stubFsUtil) CreateFile(fileName string) error {
- if fileName != expectCreateFileName {
- panic(
- fmt.Sprintf("CreateFile: got: %s expect: %s", fileName, expectCreateFileName),
- )
- }
- return nil
-}
-
-func (fs *stubFsUtil) CopyChunkN(fullPath string, chunk io.Reader, start int64, len int64) bool {
- return true
-}
-
-func (fs *stubFsUtil) ServeFile(res http.ResponseWriter, req *http.Request, fileName string) {
- return
-}
-
-func (fs *stubFsUtil) DelFile(fullPath string) bool {
- return true
-}
-
-func (fs *stubFsUtil) MkdirAll(path string, mode os.FileMode) bool {
- return true
-}
-
-func (fs *stubFsUtil) Readdir(dirname string, n int) ([]*fileidx.FileInfo, error) {
- return fs.MockLocalFileInfos, nil
-}
-
-func (fs *stubFsUtil) Open(filePath string) (qtube.ReadSeekCloser, error) {
- return fs.MockFile, nil
-}
-
-type stubWriter struct {
- Headers http.Header
- Response []byte
- StatusCode int
-}
-
-func (w *stubWriter) Header() http.Header {
- return w.Headers
-}
-
-func (w *stubWriter) Write(body []byte) (int, error) {
- w.Response = append(w.Response, body...)
- return len(body), nil
-}
-
-func (w *stubWriter) WriteHeader(statusCode int) {
- w.StatusCode = statusCode
-}
-
-type stubDownloader struct {
- Content string
-}
-
-func (d stubDownloader) ServeFile(w http.ResponseWriter, r *http.Request, fileInfo *fileidx.FileInfo) error {
- _, err := w.Write([]byte(d.Content))
- return err
-}
-
-func sameInfoWithoutTime(info1, info2 *fileidx.FileInfo) bool {
- return info1.Id == info2.Id &&
- info1.DownLimit == info2.DownLimit &&
- info1.PathLocal == info2.PathLocal &&
- info1.State == info2.State &&
- info1.Uploaded == info2.Uploaded
-}
-
-func sameMap(map1, map2 map[string]*fileidx.FileInfo) bool {
- for key, info1 := range map1 {
- info2, found := map2[key]
- if !found || !sameInfoWithoutTime(info1, info2) {
- fmt.Printf("infos are not same: \n%v \n%v", info1, info2)
- return false
- }
- }
-
- for key, info2 := range map2 {
- info1, found := map1[key]
- if !found || !sameInfoWithoutTime(info1, info2) {
- fmt.Printf("infos are not same: \n%v \n%v", info1, info2)
- return false
- }
- }
-
- return true
-}
-
-type stubEncryptor struct {
- MockResult string
-}
-
-func (enc *stubEncryptor) Encrypt(content []byte) string {
- return enc.MockResult
-}
diff --git a/server/apis/upload.go b/server/apis/upload.go
deleted file mode 100644
index 3878ce6..0000000
--- a/server/apis/upload.go
+++ /dev/null
@@ -1,250 +0,0 @@
-package apis
-
-import (
- "io"
- "net/http"
- "path/filepath"
- "strconv"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/encrypt"
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/fsutil"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
-)
-
-const DefaultId = "0"
-
-type ByteRange struct {
- ShareId string
- Start int64
- Length int64
-}
-
-type ShareInfo struct {
- ShareId string
-}
-
-func (srv *SrvShare) StartUploadHandler(res http.ResponseWriter, req *http.Request) {
- if req.Method != http.MethodPost {
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- tokenStr := srv.Http.GetCookie(req.Cookies(), srv.Conf.KeyToken)
- ipPass := srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr))
- loginPass := srv.Walls.PassLoginCheck(tokenStr, req)
- opPass := srv.Walls.PassOpLimit(GetRemoteIp(req.RemoteAddr), srv.Conf.OpIdUpload)
- if !ipPass || !loginPass || !opPass {
- srv.Http.Fill(httputil.Err429, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(srv.StartUpload),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.Timeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-func (srv *SrvShare) UploadHandler(res http.ResponseWriter, req *http.Request) {
- if req.Method != http.MethodPost {
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- tokenStr := srv.Http.GetCookie(req.Cookies(), srv.Conf.KeyToken)
- ipPass := srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr))
- loginPass := srv.Walls.PassLoginCheck(tokenStr, req)
- opPass := srv.Walls.PassOpLimit(GetRemoteIp(req.RemoteAddr), srv.Conf.OpIdUpload)
- if !ipPass || !loginPass || !opPass {
- srv.Http.Fill(httputil.Err429, res)
- return
- }
-
- multiFormErr := req.ParseMultipartForm(srv.Conf.ParseFormBufSize)
- if srv.Err.IsErr(multiFormErr) {
- srv.Http.Fill(httputil.Err400, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(srv.Upload),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.Timeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-func (srv *SrvShare) FinishUploadHandler(res http.ResponseWriter, req *http.Request) {
- if req.Method != http.MethodPost {
- srv.Http.Fill(httputil.Err404, res)
- return
- }
-
- tokenStr := srv.Http.GetCookie(req.Cookies(), srv.Conf.KeyToken)
- ipPass := srv.Walls.PassIpLimit(GetRemoteIp(req.RemoteAddr))
- loginPass := srv.Walls.PassLoginCheck(tokenStr, req)
- opPass := srv.Walls.PassOpLimit(GetRemoteIp(req.RemoteAddr), srv.Conf.OpIdUpload)
- if !ipPass || !loginPass || !opPass {
- srv.Http.Fill(httputil.Err429, res)
- return
- }
-
- ack := make(chan error, 1)
- ok := srv.WorkerPool.Put(&httpworker.Task{
- Ack: ack,
- Do: srv.Wrap(srv.FinishUpload),
- Res: res,
- Req: req,
- })
- if !ok {
- srv.Http.Fill(httputil.Err503, res)
- return
- }
-
- execErr := srv.WorkerPool.IsInTime(ack, time.Duration(srv.Conf.Timeout)*time.Millisecond)
- if srv.Err.IsErr(execErr) {
- srv.Http.Fill(httputil.Err500, res)
- }
-}
-
-func (srv *SrvShare) StartUpload(res http.ResponseWriter, req *http.Request) interface{} {
- return srv.startUpload(req.FormValue(srv.Conf.KeyFileName))
-}
-
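-// startUpload registers a new share for fileName, creates an empty file under
-// Conf.PathLocal and returns the first ByteRange the client should upload.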
-func (srv *SrvShare) startUpload(fileName string) interface{} {
- if !IsValidFileName(fileName) {
- return httputil.Err400
- }
-
- id := DefaultId
- if srv.Conf.Production {
- id = genInfoId(fileName, srv.Conf.SecretKeyByte)
- }
-
- info := &fileidx.FileInfo{
- Id: id,
- DownLimit: srv.Conf.DownLimit,
- ModTime: time.Now().UnixNano(),
- PathLocal: fileName,
- Uploaded: 0,
- State: fileidx.StateStarted,
- }
-
- switch srv.Index.Add(info) {
- case 0:
- // go on
- case -1:
- return httputil.Err412
- case 1:
- return httputil.Err500 // TODO: use correct status code
- default:
- srv.Index.Del(id)
- return httputil.Err500
- }
-
- fullPath := filepath.Join(srv.Conf.PathLocal, info.PathLocal)
- createFileErr := srv.Fs.CreateFile(fullPath)
- switch {
- case createFileErr == fsutil.ErrExists:
- srv.Index.Del(id)
- return httputil.Err412
- case createFileErr == fsutil.ErrUnknown:
- srv.Index.Del(id)
- return httputil.Err500
- default:
- srv.Index.SetState(id, fileidx.StateUploading)
- return &ByteRange{
- ShareId: id,
- Start: 0,
- Length: srv.Conf.MaxUpBytesPerSec,
- }
- }
-}
-
-func (srv *SrvShare) Upload(res http.ResponseWriter, req *http.Request) interface{} {
- shareId := req.FormValue(srv.Conf.KeyShareId)
- start, startErr := strconv.ParseInt(req.FormValue(srv.Conf.KeyStart), 10, 64)
- length, lengthErr := strconv.ParseInt(req.FormValue(srv.Conf.KeyLen), 10, 64)
- chunk, _, chunkErr := req.FormFile(srv.Conf.KeyChunk)
-
- if srv.Err.IsErr(startErr) ||
- srv.Err.IsErr(lengthErr) ||
- srv.Err.IsErr(chunkErr) {
- return httputil.Err400
- }
-
- return srv.upload(shareId, start, length, chunk)
-}
-
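-// upload validates the requested byte range, writes the chunk at the given
-// offset of the local file and returns the next ByteRange the client should send.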
-func (srv *SrvShare) upload(shareId string, start int64, length int64, chunk io.Reader) interface{} {
- if !srv.IsValidShareId(shareId) {
- return httputil.Err400
- }
-
- fileInfo, found := srv.Index.Get(shareId)
- if !found {
- return httputil.Err404
- }
-
- if !srv.IsValidStart(start, fileInfo.Uploaded) || !srv.IsValidLength(length) {
- return httputil.Err400
- }
-
- fullPath := filepath.Join(srv.Conf.PathLocal, fileInfo.PathLocal)
- if !srv.Fs.CopyChunkN(fullPath, chunk, start, length) {
- return httputil.Err500
- }
-
- if srv.Index.IncrUploaded(shareId, length) == 0 {
- return httputil.Err404
- }
-
- return &ByteRange{
- ShareId: shareId,
- Start: start + length,
- Length: srv.Conf.MaxUpBytesPerSec,
- }
-}
-
-func (srv *SrvShare) FinishUpload(res http.ResponseWriter, req *http.Request) interface{} {
- shareId := req.FormValue(srv.Conf.KeyShareId)
- return srv.finishUpload(shareId)
-}
-
-func (srv *SrvShare) finishUpload(shareId string) interface{} {
- if !srv.Index.SetState(shareId, fileidx.StateDone) {
- return httputil.Err404
- }
-
- return &ShareInfo{
- ShareId: shareId,
- }
-}
-
-func genInfoId(content string, key []byte) string {
- encrypter := encrypt.HmacEncryptor{Key: key}
- return encrypter.Encrypt([]byte(content))
-}
diff --git a/server/apis/upload_test.go b/server/apis/upload_test.go
deleted file mode 100644
index 2338439..0000000
--- a/server/apis/upload_test.go
+++ /dev/null
@@ -1,368 +0,0 @@
-package apis
-
-import (
- "fmt"
- "io"
- "os"
- "path/filepath"
- "strings"
- "testing"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/encrypt"
- "github.com/ihexxa/quickshare/server/libs/errutil"
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/httputil"
- "github.com/ihexxa/quickshare/server/libs/httpworker"
- "github.com/ihexxa/quickshare/server/libs/limiter"
- "github.com/ihexxa/quickshare/server/libs/logutil"
- "github.com/ihexxa/quickshare/server/libs/walls"
-)
-
-const testCap = 3
-
-func initServiceForUploadTest(config *cfg.Config, indexMap map[string]*fileidx.FileInfo) *SrvShare {
- logger := logutil.NewSlog(os.Stdout, config.AppName)
- setLog := func(srv *SrvShare) {
- srv.Log = logger
- }
-
- setWorkerPool := func(srv *SrvShare) {
- workerPoolSize := config.WorkerPoolSize
- taskQueueSize := config.TaskQueueSize
- srv.WorkerPool = httpworker.NewWorkerPool(workerPoolSize, taskQueueSize, logger)
- }
-
- setWalls := func(srv *SrvShare) {
- encrypterMaker := encrypt.JwtEncrypterMaker
- ipLimiter := limiter.NewRateLimiter(config.LimiterCap, config.LimiterTtl, config.LimiterCyc, config.BucketCap, map[int16]int16{})
- opLimiter := limiter.NewRateLimiter(config.LimiterCap, config.LimiterTtl, config.LimiterCyc, config.BucketCap, map[int16]int16{})
- srv.Walls = walls.NewAccessWalls(config, ipLimiter, opLimiter, encrypterMaker)
- }
-
- setIndex := func(srv *SrvShare) {
- srv.Index = fileidx.NewMemFileIndexWithMap(len(indexMap)+testCap, indexMap)
- }
-
- setFs := func(srv *SrvShare) {
- srv.Fs = &stubFsUtil{}
- }
-
- setEncryptor := func(srv *SrvShare) {
- srv.Encryptor = &encrypt.HmacEncryptor{Key: config.SecretKeyByte}
- }
-
- errChecker := errutil.NewErrChecker(!config.Production, logger)
- setErr := func(srv *SrvShare) {
- srv.Err = errChecker
- }
-
- return InitSrvShare(config, setIndex, setWalls, setWorkerPool, setFs, setEncryptor, setLog, setErr)
-}
-
-func TestStartUpload(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- FileName string
- }
- type Output struct {
- Response interface{}
- IndexMap map[string]*fileidx.FileInfo
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "invalid file name",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- FileName: "",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err400,
- },
- },
- testCase{
- Desc: "succeed to start uploading",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- FileName: "filename",
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- DefaultId: &fileidx.FileInfo{
- Id: DefaultId,
- DownLimit: conf.DownLimit,
- ModTime: time.Now().UnixNano(),
- PathLocal: "filename",
- Uploaded: 0,
- State: fileidx.StateUploading,
- },
- },
- Response: &ByteRange{
- ShareId: DefaultId,
- Start: 0,
- Length: conf.MaxUpBytesPerSec,
- },
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForUploadTest(conf, testCase.Init.IndexMap)
-
- // verify CreateFile
- expectCreateFileName = filepath.Join(conf.PathLocal, testCase.FileName)
-
- response := srv.startUpload(testCase.FileName)
-
- // verify index
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf("startUpload: index not equal got: %v, %v, expect: %v", srv.Index.List(), response, testCase.Output.IndexMap)
- }
-
- // verify response
- switch expectRes := testCase.Output.Response.(type) {
- case *ByteRange:
- res := response.(*ByteRange)
- if res.ShareId != expectRes.ShareId ||
- res.Start != expectRes.Start ||
- res.Length != expectRes.Length {
- t.Fatalf(fmt.Sprintf("startUpload: res=%v expect=%v", res, expectRes))
- }
- case httputil.MsgRes:
- if response != expectRes {
- t.Fatalf(fmt.Sprintf("startUpload: response=%v expectRes=%v", response, expectRes))
- }
- default:
- t.Fatalf(fmt.Sprintf("startUpload: type not found: %T %T", testCase.Output.Response, httputil.Err400))
- }
- }
-}
-
-func TestUpload(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- Start int64
- Len int64
- Chunk io.Reader
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response interface{}
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "shareid does not exist",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- ShareId: DefaultId,
- Start: 0,
- Len: 1,
- Chunk: strings.NewReader(""),
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err404,
- },
- },
- testCase{
- Desc: "succeed",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- DefaultId: &fileidx.FileInfo{
- Id: DefaultId,
- DownLimit: conf.MaxShares,
- PathLocal: "path/filename",
- State: fileidx.StateUploading,
- Uploaded: 0,
- },
- },
- },
- Input: Input{
- ShareId: DefaultId,
- Start: 0,
- Len: 1,
- Chunk: strings.NewReader("a"),
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- DefaultId: &fileidx.FileInfo{
- Id: DefaultId,
- DownLimit: conf.MaxShares,
- PathLocal: "path/filename",
- State: fileidx.StateUploading,
- Uploaded: 1,
- },
- },
- Response: &ByteRange{
- ShareId: DefaultId,
- Start: 1,
- Length: conf.MaxUpBytesPerSec,
- },
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForUploadTest(conf, testCase.Init.IndexMap)
-
- response := srv.upload(
- testCase.Input.ShareId,
- testCase.Input.Start,
- testCase.Input.Len,
- testCase.Input.Chunk,
- )
-
- // TODO: CopyChunkN is not verified here
-
- // verify index
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf("upload: index not identical got: %v want: %v", srv.Index.List(), testCase.Output.IndexMap)
- }
- // verify response
- switch response.(type) {
- case *ByteRange:
- br := testCase.Output.Response.(*ByteRange)
- res := response.(*ByteRange)
- if res.ShareId != br.ShareId || res.Start != br.Start || res.Length != br.Length {
- t.Fatalf(fmt.Sprintf("upload: response=%v expectRes=%v", res, br))
- }
- default:
- if response != testCase.Output.Response {
- t.Fatalf(fmt.Sprintf("upload: response=%v expectRes=%v", response, testCase.Output.Response))
- }
- }
- }
-}
-
-func TestFinishUpload(t *testing.T) {
- conf := cfg.NewConfig()
- conf.Production = false
-
- type Init struct {
- IndexMap map[string]*fileidx.FileInfo
- }
- type Input struct {
- ShareId string
- Start int64
- Len int64
- Chunk io.Reader
- }
- type Output struct {
- IndexMap map[string]*fileidx.FileInfo
- Response interface{}
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "success",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{
- DefaultId: &fileidx.FileInfo{
- Id: DefaultId,
- DownLimit: conf.MaxShares,
- PathLocal: "path/filename",
- State: fileidx.StateUploading,
- Uploaded: 1,
- },
- },
- },
- Input: Input{
- ShareId: DefaultId,
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{
- DefaultId: &fileidx.FileInfo{
- Id: DefaultId,
- DownLimit: conf.MaxShares,
- PathLocal: "path/filename",
- State: fileidx.StateDone,
- Uploaded: 1,
- },
- },
- Response: &ShareInfo{
- ShareId: DefaultId,
- },
- },
- },
- testCase{
- Desc: "shareId exists",
- Init: Init{
- IndexMap: map[string]*fileidx.FileInfo{},
- },
- Input: Input{
- ShareId: DefaultId,
- },
- Output: Output{
- IndexMap: map[string]*fileidx.FileInfo{},
- Response: httputil.Err404,
- },
- },
- }
-
- for _, testCase := range testCases {
- srv := initServiceForUploadTest(conf, testCase.Init.IndexMap)
-
- response := srv.finishUpload(testCase.ShareId)
-
- if !sameMap(srv.Index.List(), testCase.Output.IndexMap) {
- t.Fatalf("finishUpload: index not identical got: %v, want: %v", srv.Index.List(), testCase.Output.IndexMap)
- }
-
- switch res := response.(type) {
- case httputil.MsgRes:
- expectRes := testCase.Output.Response.(httputil.MsgRes)
- if res != expectRes {
- t.Fatalf(fmt.Sprintf("finishUpload: response=%v expectRes=%v", res, expectRes))
- }
- case *ShareInfo:
- info, found := testCase.Output.IndexMap[res.ShareId]
- if !found || info.State != fileidx.StateDone {
- // TODO: should use isValidUrl or better to verify result
- t.Fatalf(fmt.Sprintf("finishUpload: share info is not correct: received: %v expect: %v", res.ShareId, testCase.ShareId))
- }
- default:
- t.Fatalf(fmt.Sprintf("finishUpload: type not found: %T %T", response, testCase.Output.Response))
- }
- }
-}
diff --git a/server/libs/cfg/cfg.go b/server/libs/cfg/cfg.go
deleted file mode 100644
index c1272de..0000000
--- a/server/libs/cfg/cfg.go
+++ /dev/null
@@ -1,251 +0,0 @@
-package cfg
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "io/ioutil"
- "net"
- "strconv"
- "strings"
-)
-
-type Config struct {
- AppName string
- AdminId string
- AdminPwd string
- SecretKey string
- SecretKeyByte []byte `json:",omitempty"`
- // server
- Production bool
- HostName string
- Port int
- // performance
- MaxUpBytesPerSec int64
- MaxDownBytesPerSec int64
- MaxRangeLength int64
- Timeout int // millisecond
- ReadTimeout int
- WriteTimeout int
- IdleTimeout int
- WorkerPoolSize int
- TaskQueueSize int
- QueueSize int
- ParseFormBufSize int64
- MaxHeaderBytes int
- DownLimit int
- MaxShares int
- LocalFileLimit int
- // Cookie
- CookieDomain string
- CookieHttpOnly bool
- CookieMaxAge int
- CookiePath string
- CookieSecure bool
- // keys
- KeyAdminId string
- KeyAdminPwd string
- KeyToken string
- KeyFileName string
- KeyFileSize string
- KeyShareId string
- KeyStart string
- KeyLen string
- KeyChunk string
- KeyAct string
- KeyExpires string
- KeyDownLimit string
- ActStartUpload string
- ActUpload string
- ActFinishUpload string
- ActLogin string
- ActLogout string
- ActShadowId string
- ActPublishId string
- ActSetDownLimit string
- ActAddLocalFiles string
- // resource id
- AllUsers string
- // opIds
- OpIdIpVisit int16
- OpIdUpload int16
- OpIdDownload int16
- OpIdLogin int16
- OpIdGetFInfo int16
- OpIdDelFInfo int16
- OpIdOpFInfo int16
- // local
- PathLocal string
- PathLogin string
- PathDownloadLogin string
- PathDownload string
- PathUpload string
- PathStartUpload string
- PathFinishUpload string
- PathFileInfo string
- PathClient string
- // rate Limiter
- LimiterCap int64
- LimiterTtl int32
- LimiterCyc int32
- BucketCap int16
- SpecialCapsStr map[string]int16
- SpecialCaps map[int16]int16
-}
-
-func NewConfig() *Config {
- config := &Config{
- // secrets
- AppName: "qs",
- AdminId: "admin",
- AdminPwd: "qs",
- SecretKey: "qs",
- SecretKeyByte: []byte("qs"),
- // server
- Production: true,
- HostName: "localhost",
- Port: 8888,
- // performance
- MaxUpBytesPerSec: 500 * 1000,
- MaxDownBytesPerSec: 500 * 1000,
- MaxRangeLength: 10 * 1024 * 1024,
- Timeout: 500, // milliseconds
- ReadTimeout: 500,
- WriteTimeout: 43200000,
- IdleTimeout: 10000,
- WorkerPoolSize: 2,
- TaskQueueSize: 2,
- QueueSize: 2,
- ParseFormBufSize: 600,
- MaxHeaderBytes: 1 << 15, // 32KB
- DownLimit: -1,
- MaxShares: 1 << 31,
- LocalFileLimit: -1,
- // Cookie
- CookieDomain: "",
- CookieHttpOnly: false,
- CookieMaxAge: 3600 * 24 * 30, // 30 days
- CookiePath: "/",
- CookieSecure: false,
- // keys
- KeyAdminId: "adminid",
- KeyAdminPwd: "adminpwd",
- KeyToken: "token",
- KeyFileName: "fname",
- KeyFileSize: "size",
- KeyShareId: "shareid",
- KeyStart: "start",
- KeyLen: "len",
- KeyChunk: "chunk",
- KeyAct: "act",
- KeyExpires: "expires",
- KeyDownLimit: "downlimit",
- ActStartUpload: "startupload",
- ActUpload: "upload",
- ActFinishUpload: "finishupload",
- ActLogin: "login",
- ActLogout: "logout",
- ActShadowId: "shadowid",
- ActPublishId: "publishid",
- ActSetDownLimit: "setdownlimit",
- ActAddLocalFiles: "addlocalfiles",
- AllUsers: "allusers",
- // opIds
- OpIdIpVisit: 0,
- OpIdUpload: 1,
- OpIdDownload: 2,
- OpIdLogin: 3,
- OpIdGetFInfo: 4,
- OpIdDelFInfo: 5,
- OpIdOpFInfo: 6,
- // local
- PathLocal: "files",
- PathLogin: "/login",
- PathDownloadLogin: "/download-login",
- PathDownload: "/download",
- PathUpload: "/upload",
- PathStartUpload: "/startupload",
- PathFinishUpload: "/finishupload",
- PathFileInfo: "/fileinfo",
- PathClient: "/",
- // rate Limiter
- LimiterCap: 256, // max number of items (users/IPs) tracked by the limiter
- LimiterTtl: 3600, // seconds before a limiter item expires
- LimiterCyc: 1, // seconds per token-refill cycle
- BucketCap: 3, // ops allowed per LimiterCyc seconds
- SpecialCaps: map[int16]int16{
- 0: 5, // ip
- 1: 1, // upload
- 2: 1, // download
- 3: 1, // login
- },
- }
-
- return config
-}
-
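-// NewConfigFrom loads a Config from the JSON file at path, converts the
-// SpecialCapsStr keys into SpecialCaps and falls back to an auto-detected
-// local IP when HostName is empty.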
-func NewConfigFrom(path string) *Config {
- configBytes, readErr := ioutil.ReadFile(path)
- if readErr != nil {
- panic(fmt.Sprintf("config file not found: %s", path))
- }
-
- config := &Config{}
- marshalErr := json.Unmarshal(configBytes, config)
- if marshalErr != nil {
- panic("config file format is incorrect")
- }
-
- // TODO: look for a better solution
- // JSON object keys must be strings, so SpecialCaps is decoded via SpecialCapsStr
- config.SpecialCaps = make(map[int16]int16)
- for strKey, value := range config.SpecialCapsStr {
- key, parseKeyErr := strconv.ParseInt(strKey, 10, 16)
- if parseKeyErr != nil {
- panic("fail to parse SpecialCapsStr: keys must be integers encoded as strings")
- }
- config.SpecialCaps[int16(key)] = value
- }
-
- config.SecretKeyByte = []byte(config.SecretKey)
- if config.HostName == "" {
- hostName, err := GetLocalAddr()
- if err != nil {
- panic(err)
- }
- config.HostName = hostName.String()
- }
-
- return config
-}
-
-func GetLocalAddr() (net.IP, error) {
- fmt.Println(`config.HostName is empty (""); choosing an IP to listen on automatically`)
- infs, err := net.Interfaces()
- if err != nil {
- panic("fail to get net interfaces")
- }
-
- for _, inf := range infs {
- // skip loopback and docker interfaces
- if inf.Flags&net.FlagLoopback == 0 && !strings.Contains(inf.Name, "docker") {
- addrs, err := inf.Addrs()
- if err != nil {
- panic("fail to get addrs of interface")
- }
- for _, addr := range addrs {
- switch v := addr.(type) {
- case *net.IPAddr:
- if !strings.Contains(v.IP.String(), ":") {
- return v.IP, nil
- }
- case *net.IPNet:
- if !strings.Contains(v.IP.String(), ":") {
- return v.IP, nil
- }
- }
- }
- }
- }
-
- return nil, errors.New("no addr found")
-}
diff --git a/server/libs/encrypt/encrypter_hmac.go b/server/libs/encrypt/encrypter_hmac.go
deleted file mode 100644
index fc2687d..0000000
--- a/server/libs/encrypt/encrypter_hmac.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package encrypt
-
-import (
- "crypto/hmac"
- "crypto/sha256"
- "encoding/hex"
-)
-
-type HmacEncryptor struct {
- Key []byte
-}
-
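-// Encrypt returns the hex-encoded HMAC-SHA256 digest of content using Key.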
-func (encryptor *HmacEncryptor) Encrypt(content []byte) string {
- mac := hmac.New(sha256.New, encryptor.Key)
- mac.Write(content)
- return hex.EncodeToString(mac.Sum(nil))
-}
diff --git a/server/libs/encrypt/encryptor.go b/server/libs/encrypt/encryptor.go
deleted file mode 100644
index e7438c9..0000000
--- a/server/libs/encrypt/encryptor.go
+++ /dev/null
@@ -1,5 +0,0 @@
-package encrypt
-
-type Encryptor interface {
- Encrypt(content []byte) string
-}
diff --git a/server/libs/encrypt/jwt.go b/server/libs/encrypt/jwt.go
deleted file mode 100644
index e54b5cb..0000000
--- a/server/libs/encrypt/jwt.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package encrypt
-
-import (
- "github.com/robbert229/jwt"
-)
-
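-// JwtEncrypterMaker returns a TokenEncrypter backed by HMAC-SHA256 signed JWT claims.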
-func JwtEncrypterMaker(secret string) TokenEncrypter {
- return &JwtEncrypter{
- alg: jwt.HmacSha256(secret),
- claims: jwt.NewClaim(),
- }
-}
-
-type JwtEncrypter struct {
- alg jwt.Algorithm
- claims *jwt.Claims
-}
-
-func (encrypter *JwtEncrypter) Add(key string, value string) bool {
- encrypter.claims.Set(key, value)
- return true
-}
-
-func (encrypter *JwtEncrypter) FromStr(token string) bool {
- claims, err := encrypter.alg.Decode(token)
- // TODO: should return error or error info will lost
- if err != nil {
- return false
- }
-
- encrypter.claims = claims
- return true
-}
-
-func (encrypter *JwtEncrypter) Get(key string) (string, bool) {
- iValue, err := encrypter.claims.Get(key)
- // TODO: should return error or error info will lost
- if err != nil {
- return "", false
- }
-
- return iValue.(string), true
-}
-
-func (encrypter *JwtEncrypter) ToStr() (string, bool) {
- token, err := encrypter.alg.Encode(encrypter.claims)
-
- // TODO: should return error or error info will lost
- if err != nil {
- return "", false
- }
- return token, true
-}
diff --git a/server/libs/encrypt/token_encrypter.go b/server/libs/encrypt/token_encrypter.go
deleted file mode 100644
index d400163..0000000
--- a/server/libs/encrypt/token_encrypter.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package encrypt
-
-type EncrypterMaker func(string) TokenEncrypter
-
-// TODO: name should be Encrypter?
-type TokenEncrypter interface {
- Add(string, string) bool
- FromStr(string) bool
- Get(string) (string, bool)
- ToStr() (string, bool)
-}
diff --git a/server/libs/errutil/ettutil.go b/server/libs/errutil/ettutil.go
deleted file mode 100644
index 8b53bd0..0000000
--- a/server/libs/errutil/ettutil.go
+++ /dev/null
@@ -1,59 +0,0 @@
-package errutil
-
-import (
- "os"
- "runtime/debug"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/logutil"
-)
-
-type ErrUtil interface {
- IsErr(err error) bool
- IsFatalErr(err error) bool
- RecoverPanic()
-}
-
-func NewErrChecker(logStack bool, logger logutil.LogUtil) ErrUtil {
- return &ErrChecker{logStack: logStack, log: logger}
-}
-
-type ErrChecker struct {
- log logutil.LogUtil
- logStack bool
-}
-
-// IsErr reports whether err is non-nil, logging the error (and a stack trace when logStack is enabled)
-func (e *ErrChecker) IsErr(err error) bool {
- if err != nil {
- e.log.Printf("Error:%q\n", err)
- if e.logStack {
- e.log.Println(debug.Stack())
- }
- return true
- }
- return false
-}
-
-// IsFatalErr logs the error and exits the process when fe is not nil
-func (e *ErrChecker) IsFatalErr(fe error) bool {
- if fe != nil {
- e.log.Printf("Panic:%q", fe)
- if e.logStack {
- e.log.Println(debug.Stack())
- }
- os.Exit(1)
- }
- return false
-}
-
-// RecoverPanic catches a panic and logs the recovered value; it should be used with defer
-func (e *ErrChecker) RecoverPanic() {
- if r := recover(); r != nil {
- e.log.Printf("Recovered:%v", r)
- if e.logStack {
- e.log.Println(debug.Stack())
- }
- }
-}
diff --git a/server/libs/fileidx/file_idx.go b/server/libs/fileidx/file_idx.go
deleted file mode 100644
index 3b96f34..0000000
--- a/server/libs/fileidx/file_idx.go
+++ /dev/null
@@ -1,177 +0,0 @@
-package fileidx
-
-import (
- "sync"
-)
-
-const (
- // StateStarted = after startUpload, before upload
- StateStarted = "started"
- // StateUploading = after upload, before finishUpload
- StateUploading = "uploading"
- // StateDone = after finishUpload
- StateDone = "done"
-)
-
-type FileInfo struct {
- Id string
- DownLimit int
- ModTime int64
- PathLocal string
- State string
- Uploaded int64
-}
-
-type FileIndex interface {
- Add(fileInfo *FileInfo) int
- Del(id string)
- SetId(id string, newId string) bool
- SetDownLimit(id string, downLimit int) bool
- DecrDownLimit(id string) (int, bool)
- SetState(id string, state string) bool
- IncrUploaded(id string, uploaded int64) int64
- Get(id string) (*FileInfo, bool)
- List() map[string]*FileInfo
-}
-
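-// NewMemFileIndex returns an in-memory FileIndex that holds at most cap entries.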
-func NewMemFileIndex(cap int) *MemFileIndex {
- return &MemFileIndex{
- cap: cap,
- infos: make(map[string]*FileInfo, 0),
- }
-}
-
-func NewMemFileIndexWithMap(cap int, infos map[string]*FileInfo) *MemFileIndex {
- return &MemFileIndex{
- cap: cap,
- infos: infos,
- }
-}
-
-type MemFileIndex struct {
- cap int
- infos map[string]*FileInfo
- mux sync.RWMutex
-}
-
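-// Add registers fileInfo and returns 0 on success, 1 when the index is full
-// and -1 when the id is already taken.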
-func (idx *MemFileIndex) Add(fileInfo *FileInfo) int {
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- if len(idx.infos) >= idx.cap {
- return 1
- }
-
- if _, found := idx.infos[fileInfo.Id]; found {
- return -1
- }
-
- idx.infos[fileInfo.Id] = fileInfo
- return 0
-}
-
-func (idx *MemFileIndex) Del(id string) {
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- delete(idx.infos, id)
-}
-
-func (idx *MemFileIndex) SetId(id string, newId string) bool {
- if id == newId {
- return true
- }
-
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- info, found := idx.infos[id]
- if !found {
- return false
- }
-
- if _, foundNewId := idx.infos[newId]; foundNewId {
- return false
- }
-
- idx.infos[newId] = info
- idx.infos[newId].Id = newId
- delete(idx.infos, id)
- return true
-}
-
-func (idx *MemFileIndex) SetDownLimit(id string, downLimit int) bool {
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- info, found := idx.infos[id]
- if !found {
- return false
- }
-
- info.DownLimit = downLimit
- return true
-}
-
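-// DecrDownLimit consumes one download of a finished share: it returns (0, false)
-// when the share is missing or not done, (1, false) when no downloads are left,
-// and (1, true) when the download is allowed (negative limits mean unlimited).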
-func (idx *MemFileIndex) DecrDownLimit(id string) (int, bool) {
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- info, found := idx.infos[id]
- if !found || info.State != StateDone {
- return 0, false
- }
-
- if info.DownLimit == 0 {
- return 1, false
- }
-
- if info.DownLimit > 0 {
- // a negative DownLimit means unlimited, so only positive limits are decremented
- info.DownLimit = info.DownLimit - 1
- }
- return 1, true
-}
-
-func (idx *MemFileIndex) SetState(id string, state string) bool {
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- info, found := idx.infos[id]
- if !found {
- return false
- }
-
- info.State = state
- return true
-}
-
-func (idx *MemFileIndex) IncrUploaded(id string, uploaded int64) int64 {
- idx.mux.Lock()
- defer idx.mux.Unlock()
-
- info, found := idx.infos[id]
- if !found {
- return 0
- }
-
- info.Uploaded = info.Uploaded + uploaded
- return info.Uploaded
-}
-
-func (idx *MemFileIndex) Get(id string) (*FileInfo, bool) {
- idx.mux.RLock()
- defer idx.mux.RUnlock()
-
- infos, found := idx.infos[id]
- return infos, found
-}
-
-func (idx *MemFileIndex) List() map[string]*FileInfo {
- idx.mux.RLock()
- defer idx.mux.RUnlock()
-
- return idx.infos
-}
-
-// TODO: add unit tests
diff --git a/server/libs/fsutil/fsutil.go b/server/libs/fsutil/fsutil.go
deleted file mode 100644
index 6547cd1..0000000
--- a/server/libs/fsutil/fsutil.go
+++ /dev/null
@@ -1,118 +0,0 @@
-package fsutil
-
-import (
- "errors"
- "io"
- "os"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/errutil"
- "github.com/ihexxa/quickshare/server/libs/fileidx"
- "github.com/ihexxa/quickshare/server/libs/qtube"
-)
-
-type FsUtil interface {
- CreateFile(fullPath string) error
- CopyChunkN(fullPath string, chunk io.Reader, start int64, length int64) bool
- DelFile(fullPath string) bool
- Open(fullPath string) (qtube.ReadSeekCloser, error)
- MkdirAll(path string, mode os.FileMode) bool
- Readdir(dirName string, n int) ([]*fileidx.FileInfo, error)
-}
-
-func NewSimpleFs(errUtil errutil.ErrUtil) FsUtil {
- return &SimpleFs{
- Err: errUtil,
- }
-}
-
-type SimpleFs struct {
- Err errutil.ErrUtil
-}
-
-var (
- ErrExists = errors.New("file exists")
- ErrUnknown = errors.New("unknown error")
-)
-
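-// CreateFile creates an empty file exclusively and returns ErrExists when the
-// file already exists.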
-func (sfs *SimpleFs) CreateFile(fullPath string) error {
- flag := os.O_CREATE | os.O_EXCL | os.O_RDONLY
- perm := os.FileMode(0644)
- newFile, err := os.OpenFile(fullPath, flag, perm)
-
- if err == nil {
- newFile.Close() // close immediately; the file is only being created here
- return nil
- } else if os.IsExist(err) {
- return ErrExists
- } else {
- return ErrUnknown
- }
-}
-
-func (sfs *SimpleFs) CopyChunkN(fullPath string, chunk io.Reader, start int64, length int64) bool {
- flag := os.O_WRONLY
- perm := os.FileMode(0644)
- file, openErr := os.OpenFile(fullPath, flag, perm)
- if sfs.Err.IsErr(openErr) {
- return false
- }
- defer file.Close()
-
- if _, err := file.Seek(start, io.SeekStart); sfs.Err.IsErr(err) {
- return false
- }
-
- if _, err := io.CopyN(file, chunk, length); sfs.Err.IsErr(err) && err != io.EOF {
- return false
- }
-
- return true
-}
-
-func (sfs *SimpleFs) DelFile(fullPath string) bool {
- return !sfs.Err.IsErr(os.Remove(fullPath))
-}
-
-func (sfs *SimpleFs) MkdirAll(path string, mode os.FileMode) bool {
- err := os.MkdirAll(path, mode)
- return !sfs.Err.IsErr(err)
-}
-
-// TODO: does not support resuming from the previous read position
-func (sfs *SimpleFs) Readdir(dirName string, n int) ([]*fileidx.FileInfo, error) {
- dir, openErr := os.Open(dirName)
- if sfs.Err.IsErr(openErr) {
- return []*fileidx.FileInfo{}, openErr
- }
- defer dir.Close()
-
- osFileInfos, readErr := dir.Readdir(n)
- if sfs.Err.IsErr(readErr) && readErr != io.EOF {
- return []*fileidx.FileInfo{}, readErr
- }
-
- fileInfos := make([]*fileidx.FileInfo, 0)
- for _, osFileInfo := range osFileInfos {
- if osFileInfo.Mode().IsRegular() {
- fileInfos = append(
- fileInfos,
- &fileidx.FileInfo{
- ModTime: osFileInfo.ModTime().UnixNano(),
- PathLocal: osFileInfo.Name(),
- Uploaded: osFileInfo.Size(),
- },
- )
- }
- }
-
- return fileInfos, readErr
-}
-
-// the returned file descriptor has mode O_RDONLY because os.Open is used
-func (sfs *SimpleFs) Open(fullPath string) (qtube.ReadSeekCloser, error) {
- return os.Open(fullPath)
-}
diff --git a/server/libs/httputil/httputil.go b/server/libs/httputil/httputil.go
deleted file mode 100644
index 1dda6e3..0000000
--- a/server/libs/httputil/httputil.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package httputil
-
-import (
- "encoding/json"
- "net/http"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/errutil"
-)
-
-type MsgRes struct {
- Code int
- Msg string
-}
-
-var (
- Err400 = MsgRes{Code: http.StatusBadRequest, Msg: "Bad Request"}
- Err401 = MsgRes{Code: http.StatusUnauthorized, Msg: "Unauthorized"}
- Err404 = MsgRes{Code: http.StatusNotFound, Msg: "Not Found"}
- Err412 = MsgRes{Code: http.StatusPreconditionFailed, Msg: "Precondition Failed"}
- Err429 = MsgRes{Code: http.StatusTooManyRequests, Msg: "Too Many Requests"}
- Err500 = MsgRes{Code: http.StatusInternalServerError, Msg: "Internal Server Error"}
- Err503 = MsgRes{Code: http.StatusServiceUnavailable, Msg: "Service Unavailable"}
- Err504 = MsgRes{Code: http.StatusGatewayTimeout, Msg: "Gateway Timeout"}
- Ok200 = MsgRes{Code: http.StatusOK, Msg: "OK"}
-)
-
-type HttpUtil interface {
- GetCookie(cookies []*http.Cookie, key string) string
- SetCookie(res http.ResponseWriter, key string, val string)
- Fill(msg interface{}, res http.ResponseWriter) int
-}
-
-type QHttpUtil struct {
- CookieDomain string
- CookieHttpOnly bool
- CookieMaxAge int
- CookiePath string
- CookieSecure bool
- Err errutil.ErrUtil
-}
-
-func (q *QHttpUtil) GetCookie(cookies []*http.Cookie, key string) string {
- for _, cookie := range cookies {
- if cookie.Name == key {
- return cookie.Value
- }
- }
- return ""
-}
-
-func (q *QHttpUtil) SetCookie(res http.ResponseWriter, key string, val string) {
- cookie := http.Cookie{
- Name: key,
- Value: val,
- Domain: q.CookieDomain,
- Expires: time.Now().Add(time.Duration(q.CookieMaxAge) * time.Second),
- HttpOnly: q.CookieHttpOnly,
- MaxAge: q.CookieMaxAge,
- Secure: q.CookieSecure,
- Path: q.CookiePath,
- }
-
- res.Header().Set("Set-Cookie", cookie.String())
-}
-
-func (q *QHttpUtil) Fill(msg interface{}, res http.ResponseWriter) int {
- if msg == nil {
- return 0
- }
-
- msgBytes, marsErr := json.Marshal(msg)
- if q.Err.IsErr(marsErr) {
- return 0
- }
-
- wrote, writeErr := res.Write(msgBytes)
- if q.Err.IsErr(writeErr) {
- return 0
- }
- return wrote
-}
diff --git a/server/libs/httpworker/worker.go b/server/libs/httpworker/worker.go
deleted file mode 100644
index b578938..0000000
--- a/server/libs/httpworker/worker.go
+++ /dev/null
@@ -1,130 +0,0 @@
-package httpworker
-
-import (
- "errors"
- "net/http"
- "runtime/debug"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/logutil"
-)
-
-var (
- ErrWorkerNotFound = errors.New("worker not found")
- ErrTimeout = errors.New("timeout")
-)
-
-type DoFunc func(http.ResponseWriter, *http.Request)
-
-type Task struct {
- Ack chan error
- Do DoFunc
- Res http.ResponseWriter
- Req *http.Request
-}
-
-type Workers interface {
- Put(*Task) bool
- IsInTime(ack chan error, msec time.Duration) error
-}
-
-type WorkerPool struct {
- queue chan *Task
- size int
- workers []*Worker
- log logutil.LogUtil // TODO: should not pass log here
-}
-
-func NewWorkerPool(poolSize int, queueSize int, log logutil.LogUtil) Workers {
- queue := make(chan *Task, queueSize)
- workers := make([]*Worker, 0, poolSize)
-
- for i := 0; i < poolSize; i++ {
- worker := &Worker{
- Id: uint64(i),
- queue: queue,
- log: log,
- }
-
- go worker.Start()
- workers = append(workers, worker)
- }
-
- return &WorkerPool{
- queue: queue,
- size: poolSize,
- workers: workers,
- log: log,
- }
-}
-
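-// Put enqueues a task; it rejects the task when the number of queued tasks has
-// reached the pool size (note: the pool size, not cap(pool.queue), bounds the backlog).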
-func (pool *WorkerPool) Put(task *Task) bool {
- if len(pool.queue) >= pool.size {
- return false
- }
-
- pool.queue <- task
- return true
-}
-
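-// IsInTime waits up to msec for the task to be acknowledged and returns
-// ErrTimeout when the deadline is exceeded.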
-func (pool *WorkerPool) IsInTime(ack chan error, msec time.Duration) error {
- start := time.Now().UnixNano()
- timeout := make(chan error, 1) // buffered so the timer goroutine does not leak when ack arrives first
-
- go func() {
- time.Sleep(msec)
- timeout <- ErrTimeout
- }()
-
- select {
- case err := <-ack:
- if err == nil {
- pool.log.Printf(
- "finish cost: %d usec",
- (time.Now().UnixNano()-start)/1000,
- )
- } else {
- pool.log.Printf(
- "finish with error cost: %d usec",
- (time.Now().UnixNano()-start)/1000,
- )
- }
- return err
- case errTimeout := <-timeout:
- pool.log.Printf("timeout cost: %d usec", (time.Now().UnixNano()-start)/1000)
- return errTimeout
- }
-}
-
-type Worker struct {
- Id uint64
- queue chan *Task
- log logutil.LogUtil
-}
-
-func (worker *Worker) RecoverPanic() {
- if r := recover(); r != nil {
- worker.log.Printf("Recovered:%v stack: %v", r, debug.Stack())
- // restart worker and IsInTime will return timeout error for last task
- worker.Start()
- }
-}
-
-func (worker *Worker) Start() {
- defer worker.RecoverPanic()
-
- for {
- task := <-worker.queue
- if task.Do != nil {
- task.Do(task.Res, task.Req)
- task.Ack <- nil
- } else {
- task.Ack <- ErrWorkerNotFound
- }
- }
-}
-
-// ServiceFunc lets you return struct directly
-type ServiceFunc func(http.ResponseWriter, *http.Request) interface{}
diff --git a/server/libs/limiter/limiter.go b/server/libs/limiter/limiter.go
deleted file mode 100644
index 307037d..0000000
--- a/server/libs/limiter/limiter.go
+++ /dev/null
@@ -1,5 +0,0 @@
-package limiter
-
-type Limiter interface {
- Access(string, int16) bool
-}
diff --git a/server/libs/limiter/rate_limiter.go b/server/libs/limiter/rate_limiter.go
deleted file mode 100644
index 7bccf1a..0000000
--- a/server/libs/limiter/rate_limiter.go
+++ /dev/null
@@ -1,220 +0,0 @@
-package limiter
-
-import (
- "sync"
- "time"
-)
-
-func now() int32 {
- return int32(time.Now().Unix())
-}
-
-func afterCyc(cyc int32) int32 {
- return int32(time.Now().Unix()) + cyc
-}
-
-func afterTtl(ttl int32) int32 {
- return int32(time.Now().Unix()) + ttl
-}
-
-type Bucket struct {
- Refresh int32
- Tokens int16
-}
-
-func NewBucket(cyc int32, cap int16) *Bucket {
- return &Bucket{
- Refresh: afterCyc(cyc),
- Tokens: cap,
- }
-}
-
-type Item struct {
- Expired int32
- Buckets map[int16]*Bucket
-}
-
-func NewItem(ttl int32) *Item {
- return &Item{
- Expired: afterTtl(ttl),
- Buckets: make(map[int16]*Bucket),
- }
-}
-
-type RateLimiter struct {
- items map[string]*Item
- bucketCap int16
- customCaps map[int16]int16
- cap int64
- cyc int32 // seconds between bucket refreshes and autoclean runs
- ttl int32 // seconds after which an item expires (it is removed lazily by autoclean)
- mux sync.RWMutex
- snapshot map[string]map[int16]*Bucket
-}
-
-func NewRateLimiter(cap int64, ttl int32, cyc int32, bucketCap int16, customCaps map[int16]int16) Limiter {
- if cap < 1 || ttl < 1 || cyc < 1 || bucketCap < 1 {
- panic("cap | bucketCap | ttl | cycle cant be less than 1")
- }
-
- limiter := &RateLimiter{
- items: make(map[string]*Item, cap),
- bucketCap: bucketCap,
- customCaps: customCaps,
- cap: cap,
- ttl: ttl,
- cyc: cyc,
- }
-
- go limiter.autoClean()
-
- return limiter
-}
-
-func (limiter *RateLimiter) getBucketCap(opId int16) int16 {
- bucketCap, existed := limiter.customCaps[opId]
- if !existed {
- return limiter.bucketCap
- }
- return bucketCap
-}
-
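-// Access takes one token from the bucket of (itemId, opId), creating the item
-// and bucket on first use; it returns false when the limiter is full or the
-// bucket has no tokens left in the current cycle.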
-func (limiter *RateLimiter) Access(itemId string, opId int16) bool {
- limiter.mux.Lock()
- defer limiter.mux.Unlock()
-
- item, itemExisted := limiter.items[itemId]
- if !itemExisted {
- if int64(len(limiter.items)) >= limiter.cap {
- return false
- }
-
- limiter.items[itemId] = NewItem(limiter.ttl)
- limiter.items[itemId].Buckets[opId] = NewBucket(limiter.cyc, limiter.getBucketCap(opId)-1)
- return true
- }
-
- bucket, bucketExisted := item.Buckets[opId]
- if !bucketExisted {
- item.Buckets[opId] = NewBucket(limiter.cyc, limiter.getBucketCap(opId)-1)
- return true
- }
-
- if bucket.Refresh > now() {
- if bucket.Tokens > 0 {
- bucket.Tokens--
- return true
- }
- return false
- }
-
- bucket.Refresh = afterCyc(limiter.cyc)
- bucket.Tokens = limiter.getBucketCap(opId) - 1
- return true
-}
-
-func (limiter *RateLimiter) GetCap() int64 {
- return limiter.cap
-}
-
-func (limiter *RateLimiter) GetSize() int64 {
- limiter.mux.RLock()
- defer limiter.mux.RUnlock()
- return int64(len(limiter.items))
-}
-
-func (limiter *RateLimiter) ExpandCap(cap int64) bool {
- // the write lock is required: limiter.cap is modified below
- limiter.mux.Lock()
- defer limiter.mux.Unlock()
-
- if cap <= int64(len(limiter.items)) {
- return false
- }
-
- limiter.cap = cap
- return true
-}
-
-func (limiter *RateLimiter) GetTTL() int32 {
- return limiter.ttl
-}
-
-func (limiter *RateLimiter) UpdateTTL(ttl int32) bool {
- if ttl < 1 {
- return false
- }
-
- limiter.ttl = ttl
- return true
-}
-
-func (limiter *RateLimiter) GetCyc() int32 {
- return limiter.cyc
-}
-
-func (limiter *RateLimiter) UpdateCyc(cyc int32) bool {
- if cyc < 1 {
- return false
- }
-
- limiter.cyc = cyc
- return true
-}
-
-func (limiter *RateLimiter) Snapshot() map[string]map[int16]*Bucket {
- return limiter.snapshot
-}
-
-func (limiter *RateLimiter) autoClean() {
- for {
- if limiter.cyc == 0 {
- break
- }
- time.Sleep(time.Duration(limiter.cyc) * time.Second)
- limiter.clean()
- }
-}
-
-// clean holds the lock while it runs, so it may slow down other operations; is running it every cycle too frequent?
-func (limiter *RateLimiter) clean() {
- limiter.snapshot = make(map[string]map[int16]*Bucket)
- now := now()
-
- // the write lock is required: expired items are deleted from the map below
- limiter.mux.Lock()
- defer limiter.mux.Unlock()
- for key, item := range limiter.items {
- if item.Expired <= now {
- delete(limiter.items, key)
- } else {
- limiter.snapshot[key] = item.Buckets
- }
- }
-}
-
-// Only for test
-func (limiter *RateLimiter) exist(id string) bool {
- limiter.mux.RLock()
- defer limiter.mux.RUnlock()
-
- _, existed := limiter.items[id]
- return existed
-}
-
-// Only for test
-func (limiter *RateLimiter) truncate() {
- // the write lock is required because items are deleted from the map
- limiter.mux.Lock()
- defer limiter.mux.Unlock()
-
- for key := range limiter.items {
- delete(limiter.items, key)
- }
-}
-
-// Only for test
-func (limiter *RateLimiter) get(id string) (*Item, bool) {
- limiter.mux.RLock()
- defer limiter.mux.RUnlock()
-
- item, existed := limiter.items[id]
- return item, existed
-}
diff --git a/server/libs/limiter/rate_limiter_test.go b/server/libs/limiter/rate_limiter_test.go
deleted file mode 100644
index 3f90dcf..0000000
--- a/server/libs/limiter/rate_limiter_test.go
+++ /dev/null
@@ -1,161 +0,0 @@
-package limiter
-
-import (
- "fmt"
- "math/rand"
- "testing"
- "time"
-)
-
-var rnd = rand.New(rand.NewSource(time.Now().UnixNano()))
-
-const rndCap = 10000
-const addCap = 1
-
-// how to choose the timing constants:
-// wait can be extended to be greater than ttl/2
-// cyc is smaller than both ttl and wait so that items can be cleaned in time
-const cap = 40
-const ttl = 3
-const cyc = 1
-const bucketCap = 2
-const id1 = "id1"
-const id2 = "id2"
-const op1 int16 = 0
-const op2 int16 = 1
-
-var customCaps = map[int16]int16{
- op2: 1000,
-}
-
-const wait = 1
-
-var limiter = NewRateLimiter(cap, ttl, cyc, bucketCap, customCaps).(*RateLimiter)
-
-func printItem(id string) {
- item, existed := limiter.get(id1)
- if existed {
- fmt.Println("expired, now, existed", item.Expired, now(), existed)
- for id, bucket := range item.Buckets {
- fmt.Println("\tid, bucket", id, bucket)
- }
- } else {
- fmt.Println("not existed")
- }
-}
-
-var idSeed = 0
-
-func randId() string {
- idSeed++
- return fmt.Sprintf("%d", idSeed)
-}
-
-func TestAccess(t *testing.T) {
- func(t *testing.T) {
- canAccess := limiter.Access(id1, op1)
- if !canAccess {
- t.Fatal("access: fail")
- }
-
- for i := 0; i < bucketCap; i++ {
- canAccess = limiter.Access(id1, op1)
- }
-
- if canAccess {
- t.Fatal("access: fail to deny access")
- }
-
- time.Sleep(time.Duration(limiter.GetCyc()) * time.Second)
-
- canAccess = limiter.Access(id1, op1)
- if !canAccess {
- t.Fatal("access: fail to refresh tokens")
- }
- }(t)
-}
-
-func TestCap(t *testing.T) {
- originalCap := limiter.GetCap()
- fmt.Printf("cap:info: %d\n", originalCap)
-
- ok := limiter.ExpandCap(originalCap + addCap)
-
- if !ok || limiter.GetCap() != originalCap+addCap {
- t.Fatal("cap: fail to expand")
- }
-
- ok = limiter.ExpandCap(limiter.GetSize() - addCap)
- if ok {
- t.Fatal("cap: shrink cap")
- }
-
- ids := []string{}
- for limiter.GetSize() < limiter.GetCap() {
- id := randId()
- ids = append(ids, id)
-
- ok := limiter.Access(id, 0)
- if !ok {
- t.Fatal("cap: not full")
- }
- }
-
- if limiter.GetSize() != limiter.GetCap() {
- t.Fatal("cap: incorrect size")
- }
-
- if limiter.Access(randId(), 0) {
- t.Fatal("cap: more than cap")
- }
-
- limiter.truncate()
-}
-
-func TestTtl(t *testing.T) {
- var addTtl int32 = 1
- originalTTL := limiter.GetTTL()
- fmt.Printf("ttl:info: %d\n", originalTTL)
-
- limiter.UpdateTTL(originalTTL + addTtl)
- if limiter.GetTTL() != originalTTL+addTtl {
- t.Fatal("ttl: update fail")
- }
-}
-
-func cycTest(t *testing.T) {
- var addCyc int32 = 1
- originalCyc := limiter.GetCyc()
- fmt.Printf("cyc:info: %d\n", originalCyc)
-
- limiter.UpdateCyc(originalCyc + addCyc)
- if limiter.GetCyc() != originalCyc+addCyc {
- t.Fatal("cyc: update fail")
- }
-}
-
-func autoCleanTest(t *testing.T) {
- ids := []string{
- randId(),
- randId(),
- }
-
- for _, id := range ids {
- ok := limiter.Access(id, 0)
- if !ok {
- t.Fatal("autoClean: fail to add item")
- }
- }
-
- time.Sleep(time.Duration(limiter.GetTTL()+wait) * time.Second)
-
- for _, id := range ids {
- _, exist := limiter.get(id)
- if exist {
- t.Fatal("autoClean: item still exist")
- }
- }
-}
-
-// func snapshotTest(t *testing.T) {
-// }
diff --git a/server/libs/logutil/logutil.go b/server/libs/logutil/logutil.go
deleted file mode 100644
index 15b2f6a..0000000
--- a/server/libs/logutil/logutil.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package logutil
-
-type LogUtil interface {
- Print(v ...interface{})
- Printf(format string, v ...interface{})
- Println(v ...interface{})
-}
diff --git a/server/libs/logutil/slogger.go b/server/libs/logutil/slogger.go
deleted file mode 100644
index f0cd3f6..0000000
--- a/server/libs/logutil/slogger.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package logutil
-
-import (
- "io"
- "log"
-)
-
-func NewSlog(out io.Writer, prefix string) LogUtil {
- return log.New(out, prefix, log.Ldate|log.Ltime|log.Lshortfile)
-}
-
-type Slog *log.Logger
diff --git a/server/libs/qtube/downloader.go b/server/libs/qtube/downloader.go
deleted file mode 100644
index 9f8e0dd..0000000
--- a/server/libs/qtube/downloader.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package qtube
-
-import (
- "net/http"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/fileidx"
-)
-
-type Downloader interface {
- ServeFile(res http.ResponseWriter, req *http.Request, fileInfo *fileidx.FileInfo) error
-}
diff --git a/server/libs/qtube/qtube.go b/server/libs/qtube/qtube.go
deleted file mode 100644
index e15f39a..0000000
--- a/server/libs/qtube/qtube.go
+++ /dev/null
@@ -1,280 +0,0 @@
-package qtube
-
-import (
- "errors"
- "fmt"
- "io"
- "net/http"
- "path/filepath"
- "strconv"
- "strings"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/fileidx"
-)
-
-var (
- ErrCopy = errors.New("ServeFile: copy error")
- ErrUnknown = errors.New("ServeFile: unknown error")
-)
-
-type httpRange struct {
- start, length int64
-}
-
-func (ra *httpRange) GetStart() int64 {
- return ra.start
-}
-func (ra *httpRange) GetLength() int64 {
- return ra.length
-}
-func (ra *httpRange) SetStart(start int64) {
- ra.start = start
-}
-func (ra *httpRange) SetLength(length int64) {
- ra.length = length
-}
-
-func NewQTube(root string, copySpeed, maxRangeLen int64, filer FileReadSeekCloser) Downloader {
- return &QTube{
- Root: root,
- BytesPerSec: copySpeed,
- MaxRangeLen: maxRangeLen,
- Filer: filer,
- }
-}
-
-type QTube struct {
- Root string
- BytesPerSec int64
- MaxRangeLen int64
- Filer FileReadSeekCloser
-}
-
-type FileReadSeekCloser interface {
- Open(filePath string) (ReadSeekCloser, error)
-}
-
-type ReadSeekCloser interface {
- io.Reader
- io.Seeker
- io.Closer
-}
-
-const (
- ErrorInvalidRange = "ServeFile: invalid Range"
- ErrorInvalidSize = "ServeFile: invalid Range total size"
-)
-
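-// ServeFile answers HEAD requests with headers only, streams the whole file
-// when no Range header is present and otherwise serves the first requested range.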
-func (tb *QTube) ServeFile(res http.ResponseWriter, req *http.Request, fileInfo *fileidx.FileInfo) error {
- headerRange := req.Header.Get("Range")
-
- switch {
- case req.Method == http.MethodHead:
- res.Header().Set("Accept-Ranges", "bytes")
- res.Header().Set("Content-Length", fmt.Sprintf("%d", fileInfo.Uploaded))
- res.Header().Set("Content-Type", "application/octet-stream")
- res.WriteHeader(http.StatusOK)
-
- return nil
- case headerRange == "":
- return tb.serveAll(res, fileInfo)
- default:
- return tb.serveRanges(res, headerRange, fileInfo)
- }
-}
-
-func (tb *QTube) serveAll(res http.ResponseWriter, fileInfo *fileidx.FileInfo) error {
- res.Header().Set("Accept-Ranges", "bytes")
- res.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, filepath.Base(fileInfo.PathLocal)))
- res.Header().Set("Content-Length", fmt.Sprintf("%d", fileInfo.Uploaded))
- res.Header().Set("Content-Type", "application/octet-stream")
- res.Header().Set("Last-Modified", time.Unix(fileInfo.ModTime, 0).UTC().Format(http.TimeFormat))
- res.WriteHeader(http.StatusOK)
-
- // TODO: need verify path
- file, openErr := tb.Filer.Open(filepath.Join(tb.Root, fileInfo.PathLocal))
- if openErr != nil {
- // check the error before deferring Close: file may be nil here
- return openErr
- }
- defer file.Close()
-
- copyErr := tb.throttledCopyN(res, file, fileInfo.Uploaded)
- if copyErr != nil && copyErr != io.EOF {
- return copyErr
- }
-
- return nil
-}
-
-func (tb *QTube) serveRanges(res http.ResponseWriter, headerRange string, fileInfo *fileidx.FileInfo) error {
- ranges, rangeErr := getRanges(headerRange, fileInfo.Uploaded)
- if rangeErr != nil {
- http.Error(res, rangeErr.Error(), http.StatusRequestedRangeNotSatisfiable)
- return errors.New(rangeErr.Error())
- }
-
- switch {
- case len(ranges) >= 1:
- // TODO: add support for multiple ranges; only the first range is served for now
- if tb.copyRange(res, ranges[0], fileInfo) != nil {
- return ErrCopy
- }
- default:
- return ErrUnknown
- }
-
- return nil
-}
-
-func getRanges(headerRange string, size int64) ([]httpRange, error) {
- ranges, raParseErr := parseRange(headerRange, size)
- // TODO: check max number of ranges, range start end
- if raParseErr != nil || len(ranges) == 0 {
- return nil, errors.New(ErrorInvalidRange)
- }
- if sumRangesSize(ranges) > size {
- return nil, errors.New(ErrorInvalidSize)
- }
-
- return ranges, nil
-}
-
-func (tb *QTube) copyRange(res http.ResponseWriter, ra httpRange, fileInfo *fileidx.FileInfo) error {
- // TODO: confirm this won't cause problems
- if ra.GetLength() > tb.MaxRangeLen {
- ra.SetLength(tb.MaxRangeLen)
- }
-
- // TODO: add headers(ETag): https://tools.ietf.org/html/rfc7233#section-4.1 p11 2nd paragraph
- res.Header().Set("Accept-Ranges", "bytes")
- res.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, filepath.Base(fileInfo.PathLocal)))
- res.Header().Set("Content-Type", "application/octet-stream")
- res.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", ra.start, ra.start+ra.length-1, fileInfo.Uploaded))
- res.Header().Set("Content-Length", strconv.FormatInt(ra.GetLength(), 10))
- res.Header().Set("Last-Modified", time.Unix(fileInfo.ModTime, 0).UTC().Format(http.TimeFormat))
- res.WriteHeader(http.StatusPartialContent)
-
- // TODO: verify that the joined path stays under tb.Root
- file, openErr := tb.Filer.Open(filepath.Join(tb.Root, fileInfo.PathLocal))
- if openErr != nil {
- return openErr
- }
- defer file.Close()
-
- if _, seekErr := file.Seek(ra.start, io.SeekStart); seekErr != nil {
- return seekErr
- }
-
- copyErr := tb.throttledCopyN(res, file, ra.length)
- if copyErr != nil && copyErr != io.EOF {
- return copyErr
- }
-
- return nil
-}
-
-func (tb *QTube) throttledCopyN(dst io.Writer, src io.Reader, length int64) error {
- sum := int64(0)
- timeSlot := time.Second
-
- for sum < length {
- start := time.Now()
- chunkSize := length - sum
- if length-sum > tb.BytesPerSec {
- chunkSize = tb.BytesPerSec
- }
-
- copied, err := io.CopyN(dst, src, chunkSize)
- if err != nil {
- return err
- }
-
- sum += copied
- end := time.Now()
- if end.Before(start.Add(timeSlot)) {
- time.Sleep(start.Add(timeSlot).Sub(end))
- }
- }
-
- return nil
-}
-
-func parseRange(headerRange string, size int64) ([]httpRange, error) {
- if headerRange == "" {
- return nil, nil // header not present
- }
-
- const keyByte = "bytes="
- if !strings.HasPrefix(headerRange, keyByte) {
- return nil, errors.New("byte= not found")
- }
-
- var ranges []httpRange
- noOverlap := false
- for _, ra := range strings.Split(headerRange[len(keyByte):], ",") {
- ra = strings.TrimSpace(ra)
- if ra == "" {
- continue
- }
-
- i := strings.Index(ra, "-")
- if i < 0 {
- return nil, errors.New("- not found")
- }
-
- start, end := strings.TrimSpace(ra[:i]), strings.TrimSpace(ra[i+1:])
- var r httpRange
- if start == "" {
- i, err := strconv.ParseInt(end, 10, 64)
- if err != nil {
- return nil, errors.New("invalid range")
- }
- if i > size {
- i = size
- }
- r.start = size - i
- r.length = size - r.start
- } else {
- i, err := strconv.ParseInt(start, 10, 64)
- if err != nil || i < 0 {
- return nil, errors.New("invalid range")
- }
- if i >= size {
- // If the range begins after the size of the content,
- // then it does not overlap.
- noOverlap = true
- continue
- }
- r.start = i
- if end == "" {
- // If no end is specified, range extends to end of the file.
- r.length = size - r.start
- } else {
- i, err := strconv.ParseInt(end, 10, 64)
- if err != nil || r.start > i {
- return nil, errors.New("invalid range")
- }
- if i >= size {
- i = size - 1
- }
- r.length = i - r.start + 1
- }
- }
- ranges = append(ranges, r)
- }
- if noOverlap && len(ranges) == 0 {
- // The specified ranges did not overlap with the content.
- return nil, errors.New("parseRanges: no overlap")
- }
- return ranges, nil
-}
-
-func sumRangesSize(ranges []httpRange) (size int64) {
- for _, ra := range ranges {
- size += ra.length
- }
- return
-}
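
The deleted qtube.go above implements the throttled, range-aware downloader. As a hedged reference, here is a minimal sketch of how it could be wired into a plain net/http handler; the osFiler type, the /download route, the throttle values, and the hard-coded FileInfo are illustrative assumptions rather than the server's actual wiring.

package main

import (
	"log"
	"net/http"
	"os"

	"github.com/ihexxa/quickshare/server/libs/fileidx"
	"github.com/ihexxa/quickshare/server/libs/qtube"
)

// osFiler is a hypothetical FileReadSeekCloser backed by the local filesystem.
type osFiler struct{}

func (osFiler) Open(filePath string) (qtube.ReadSeekCloser, error) {
	return os.Open(filePath) // *os.File implements Read, Seek and Close
}

func main() {
	// Throttle at 512 KiB/s and cap a single range at 4 MiB; both values are illustrative.
	tube := qtube.NewQTube("/tmp/files", 512*1024, 4*1024*1024, osFiler{})

	http.HandleFunc("/download", func(w http.ResponseWriter, r *http.Request) {
		// Normally the FileInfo comes from the file index; hard-coded here for brevity.
		info := &fileidx.FileInfo{PathLocal: "demo.bin", Uploaded: 1024, ModTime: 0}
		if err := tube.ServeFile(w, r, info); err != nil {
			log.Printf("download failed: %v", err)
		}
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
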
diff --git a/server/libs/qtube/qtube_test.go b/server/libs/qtube/qtube_test.go
deleted file mode 100644
index e980e8e..0000000
--- a/server/libs/qtube/qtube_test.go
+++ /dev/null
@@ -1,354 +0,0 @@
-package qtube
-
-import (
- "bytes"
- "fmt"
- "net/http"
- "strings"
- "testing"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/fileidx"
-)
-
-// Range header format examples (placeholders in angle brackets):
-// Range: <unit>=<range-start>-
-// Range: <unit>=<range-start>-<range-end>
-// Range: <unit>=<range-start>-<range-end>, <range-start>-<range-end>
-// Range: <unit>=<range-start>-<range-end>, <range-start>-<range-end>, <range-start>-<range-end>
-func TestGetRanges(t *testing.T) {
- type Input struct {
- HeaderRange string
- Size int64
- }
- type Output struct {
- Ranges []httpRange
- ErrorMsg string
- }
- type testCase struct {
- Desc string
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "invalid range",
- Input: Input{
- HeaderRange: "bytes=start-invalid end",
- Size: 0,
- },
- Output: Output{
- ErrorMsg: ErrorInvalidRange,
- },
- },
- testCase{
- Desc: "invalid range total size",
- Input: Input{
- HeaderRange: "bytes=0-1, 2-3, 0-1, 0-2",
- Size: 3,
- },
- Output: Output{
- ErrorMsg: ErrorInvalidSize,
- },
- },
- testCase{
- Desc: "range ok",
- Input: Input{
- HeaderRange: "bytes=0-1, 2-3",
- Size: 4,
- },
- Output: Output{
- Ranges: []httpRange{
- httpRange{start: 0, length: 2},
- httpRange{start: 2, length: 2},
- },
- ErrorMsg: "",
- },
- },
- }
-
- for _, tCase := range testCases {
- ranges, err := getRanges(tCase.HeaderRange, tCase.Size)
- if err != nil {
- if err.Error() != tCase.ErrorMsg || len(tCase.Ranges) != 0 {
- t.Fatalf("getRanges: incorrect errorMsg want: %v got: %v", tCase.ErrorMsg, err.Error())
- } else {
- continue
- }
- } else {
- for id, ra := range ranges {
- if ra.GetStart() != tCase.Ranges[id].GetStart() {
- t.Fatalf("getRanges: incorrect range start, got: %v want: %v", ra.GetStart(), tCase.Ranges[id])
- }
- if ra.GetLength() != tCase.Ranges[id].GetLength() {
- t.Fatalf("getRanges: incorrect range length, got: %v want: %v", ra.GetLength(), tCase.Ranges[id])
- }
- }
- }
- }
-}
-
-func TestThrottledCopyN(t *testing.T) {
- type Init struct {
- BytesPerSec int64
- MaxRangeLen int64
- }
- type Input struct {
- Src string
- Length int64
- }
- // verifyDsts sleeps DstAtTime.AtMS milliseconds before each check; after each
- // sleep, the copied value should equal DstAtTime.Dst.
- type DstAtTime struct {
- AtMS int
- Dst string
- }
- type Output struct {
- ExpectDsts []DstAtTime
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- verifyDsts := func(dst *bytes.Buffer, expectDsts []DstAtTime) {
- for _, expectDst := range expectDsts {
- // fmt.Printf("sleep: %d\n", time.Now().UnixNano())
- time.Sleep(time.Duration(expectDst.AtMS) * time.Millisecond)
- dstStr := string(dst.Bytes())
- // fmt.Printf("check: %d\n", time.Now().UnixNano())
- if dstStr != expectDst.Dst {
- panic(
- fmt.Sprintf(
- "throttledCopyN want: <%s> | got: <%s> | at: %d",
- expectDst.Dst,
- dstStr,
- expectDst.AtMS,
- ),
- )
- }
- }
- }
-
- testCases := []testCase{
- testCase{
- Desc: "4 byte per sec",
- Init: Init{
- BytesPerSec: 5,
- MaxRangeLen: 10,
- },
- Input: Input{
- Src: "aaaa_aaaa_",
- Length: 10,
- },
- Output: Output{
- ExpectDsts: []DstAtTime{
- DstAtTime{AtMS: 200, Dst: "aaaa_"},
- DstAtTime{AtMS: 200, Dst: "aaaa_"},
- DstAtTime{AtMS: 200, Dst: "aaaa_"},
- DstAtTime{AtMS: 600, Dst: "aaaa_aaaa_"},
- DstAtTime{AtMS: 200, Dst: "aaaa_aaaa_"},
- DstAtTime{AtMS: 200, Dst: "aaaa_aaaa_"},
- },
- },
- },
- }
-
- for _, tCase := range testCases {
- tb := NewQTube("", tCase.BytesPerSec, tCase.MaxRangeLen, &stubFiler{}).(*QTube)
- dst := bytes.NewBuffer(make([]byte, 0, len(tCase.Src)))
-
- go verifyDsts(dst, tCase.ExpectDsts)
- tb.throttledCopyN(dst, strings.NewReader(tCase.Src), tCase.Length)
- }
-}
-
- // TODO: share this stub with the test helper instead of duplicating it
-type stubWriter struct {
- Headers http.Header
- Response []byte
- StatusCode int
-}
-
-func (w *stubWriter) Header() http.Header {
- return w.Headers
-}
-
-func (w *stubWriter) Write(body []byte) (int, error) {
- w.Response = append(w.Response, body...)
- return len(body), nil
-}
-
-func (w *stubWriter) WriteHeader(statusCode int) {
- w.StatusCode = statusCode
-}
-
-func TestCopyRange(t *testing.T) {
- type Init struct {
- Content string
- }
- type Input struct {
- Range httpRange
- Info fileidx.FileInfo
- }
- type Output struct {
- StatusCode int
- Headers map[string][]string
- Body string
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "copy ok",
- Init: Init{
- Content: "abcd_abcd_",
- },
- Input: Input{
- Range: httpRange{
- start: 6,
- length: 3,
- },
- Info: fileidx.FileInfo{
- ModTime: 0,
- Uploaded: 10,
- PathLocal: "filename.jpg",
- },
- },
- Output: Output{
- StatusCode: 206,
- Headers: map[string][]string{
- "Accept-Ranges": []string{"bytes"},
- "Content-Disposition": []string{`attachment; filename="filename.jpg"`},
- "Content-Type": []string{"application/octet-stream"},
- "Content-Range": []string{"bytes 6-8/10"},
- "Content-Length": []string{"3"},
- "Last-Modified": []string{time.Unix(0, 0).UTC().Format(http.TimeFormat)},
- },
- Body: "abc",
- },
- },
- }
-
- for _, tCase := range testCases {
- filer := &stubFiler{
- &StubFile{
- Content: tCase.Content,
- Offset: 0,
- },
- }
- tb := NewQTube("", 100, 100, filer).(*QTube)
- res := &stubWriter{
- Headers: make(map[string][]string),
- Response: make([]byte, 0),
- }
- err := tb.copyRange(res, tCase.Range, &tCase.Info)
- if err != nil {
- t.Fatalf("copyRange: %v", err)
- }
- if res.StatusCode != tCase.Output.StatusCode {
- t.Fatalf("copyRange: statusCode not match got: %v want: %v", res.StatusCode, tCase.Output.StatusCode)
- }
- if string(res.Response) != tCase.Output.Body {
- t.Fatalf("copyRange: body not match \ngot: %v \nwant: %v", string(res.Response), tCase.Output.Body)
- }
- for key, vals := range tCase.Output.Headers {
- if res.Header().Get(key) != vals[0] {
- t.Fatalf("copyRange: header not match %v got: %v want: %v", key, res.Header().Get(key), vals[0])
- }
- }
- }
-}
-
-func TestServeAll(t *testing.T) {
- type Init struct {
- Content string
- }
- type Input struct {
- Info fileidx.FileInfo
- }
- type Output struct {
- StatusCode int
- Headers map[string][]string
- Body string
- }
- type testCase struct {
- Desc string
- Init
- Input
- Output
- }
-
- testCases := []testCase{
- testCase{
- Desc: "copy ok",
- Init: Init{
- Content: "abcd_abcd_",
- },
- Input: Input{
- Info: fileidx.FileInfo{
- ModTime: 0,
- Uploaded: 10,
- PathLocal: "filename.jpg",
- },
- },
- Output: Output{
- StatusCode: 200,
- Headers: map[string][]string{
- "Accept-Ranges": []string{"bytes"},
- "Content-Disposition": []string{`attachment; filename="filename.jpg"`},
- "Content-Type": []string{"application/octet-stream"},
- "Content-Length": []string{"10"},
- "Last-Modified": []string{time.Unix(0, 0).UTC().Format(http.TimeFormat)},
- },
- Body: "abcd_abcd_",
- },
- },
- }
-
- for _, tCase := range testCases {
- filer := &stubFiler{
- &StubFile{
- Content: tCase.Content,
- Offset: 0,
- },
- }
- tb := NewQTube("", 100, 100, filer).(*QTube)
- res := &stubWriter{
- Headers: make(map[string][]string),
- Response: make([]byte, 0),
- }
- err := tb.serveAll(res, &tCase.Info)
- if err != nil {
- t.Fatalf("serveAll: %v", err)
- }
- if res.StatusCode != tCase.Output.StatusCode {
- t.Fatalf("serveAll: statusCode not match got: %v want: %v", res.StatusCode, tCase.Output.StatusCode)
- }
- if string(res.Response) != tCase.Output.Body {
- t.Fatalf("serveAll: body not match \ngot: %v \nwant: %v", string(res.Response), tCase.Output.Body)
- }
- for key, vals := range tCase.Output.Headers {
- if res.Header().Get(key) != vals[0] {
- t.Fatalf("serveAll: header not match %v got: %v want: %v", key, res.Header().Get(key), vals[0])
- }
- }
- }
-}
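
The tests above pin down the expected 206 status and Content-Range headers. For orientation, a hedged client-side sketch of a matching partial request follows; the URL and query parameter are assumptions, since the real route and share-id handling live elsewhere in the server.

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Request only the first KiB; a range-aware server should answer 206 Partial Content.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/download?shareid=abc", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Range", "bytes=0-1023")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode)                  // 206 when the range is honoured
	fmt.Println(resp.Header.Get("Content-Range")) // e.g. "bytes 0-1023/10485760"
	fmt.Println(len(body))                        // at most 1024 bytes
}
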
diff --git a/server/libs/qtube/test_helper.go b/server/libs/qtube/test_helper.go
deleted file mode 100644
index e0fa910..0000000
--- a/server/libs/qtube/test_helper.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package qtube
-
-type StubFile struct {
- Content string
- Offset int64
-}
-
-// Read always copies from the start of Content and never returns io.EOF,
-// which is sufficient for the CopyN-based tests.
-func (file *StubFile) Read(p []byte) (int, error) {
- copied := copy(p, file.Content)
- return copied, nil
-}
-
-func (file *StubFile) Seek(offset int64, whence int) (int64, error) {
- file.Offset = offset
- return offset, nil
-}
-
-func (file *StubFile) Close() error {
- return nil
-}
-
-type stubFiler struct {
- file *StubFile
-}
-
-func (filer *stubFiler) Open(filePath string) (ReadSeekCloser, error) {
- return filer.file, nil
-}
diff --git a/server/libs/walls/access_walls.go b/server/libs/walls/access_walls.go
deleted file mode 100644
index d5995a3..0000000
--- a/server/libs/walls/access_walls.go
+++ /dev/null
@@ -1,102 +0,0 @@
-package walls
-
-import (
- "fmt"
- "net/http"
- "strconv"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/encrypt"
- "github.com/ihexxa/quickshare/server/libs/limiter"
-)
-
-type AccessWalls struct {
- cf *cfg.Config
- IpLimiter limiter.Limiter
- OpLimiter limiter.Limiter
- EncrypterMaker encrypt.EncrypterMaker
-}
-
-func NewAccessWalls(
- cf *cfg.Config,
- ipLimiter limiter.Limiter,
- opLimiter limiter.Limiter,
- encrypterMaker encrypt.EncrypterMaker,
-) Walls {
- return &AccessWalls{
- cf: cf,
- IpLimiter: ipLimiter,
- OpLimiter: opLimiter,
- EncrypterMaker: encrypterMaker,
- }
-}
-
-func (walls *AccessWalls) PassIpLimit(remoteAddr string) bool {
- if !walls.cf.Production {
- return true
- }
- return walls.IpLimiter.Access(remoteAddr, walls.cf.OpIdIpVisit)
-}
-
-func (walls *AccessWalls) PassOpLimit(resourceId string, opId int16) bool {
- if !walls.cf.Production {
- return true
- }
- return walls.OpLimiter.Access(resourceId, opId)
-}
-
-func (walls *AccessWalls) PassLoginCheck(tokenStr string, req *http.Request) bool {
- if !walls.cf.Production {
- return true
- }
-
- return walls.passLoginCheck(tokenStr)
-}
-
-func (walls *AccessWalls) passLoginCheck(tokenStr string) bool {
- token, getLoginTokenOk := walls.GetLoginToken(tokenStr)
- return getLoginTokenOk && token.AdminId == walls.cf.AdminId
-}
-
-func (walls *AccessWalls) GetLoginToken(tokenStr string) (*LoginToken, bool) {
- tokenMaker := walls.EncrypterMaker(string(walls.cf.SecretKeyByte))
- if !tokenMaker.FromStr(tokenStr) {
- return nil, false
- }
-
- adminIdFromToken, adminIdOk := tokenMaker.Get(walls.cf.KeyAdminId)
- expiresStr, expiresStrOk := tokenMaker.Get(walls.cf.KeyExpires)
- if !adminIdOk || !expiresStrOk {
- return nil, false
- }
-
- expires, expiresParseErr := strconv.ParseInt(expiresStr, 10, 64)
- if expiresParseErr != nil ||
- adminIdFromToken != walls.cf.AdminId ||
- expires <= time.Now().Unix() {
- return nil, false
- }
-
- return &LoginToken{
- AdminId: adminIdFromToken,
- Expires: expires,
- }, true
-}
-
-func (walls *AccessWalls) MakeLoginToken(userId string) string {
- expires := time.Now().Add(time.Duration(walls.cf.CookieMaxAge) * time.Second).Unix()
-
- tokenMaker := walls.EncrypterMaker(string(walls.cf.SecretKeyByte))
- tokenMaker.Add(walls.cf.KeyAdminId, userId)
- tokenMaker.Add(walls.cf.KeyExpires, fmt.Sprintf("%d", expires))
-
- tokenStr, ok := tokenMaker.ToStr()
- if !ok {
- return ""
- }
- return tokenStr
-}
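
As a hedged illustration of the login-token round trip implemented by the deleted AccessWalls: the sketch below mirrors the wiring used in the tests (two rate limiters plus the JWT encrypter); the printed output is illustrative only.

package main

import (
	"fmt"

	"github.com/ihexxa/quickshare/server/libs/cfg"
	"github.com/ihexxa/quickshare/server/libs/encrypt"
	"github.com/ihexxa/quickshare/server/libs/limiter"
	"github.com/ihexxa/quickshare/server/libs/walls"
)

func main() {
	config := cfg.NewConfig()
	ipLimiter := limiter.NewRateLimiter(config.LimiterCap, config.LimiterTtl, config.LimiterCyc, config.BucketCap, map[int16]int16{})
	opLimiter := limiter.NewRateLimiter(config.LimiterCap, config.LimiterTtl, config.LimiterCyc, config.BucketCap, map[int16]int16{})
	w := walls.NewAccessWalls(config, ipLimiter, opLimiter, encrypt.JwtEncrypterMaker).(*walls.AccessWalls)

	// Issue a token for the admin user, then validate it again.
	tokenStr := w.MakeLoginToken(config.AdminId)
	if token, ok := w.GetLoginToken(tokenStr); ok {
		fmt.Println("token accepted, expires at", token.Expires)
	}
}
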
diff --git a/server/libs/walls/access_walls_test.go b/server/libs/walls/access_walls_test.go
deleted file mode 100644
index d75a6c2..0000000
--- a/server/libs/walls/access_walls_test.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package walls
-
-import (
- "fmt"
- "testing"
- "time"
-)
-
-import (
- "github.com/ihexxa/quickshare/server/libs/cfg"
- "github.com/ihexxa/quickshare/server/libs/encrypt"
- "github.com/ihexxa/quickshare/server/libs/limiter"
-)
-
-func newAccessWalls(limiterCap int64, limiterTtl int32, limiterCyc int32, bucketCap int16) *AccessWalls {
- config := cfg.NewConfig()
- config.Production = true
- config.LimiterCap = limiterCap
- config.LimiterTtl = limiterTtl
- config.LimiterCyc = limiterCyc
- config.BucketCap = bucketCap
- encrypterMaker := encrypt.JwtEncrypterMaker
- ipLimiter := limiter.NewRateLimiter(config.LimiterCap, config.LimiterTtl, config.LimiterCyc, config.BucketCap, map[int16]int16{})
- opLimiter := limiter.NewRateLimiter(config.LimiterCap, config.LimiterTtl, config.LimiterCyc, config.BucketCap, map[int16]int16{})
-
- return NewAccessWalls(config, ipLimiter, opLimiter, encrypterMaker).(*AccessWalls)
-}
-
-func TestIpLimit(t *testing.T) {
- ip := "0.0.0.0"
- limit := int16(10)
- ttl := int32(60)
- cyc := int32(5)
- walls := newAccessWalls(1000, ttl, cyc, limit)
-
- testIpLimit(t, walls, ip, limit)
- // wait for the token bucket to be refilled
- time.Sleep(time.Duration(cyc) * time.Second)
- testIpLimit(t, walls, ip, limit)
-
- fmt.Println("ip limit: passed")
-}
-
-func testIpLimit(t *testing.T, walls Walls, ip string, limit int16) {
- for i := int16(0); i < limit; i++ {
- if !walls.PassIpLimit(ip) {
- t.Fatalf("ipLimiter: should be passed %d", time.Now().Unix())
- }
- }
-
- if walls.PassIpLimit(ip) {
- t.Fatalf("ipLimiter: should not be passed %d", time.Now().Unix())
- }
-}
-
-func TestOpLimit(t *testing.T) {
- resourceId := "id"
- op1 := int16(1)
- op2 := int16(2)
- limit := int16(10)
- cyc := int32(1)
- walls := newAccessWalls(1000, 5, cyc, limit)
-
- testOpLimit(t, walls, resourceId, op1, limit)
- testOpLimit(t, walls, resourceId, op2, limit)
- // wait for the token bucket to be refilled
- time.Sleep(time.Duration(cyc) * time.Second)
- testOpLimit(t, walls, resourceId, op1, limit)
- testOpLimit(t, walls, resourceId, op2, limit)
-
- fmt.Println("op limit: passed")
-}
-
-func testOpLimit(t *testing.T, walls Walls, resourceId string, op int16, limit int16) {
- for i := int16(0); i < limit; i++ {
- if !walls.PassOpLimit(resourceId, op) {
- t.Fatalf("opLimiter: should be passed")
- }
- }
-
- if walls.PassOpLimit(resourceId, op) {
- t.Fatalf("opLimiter: should not be passed")
- }
-}
-
-func TestLoginCheck(t *testing.T) {
- walls := newAccessWalls(1000, 5, 1, 10)
-
- testValidToken(t, walls)
- testInvalidAdminIdToken(t, walls)
- testExpiredToken(t, walls)
-}
-
-func testValidToken(t *testing.T, walls *AccessWalls) {
- config := cfg.NewConfig()
-
- tokenMaker := encrypt.JwtEncrypterMaker(string(config.SecretKeyByte))
- tokenMaker.Add(config.KeyAdminId, config.AdminId)
- tokenMaker.Add(config.KeyExpires, fmt.Sprintf("%d", time.Now().Unix()+int64(10)))
- tokenStr, getTokenOk := tokenMaker.ToStr()
- if !getTokenOk {
- t.Fatalf("passLoginCheck: fail to generate token")
- }
-
- if !walls.passLoginCheck(tokenStr) {
- t.Fatalf("loginCheck: should be passed")
- }
-
- fmt.Println("loginCheck: valid token passed")
-}
-
-func testInvalidAdminIdToken(t *testing.T, walls *AccessWalls) {
- config := cfg.NewConfig()
-
- tokenMaker := encrypt.JwtEncrypterMaker(string(config.SecretKeyByte))
- tokenMaker.Add(config.KeyAdminId, "invalid admin id")
- tokenMaker.Add(config.KeyExpires, fmt.Sprintf("%d", time.Now().Unix()+int64(10)))
- tokenStr, getTokenOk := tokenMaker.ToStr()
- if !getTokenOk {
- t.Fatalf("passLoginCheck: fail to generate token")
- }
-
- if walls.passLoginCheck(tokenStr) {
- t.Fatalf("loginCheck: should not be passed")
- }
-
- fmt.Println("loginCheck: invalid admin id passed")
-}
-
-func testExpiredToken(t *testing.T, walls *AccessWalls) {
- config := cfg.NewConfig()
-
- tokenMaker := encrypt.JwtEncrypterMaker(string(config.SecretKeyByte))
- tokenMaker.Add(config.KeyAdminId, config.AdminId)
- tokenMaker.Add(config.KeyExpires, fmt.Sprintf("%d", time.Now().Unix()-int64(1)))
- tokenStr, getTokenOk := tokenMaker.ToStr()
- if !getTokenOk {
- t.Fatalf("passLoginCheck: fail to generate token")
- }
-
- if walls.passLoginCheck(tokenStr) {
- t.Fatalf("loginCheck: should not be passed")
- }
-
- fmt.Println("loginCheck: expired token passed")
-}
diff --git a/server/libs/walls/walls.go b/server/libs/walls/walls.go
deleted file mode 100644
index d42306d..0000000
--- a/server/libs/walls/walls.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package walls
-
-import (
- "net/http"
-)
-
-type Walls interface {
- PassIpLimit(remoteAddr string) bool
- PassOpLimit(resourceId string, opId int16) bool
- PassLoginCheck(tokenStr string, req *http.Request) bool
- MakeLoginToken(uid string) string
-}
-
-type LoginToken struct {
- AdminId string
- Expires int64
-}
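
A hedged sketch of how the Walls interface above could guard an http.Handler; the cookie name and the error responses are assumptions for illustration, not the server's actual middleware.

package middleware

import (
	"net/http"

	"github.com/ihexxa/quickshare/server/libs/walls"
)

// Guarded applies the IP limit and login check from Walls before calling next.
func Guarded(w walls.Walls, next http.Handler) http.Handler {
	return http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
		if !w.PassIpLimit(req.RemoteAddr) {
			http.Error(res, "too many requests", http.StatusTooManyRequests)
			return
		}
		cookie, err := req.Cookie("token") // cookie name is an assumption
		if err != nil || !w.PassLoginCheck(cookie.Value, req) {
			http.Error(res, "unauthorized", http.StatusUnauthorized)
			return
		}
		next.ServeHTTP(res, req)
	})
}
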
diff --git a/src/client/web/build/template/index.template.html b/src/client/web/build/template/index.template.html
index 770436a..8737a10 100644
--- a/src/client/web/build/template/index.template.html
+++ b/src/client/web/build/template/index.template.html
@@ -8,10 +8,10 @@
content="initial-scale=1.0, maximum-scale=1, minimum-scale=1, user-scalable=no,uc-fitscreen=yes"
/>
-
-
+