2023-04-24 00:49:53 +04:00
|
|
|
package http
|
2022-02-07 04:49:21 +04:00
|
|
|
|
|
|
|
import (
|
|
|
|
"crypto/sha256"
|
2022-02-07 19:42:09 +04:00
|
|
|
"dwelling-upload/pkg/logging"
|
2022-02-07 04:49:21 +04:00
|
|
|
"dwelling-upload/pkg/utils"
|
2022-05-25 00:27:30 +04:00
|
|
|
"dwelling-upload/web"
|
2022-02-07 04:49:21 +04:00
|
|
|
"encoding/base64"
|
|
|
|
"encoding/hex"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2023-05-24 21:45:46 +04:00
|
|
|
"log"
|
2022-02-07 04:49:21 +04:00
|
|
|
"net/http"
|
2022-02-08 19:21:44 +04:00
|
|
|
"net/url"
|
2022-02-07 04:49:21 +04:00
|
|
|
"os"
|
|
|
|
"path"
|
2022-02-08 19:02:37 +04:00
|
|
|
"strings"
|
2022-02-07 04:49:21 +04:00
|
|
|
"time"
|
|
|
|
)
|
|
|
|
|
|
|
|
// UploadHandlers bundles the HTTP handlers of the upload service together
// with their shared configuration and state.
type UploadHandlers struct {
	logFile *logging.Logger // request log; receives "up"/"dw"/"dt" lines

	uploadDir     string // directory uploaded files are stored in
	uploadDirSize *int64 // current total size of uploadDir in bytes; updated externally — TODO confirm the writer

	hashSalt string // salt mixed into the SHA-256 content hash that names stored files

	keepForHours  int   // retention period (hours) reported to clients
	limitStorage  int64 // total storage capacity in MiB (shifted <<20 to bytes at use sites)
	limitFileSize int64 // per-file size limit in MiB (shifted <<20 to bytes at use sites)
}
|
|
|
|
|
2023-05-24 22:19:41 +04:00
|
|
|
func NewUploadHandlers(lFile *logging.Logger, uploadDir string, uploadDirSize *int64,
|
|
|
|
hashSalt string, keepForHours int, limStorage, limFileSz int64) *UploadHandlers {
|
2022-02-07 04:49:21 +04:00
|
|
|
return &UploadHandlers{
|
2023-05-21 21:27:44 +04:00
|
|
|
logFile: lFile,
|
2023-05-24 22:19:41 +04:00
|
|
|
uploadDir: uploadDir,
|
|
|
|
uploadDirSize: uploadDirSize,
|
|
|
|
hashSalt: hashSalt,
|
|
|
|
keepForHours: keepForHours,
|
|
|
|
limitStorage: limStorage,
|
|
|
|
limitFileSize: limFileSz}
|
2022-02-07 04:49:21 +04:00
|
|
|
}
|
|
|
|
|
2022-03-06 22:34:34 +04:00
|
|
|
func (*UploadHandlers) AssetsFS() http.FileSystem {
|
2022-05-25 00:27:30 +04:00
|
|
|
return web.Assets()
|
2022-02-08 17:46:44 +04:00
|
|
|
}
|
|
|
|
|
2022-02-07 04:49:21 +04:00
|
|
|
func (h *UploadHandlers) Index(w http.ResponseWriter, r *http.Request) {
|
2023-05-24 22:19:41 +04:00
|
|
|
var storCapacity int64 = h.limitStorage << 20
|
|
|
|
var fMaxSize int64 = h.limitFileSize << 20
|
2022-02-07 04:49:21 +04:00
|
|
|
|
|
|
|
_, _, capStr := utils.ConvertFileSize(storCapacity)
|
2022-02-08 00:25:50 +04:00
|
|
|
_, _, usedStr := utils.ConvertFileSize(*h.uploadDirSize)
|
|
|
|
_, _, availStr := utils.ConvertFileSize(storCapacity - *h.uploadDirSize)
|
2022-02-07 04:49:21 +04:00
|
|
|
_, _, fMaxSzStr := utils.ConvertFileSize(fMaxSize)
|
|
|
|
|
2023-05-24 22:19:41 +04:00
|
|
|
web.Index(utils.MainSite(r.Host), storCapacity, *h.uploadDirSize, h.keepForHours, fMaxSzStr, usedStr, capStr, availStr, w)
|
2022-02-07 04:49:21 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
func (h *UploadHandlers) Upload(w http.ResponseWriter, r *http.Request) {
|
2023-05-24 22:19:41 +04:00
|
|
|
var fMaxSizeBytes int64 = h.limitFileSize << 20
|
|
|
|
var storCapacity int64 = h.limitStorage << 20
|
2022-02-07 04:49:21 +04:00
|
|
|
|
|
|
|
r.Body = http.MaxBytesReader(w, r.Body, fMaxSizeBytes)
|
|
|
|
|
|
|
|
if err := r.ParseMultipartForm(fMaxSizeBytes); err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to parse upload form:", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusExpectationFailed, "Failed to parse upload form.")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
f, fHandler, err := r.FormFile("file")
|
|
|
|
if err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to open incoming file:", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusInternalServerError, "Error reading an incoming file.")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
defer func() {
|
|
|
|
os.Remove(fHandler.Filename)
|
|
|
|
f.Close()
|
|
|
|
}()
|
|
|
|
|
2022-02-09 01:48:19 +04:00
|
|
|
var leftSpace int64 = storCapacity - *h.uploadDirSize
|
|
|
|
|
|
|
|
if leftSpace < fHandler.Size {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("not enough space left in storage, only", leftSpace>>20, "MiB left")
|
2023-05-25 00:08:52 +04:00
|
|
|
if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
|
|
|
|
http.Error(w, "Not enough space left, sorry", http.StatusInternalServerError)
|
|
|
|
} else {
|
|
|
|
web.ErrorNoSpace(utils.MainSite(r.Host), w)
|
|
|
|
}
|
2023-05-24 23:57:02 +04:00
|
|
|
return
|
2022-02-09 01:48:19 +04:00
|
|
|
}
|
|
|
|
|
2022-02-07 04:49:21 +04:00
|
|
|
s256 := sha256.New()
|
|
|
|
if _, err := io.Copy(s256, f); err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to compute a SHA-256 hash:", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusInternalServerError, "A hash for the file cannot be computed.")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-03-07 23:40:41 +04:00
|
|
|
fHash := hex.EncodeToString(s256.Sum(nil))
|
2023-05-24 22:19:41 +04:00
|
|
|
s256.Write([]byte(h.hashSalt))
|
2022-02-07 04:49:21 +04:00
|
|
|
fSaltedHash := base64.RawURLEncoding.EncodeToString(s256.Sum(nil))
|
|
|
|
|
|
|
|
f.Seek(0, io.SeekStart)
|
|
|
|
|
2023-05-24 22:19:41 +04:00
|
|
|
fPath := path.Join(h.uploadDir, fSaltedHash)
|
2022-02-07 04:49:21 +04:00
|
|
|
|
|
|
|
_, err = os.Stat(fPath)
|
|
|
|
if os.IsNotExist(err) {
|
|
|
|
fDst, err := os.Create(fPath)
|
|
|
|
if err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to open file for writing", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusInternalServerError, "File cannot be written.")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
2022-02-07 22:27:34 +04:00
|
|
|
defer fDst.Close()
|
2022-02-07 04:49:21 +04:00
|
|
|
|
2022-02-08 02:12:01 +04:00
|
|
|
// We initialy set a dst file size to occupy space equal to uploaded's size.
|
|
|
|
// This is called a sparse file, if you need to know.
|
|
|
|
// It allows us to have a relatively small buffer size for inotify watcher.
|
|
|
|
// And it really affects that. I tested it.
|
|
|
|
fDst.Seek(fHandler.Size-1, io.SeekStart)
|
|
|
|
fDst.Write([]byte{0})
|
|
|
|
fDst.Seek(0, io.SeekStart)
|
|
|
|
|
2022-02-07 22:27:34 +04:00
|
|
|
_, err = io.Copy(fDst, f)
|
2022-02-07 04:49:21 +04:00
|
|
|
if err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to copy uploaded file to destination:", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusInternalServerError, "Failed to copy uploaded file to the storage.")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-03-07 23:40:41 +04:00
|
|
|
typ, _ := utils.NetworkType(r.Host)
|
2022-02-08 19:02:37 +04:00
|
|
|
|
2023-05-21 21:27:44 +04:00
|
|
|
h.logFile.Printf("up | %s | %s | %s | SHA256 %s | %s | %d | %s", r.Header.Get("X-Real-IP"), typ,
|
2022-03-07 23:40:41 +04:00
|
|
|
fHandler.Filename, fHash, fSaltedHash, fHandler.Size, r.UserAgent())
|
2022-02-07 04:49:21 +04:00
|
|
|
|
|
|
|
w.WriteHeader(http.StatusCreated)
|
|
|
|
} else {
|
|
|
|
os.Chtimes(fPath, time.Now(), time.Now())
|
2022-02-07 22:27:34 +04:00
|
|
|
w.WriteHeader(http.StatusFound)
|
2022-02-07 04:49:21 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
downloadURL := path.Join("/f", fSaltedHash, fHandler.Filename)
|
2022-02-08 19:21:44 +04:00
|
|
|
downloadURLParsed, _ := url.Parse(downloadURL)
|
2022-02-07 04:49:21 +04:00
|
|
|
|
2022-08-01 01:09:11 +04:00
|
|
|
_, scheme := utils.NetworkType(r.Host)
|
|
|
|
site := scheme + "://" + r.Host
|
|
|
|
|
2022-02-08 19:02:37 +04:00
|
|
|
if strings.Contains(r.UserAgent(), "curl") {
|
2022-08-01 01:09:11 +04:00
|
|
|
w.Write([]byte(site + downloadURLParsed.String() + "\r\n"))
|
2022-02-08 19:02:37 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-05-25 00:08:52 +04:00
|
|
|
if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
|
|
|
|
fmt.Fprintln(w, downloadURLParsed.String(), "will be kept for", h.keepForHours)
|
|
|
|
} else {
|
|
|
|
web.Uploaded(utils.MainSite(r.Host), site, downloadURLParsed.String(), h.keepForHours, w)
|
|
|
|
}
|
2022-02-07 04:49:21 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
func (h *UploadHandlers) Download(w http.ResponseWriter, r *http.Request) {
|
2023-04-24 00:49:53 +04:00
|
|
|
saltedHash := GetURLParam(r, "hash")
|
2022-02-07 04:49:21 +04:00
|
|
|
|
2023-05-24 22:19:41 +04:00
|
|
|
path := path.Join(h.uploadDir, saltedHash)
|
2022-02-07 04:49:21 +04:00
|
|
|
|
|
|
|
stat, err := os.Stat(path)
|
|
|
|
if os.IsNotExist(err) {
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusNotFound, "")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-04-24 00:49:53 +04:00
|
|
|
name := GetURLParam(r, "name")
|
2022-03-29 18:41:05 +04:00
|
|
|
|
2022-02-07 04:49:21 +04:00
|
|
|
w.Header().Add("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", name))
|
|
|
|
|
|
|
|
fd, err := os.Open(path)
|
|
|
|
if err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to open file to read:", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusInternalServerError, "Failed to open file to read.")
|
2022-02-07 04:49:21 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
defer fd.Close()
|
|
|
|
|
2022-02-08 19:02:37 +04:00
|
|
|
netTyp, _ := utils.NetworkType(r.Host)
|
|
|
|
|
2023-05-21 21:27:44 +04:00
|
|
|
h.logFile.Printf("dw | %s | %s | %s | %s | %s", r.Header.Get("X-Real-IP"), netTyp, name, saltedHash, r.UserAgent())
|
2022-02-07 19:42:09 +04:00
|
|
|
|
2022-02-07 04:49:21 +04:00
|
|
|
http.ServeContent(w, r, path, stat.ModTime(), fd)
|
|
|
|
}
|
2022-02-07 22:51:47 +04:00
|
|
|
|
2022-07-01 03:31:33 +04:00
|
|
|
func (h *UploadHandlers) Delete(w http.ResponseWriter, r *http.Request) {
|
2022-07-01 03:43:52 +04:00
|
|
|
var saltedHash string
|
|
|
|
if r.Method == "DELETE" {
|
2023-04-24 00:49:53 +04:00
|
|
|
saltedHash = GetURLParam(r, "hash")
|
2022-07-01 03:43:52 +04:00
|
|
|
} else {
|
|
|
|
r.ParseForm()
|
|
|
|
saltedHash = r.FormValue("hash")
|
|
|
|
}
|
2022-07-01 03:31:33 +04:00
|
|
|
|
2023-05-24 22:19:41 +04:00
|
|
|
path := path.Join(h.uploadDir, saltedHash)
|
2022-07-01 03:31:33 +04:00
|
|
|
|
|
|
|
_, err := os.Stat(path)
|
|
|
|
if os.IsNotExist(err) {
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusNotFound, "")
|
2022-07-01 03:31:33 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
err = os.Remove(path)
|
|
|
|
if err != nil {
|
2023-05-24 21:45:46 +04:00
|
|
|
log.Println("failed to remove a file:", err)
|
2023-05-24 23:50:49 +04:00
|
|
|
Error(w, r, http.StatusInternalServerError, "Failed to remove a file.")
|
2022-07-01 03:31:33 +04:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
netTyp, _ := utils.NetworkType(r.Host)
|
|
|
|
|
2023-05-21 21:27:44 +04:00
|
|
|
h.logFile.Printf("dt | %s | %s | %s | %s", r.Header.Get("X-Real-IP"), netTyp, saltedHash, r.UserAgent())
|
2022-07-01 03:31:33 +04:00
|
|
|
|
|
|
|
w.WriteHeader(http.StatusNoContent)
|
2023-05-25 00:08:52 +04:00
|
|
|
if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
|
|
|
|
fmt.Fprintln(w, "File was successfully deleted.")
|
|
|
|
} else {
|
|
|
|
web.Deleted(utils.MainSite(r.Host), w)
|
|
|
|
}
|
2022-07-01 03:31:33 +04:00
|
|
|
}
|
|
|
|
|
2023-05-24 23:50:10 +04:00
|
|
|
func Error(w http.ResponseWriter, r *http.Request, code int, reason string) {
|
|
|
|
if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
|
|
|
|
http.Error(w, reason, code)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
web.ErrorXXX(utils.MainSite(r.Host), code, reason, w)
|
|
|
|
}
|
|
|
|
|
2023-05-13 16:29:32 +04:00
|
|
|
func RobotsTxt(w http.ResponseWriter, r *http.Request) {
|
2023-05-13 16:00:38 +04:00
|
|
|
data, _ := web.AssetsGetFile("robots.txt")
|
|
|
|
w.Write(data)
|
|
|
|
}
|
2023-05-21 20:13:15 +04:00
|
|
|
|
|
|
|
func Favicon(w http.ResponseWriter, r *http.Request) {
|
|
|
|
data, _ := web.AssetsGetFile("img/favicon.svg")
|
|
|
|
w.Write(data)
|
|
|
|
}
|