dwelling-upload/internal/http/handlers.go

package http

import (
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"path"
	"strings"
	"time"

	"git.arav.su/Arav/httpr"

	"dwelling-upload/pkg/utils"
	"dwelling-upload/web"
)
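
// UploadHandlers bundles the logger, upload directory state and size limits
// shared by the upload, download and delete handlers.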
type UploadHandlers struct {
	logFile *log.Logger

	uploadDir     string
	uploadDirSize *int64

	hashSalt      string
	keepForHours  int
	limitStorage  int64
	limitFileSize int64
}
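
// NewUploadHandlers constructs an UploadHandlers. Storage and file size
// limits are given in MiB, the retention period in hours. A minimal wiring
// sketch (the concrete values below are illustrative, not taken from the
// project configuration):
//
//	var dirSize int64
//	h := NewUploadHandlers(logger, "/srv/upload", &dirSize, "salt", 24, 10240, 512)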
func NewUploadHandlers(lFile *log.Logger, uploadDir string, uploadDirSize *int64,
	hashSalt string, keepForHours int, limStorage, limFileSz int64) *UploadHandlers {
	return &UploadHandlers{
		logFile:       lFile,
		uploadDir:     uploadDir,
		uploadDirSize: uploadDirSize,
		hashSalt:      hashSalt,
		keepForHours:  keepForHours,
		limitStorage:  limStorage,
		limitFileSize: limFileSz}
}
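
// Index renders the landing page with the retention period, the maximum
// allowed file size and the storage space still available.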
func (h *UploadHandlers) Index(w http.ResponseWriter, r *http.Request) {
	var storCapacity int64 = h.limitStorage << 20
	var fMaxSize int64 = h.limitFileSize << 20

	_, _, availStr := utils.ConvertFileSize(storCapacity - *h.uploadDirSize)
	_, _, fMaxSzStr := utils.ConvertFileSize(fMaxSize)

	web.Index(utils.MainSite(r.Host), h.keepForHours, fMaxSzStr, availStr, w)
}
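
// Upload takes a file from a multipart form field named "file", stores it in
// the upload directory under a salted-hash name and replies with its
// download link.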
func (h *UploadHandlers) Upload(w http.ResponseWriter, r *http.Request) {
	var fMaxSizeBytes int64 = h.limitFileSize << 20
	var storCapacity int64 = h.limitStorage << 20

	r.Body = http.MaxBytesReader(w, r.Body, fMaxSizeBytes)

	if err := r.ParseMultipartForm(fMaxSizeBytes); err != nil {
		log.Println("failed to parse upload form:", err)
		Error(w, r, "Failed to parse upload form.", http.StatusExpectationFailed)
		return
	}

	f, fHandler, err := r.FormFile("file")
	if err != nil {
		log.Println("failed to open incoming file:", err)
		Error(w, r, "Error reading an incoming file.", http.StatusInternalServerError)
		return
	}
	// Clean up the temporary files created while parsing the multipart form.
	defer r.MultipartForm.RemoveAll()
	defer f.Close()

	var leftSpace int64 = storCapacity - *h.uploadDirSize
	if leftSpace < fHandler.Size {
		log.Println("not enough space left in storage, only", leftSpace>>20, "MiB left")
		Error(w, r, "Not enough space left, sorry.", http.StatusInternalServerError)
		return
	}

	s256 := sha256.New()
	if _, err := io.Copy(s256, f); err != nil {
		log.Println("failed to compute a SHA-256 hash:", err)
		Error(w, r, "A hash for the file cannot be computed.", http.StatusInternalServerError)
		return
	}

	// Plain content hash, recorded in the log.
	fHash := hex.EncodeToString(s256.Sum(nil))

	// The stored name is derived from the content hash mixed with the salt
	// and the current time.
	s256.Write([]byte(h.hashSalt))
	s256.Write([]byte(time.Now().String()))
	fSaltedHash := base64.RawURLEncoding.EncodeToString(s256.Sum(nil))

	f.Seek(0, io.SeekStart)

	fPath := path.Join(h.uploadDir, fSaltedHash)

	_, err = os.Stat(fPath)
	if os.IsNotExist(err) {
		fDst, err := os.Create(fPath)
		if err != nil {
			log.Println("failed to open file for writing:", err)
			Error(w, r, "File cannot be written.", http.StatusInternalServerError)
			return
		}
		defer fDst.Close()

		// We initially extend the destination file to the size of the upload by
		// seeking to the last byte and writing a single zero, which creates a
		// sparse file. This lets the inotify watcher get by with a relatively
		// small buffer, and in testing it made a noticeable difference.
		fDst.Seek(fHandler.Size-1, io.SeekStart)
		fDst.Write([]byte{0})
		fDst.Seek(0, io.SeekStart)

		if _, err = io.Copy(fDst, f); err != nil {
			log.Println("failed to copy uploaded file to destination:", err)
			Error(w, r, "Failed to copy uploaded file to the storage.", http.StatusInternalServerError)
			return
		}

		typ, _ := utils.NetworkType(r.Host)
		ip := r.Header.Get("X-Real-IP")
		if typ != "www" && typ != "ygg" {
			ip = ""
		}

		h.logFile.Printf("| up | %s | %s | %s | SHA256 %s | %s | %d | %s",
			ip, typ, fHandler.Filename, fHash, fSaltedHash, fHandler.Size, r.UserAgent())

		w.WriteHeader(http.StatusCreated)
	} else {
		os.Chtimes(fPath, time.Now(), time.Now())
		w.WriteHeader(http.StatusFound)
	}

	downloadURL := path.Join("/", fSaltedHash, fHandler.Filename)
	downloadURLParsed, _ := url.Parse(downloadURL)

	_, scheme := utils.NetworkType(r.Host)
	site := scheme + "://" + r.Host

	if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
		fmt.Fprintln(w, site+downloadURLParsed.String(), "will be kept for", h.keepForHours, "hours")
	} else {
		web.Uploaded(utils.MainSite(r.Host), site, downloadURLParsed.String(), h.keepForHours, w)
	}
}
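
// Download serves a stored file identified by its salted hash, using the
// "name" URL parameter as the attachment file name.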
func (h *UploadHandlers) Download(w http.ResponseWriter, r *http.Request) {
	saltedHash := httpr.Param(r, "hash")

	fPath := path.Join(h.uploadDir, saltedHash)

	stat, err := os.Stat(fPath)
	if os.IsNotExist(err) {
		Error(w, r, "", http.StatusNotFound)
		return
	}

	name := httpr.Param(r, "name")
	w.Header().Add("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", name))

	fd, err := os.Open(fPath)
	if err != nil {
		log.Println("failed to open file to read:", err)
		Error(w, r, "Failed to open file to read.", http.StatusInternalServerError)
		return
	}
	defer fd.Close()

	typ, _ := utils.NetworkType(r.Host)
	ip := r.Header.Get("X-Real-IP")
	if typ != "www" && typ != "ygg" {
		ip = ""
	}

	h.logFile.Printf("| dw | %s | %s | %s | %s | %s",
		ip, typ, name, saltedHash, r.UserAgent())

	http.ServeContent(w, r, fPath, stat.ModTime(), fd)
}
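
// Delete removes a stored file identified by its salted hash, taken from the
// URL on DELETE requests or from the submitted form otherwise.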
func (h *UploadHandlers) Delete(w http.ResponseWriter, r *http.Request) {
	var saltedHash string
	if r.Method == http.MethodDelete {
		saltedHash = httpr.Param(r, "hash")
	} else {
		r.ParseForm()
		saltedHash = r.FormValue("hash")
	}

	fPath := path.Join(h.uploadDir, saltedHash)

	if _, err := os.Stat(fPath); os.IsNotExist(err) {
		Error(w, r, "", http.StatusNotFound)
		return
	}

	if err := os.Remove(fPath); err != nil {
		log.Println("failed to remove a file:", err)
		Error(w, r, "Failed to remove a file.", http.StatusInternalServerError)
		return
	}

	typ, _ := utils.NetworkType(r.Host)
	ip := r.Header.Get("X-Real-IP")
	if typ != "www" && typ != "ygg" {
		ip = ""
	}

	h.logFile.Printf("| dt | %s | %s | %s | %s",
		ip, typ, saltedHash, r.UserAgent())

	if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
		fmt.Fprintln(w, "File was successfully deleted.")
	} else {
		web.Deleted(utils.MainSite(r.Host), w)
	}
}
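
// Error replies with a plain-text error to curl/Wget clients and with the
// rendered error page otherwise.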
func Error(w http.ResponseWriter, r *http.Request, reason string, code int) {
	if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
		http.Error(w, reason, code)
		return
	}

	w.WriteHeader(code)
	web.ErrorXXX(utils.MainSite(r.Host), code, reason, w)
}
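
// RobotsTxt serves robots.txt from the bundled web assets.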
func RobotsTxt(w http.ResponseWriter, r *http.Request) {
	data, _ := web.AssetsGetFile("robots.txt")
	w.Write(data)
}
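
// Favicon serves the site icon from the bundled web assets.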
func Favicon(w http.ResponseWriter, r *http.Request) {
	data, _ := web.AssetsGetFile("img/favicon.svg")
	w.Write(data)
}