package http

import (
	"crypto/sha256"
	"dwelling-upload/pkg/utils"
	"dwelling-upload/web"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"path"
	"strings"
	"time"

	"git.arav.su/Arav/httpr"
)

// UploadHandlers bundles the logger, storage location, hash salt, retention
// period, and size limits used by the upload, download, and delete endpoints.
type UploadHandlers struct {
	logFile       *log.Logger
	uploadDir     string
	uploadDirSize *int64
	hashSalt      string
	keepForHours  int
	limitStorage  int64
	limitFileSize int64
}

// NewUploadHandlers returns an UploadHandlers configured with the given
// logger, upload directory, salt, retention period in hours, and storage and
// per-file size limits in MiB.
func NewUploadHandlers(lFile *log.Logger, uploadDir string, uploadDirSize *int64, hashSalt string,
	keepForHours int, limStorage, limFileSz int64) *UploadHandlers {
	return &UploadHandlers{
		logFile:       lFile,
		uploadDir:     uploadDir,
		uploadDirSize: uploadDirSize,
		hashSalt:      hashSalt,
		keepForHours:  keepForHours,
		limitStorage:  limStorage,
		limitFileSize: limFileSz}
}

// Index renders the main page showing the retention period, the maximum file
// size, and the storage space still available.
func (h *UploadHandlers) Index(w http.ResponseWriter, r *http.Request) {
	// Limits are configured in MiB; shift left by 20 to get bytes.
	var storCapacity int64 = h.limitStorage << 20
	var fMaxSize int64 = h.limitFileSize << 20

	_, _, availStr := utils.ConvertFileSize(storCapacity - *h.uploadDirSize)
	_, _, fMaxSzStr := utils.ConvertFileSize(fMaxSize)

	web.Index(utils.MainSite(r.Host), h.keepForHours, fMaxSzStr, availStr, w)
}

// Upload accepts a multipart form upload, stores the file under a salted
// SHA-256 hash, and responds with its download link.
func (h *UploadHandlers) Upload(w http.ResponseWriter, r *http.Request) {
	var fMaxSizeBytes int64 = h.limitFileSize << 20
	var storCapacity int64 = h.limitStorage << 20

	r.Body = http.MaxBytesReader(w, r.Body, fMaxSizeBytes)
	if err := r.ParseMultipartForm(fMaxSizeBytes); err != nil {
		log.Println("failed to parse upload form:", err)
		Error(w, r, http.StatusExpectationFailed, "Failed to parse upload form.")
		return
	}

	f, fHandler, err := r.FormFile("file")
	if err != nil {
		log.Println("failed to open incoming file:", err)
		Error(w, r, http.StatusInternalServerError, "Error reading an incoming file.")
		return
	}
	// Remove any temporary files created while parsing the multipart form.
	defer r.MultipartForm.RemoveAll()
	defer f.Close()

	var leftSpace int64 = storCapacity - *h.uploadDirSize

	if leftSpace < fHandler.Size {
		log.Println("not enough space left in storage, only", leftSpace>>20, "MiB left")
		if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
			http.Error(w, "Not enough space left, sorry", http.StatusInternalServerError)
		} else {
			web.ErrorNoSpace(utils.MainSite(r.Host), w)
		}
		return
	}

	s256 := sha256.New()
	if _, err := io.Copy(s256, f); err != nil {
		log.Println("failed to compute a SHA-256 hash:", err)
		Error(w, r, http.StatusInternalServerError, "A hash for the file cannot be computed.")
		return
	}
	fHash := hex.EncodeToString(s256.Sum(nil))

	// The public identifier is the hash of the file content followed by the salt.
	s256.Write([]byte(h.hashSalt))
	fSaltedHash := base64.RawURLEncoding.EncodeToString(s256.Sum(nil))

	// Rewind the uploaded file before copying it to storage.
	f.Seek(0, io.SeekStart)

	fPath := path.Join(h.uploadDir, fSaltedHash)

	_, err = os.Stat(fPath)
	if os.IsNotExist(err) {
		fDst, err := os.Create(fPath)
		if err != nil {
			log.Println("failed to open file for writing:", err)
			Error(w, r, http.StatusInternalServerError, "File cannot be written.")
			return
		}
		defer fDst.Close()

		// We initially set the destination file's size equal to the uploaded
		// file's size, which makes it a sparse file. This lets the inotify
		// watcher get by with a relatively small buffer size, and in testing
		// it really does make a difference.
		// An empty upload needs no pre-allocation (seeking to -1 would fail).
		if fHandler.Size > 0 {
			fDst.Seek(fHandler.Size-1, io.SeekStart)
			fDst.Write([]byte{0})
			fDst.Seek(0, io.SeekStart)
		}

		_, err = io.Copy(fDst, f)
		if err != nil {
			log.Println("failed to copy uploaded file to destination:", err)
			Error(w, r, http.StatusInternalServerError, "Failed to copy uploaded file to the storage.")
			return
		}

		typ, _ := utils.NetworkType(r.Host)

		h.logFile.Printf("| up | %s | %s | %s | SHA256 %s | %s | %d | %s",
			r.Header.Get("X-Real-IP"), typ, fHandler.Filename, fHash, fSaltedHash,
			fHandler.Size, r.UserAgent())

		w.WriteHeader(http.StatusCreated)
	} else {
		// The file is already stored; refresh its modification time to extend
		// its retention instead of writing it again.
		os.Chtimes(fPath, time.Now(), time.Now())
		w.WriteHeader(http.StatusFound)
	}

	downloadURL := path.Join("/", fSaltedHash, fHandler.Filename)
	downloadURLParsed, _ := url.Parse(downloadURL)

	_, scheme := utils.NetworkType(r.Host)
	site := scheme + "://" + r.Host

	if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
		fmt.Fprintln(w, site+downloadURLParsed.String(), "will be kept for", h.keepForHours, "hours")
	} else {
		web.Uploaded(utils.MainSite(r.Host), site, downloadURLParsed.String(), h.keepForHours, w)
	}
}

// Download serves a stored file identified by its salted hash under the name
// given in the URL.
func (h *UploadHandlers) Download(w http.ResponseWriter, r *http.Request) {
	saltedHash := httpr.Param(r, "hash")

	fPath := path.Join(h.uploadDir, saltedHash)

	stat, err := os.Stat(fPath)
	if os.IsNotExist(err) {
		Error(w, r, http.StatusNotFound, "")
		return
	}

	name := httpr.Param(r, "name")

	w.Header().Add("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", name))

	fd, err := os.Open(fPath)
	if err != nil {
		log.Println("failed to open file to read:", err)
		Error(w, r, http.StatusInternalServerError, "Failed to open file to read.")
		return
	}
	defer fd.Close()

	netTyp, _ := utils.NetworkType(r.Host)

	h.logFile.Printf("| dw | %s | %s | %s | %s | %s",
		r.Header.Get("X-Real-IP"), netTyp, name, saltedHash, r.UserAgent())

	http.ServeContent(w, r, fPath, stat.ModTime(), fd)
}

// Delete removes a stored file identified by its salted hash. The hash is
// taken from the URL for DELETE requests and from the form otherwise.
func (h *UploadHandlers) Delete(w http.ResponseWriter, r *http.Request) {
	var saltedHash string

	if r.Method == "DELETE" {
		saltedHash = httpr.Param(r, "hash")
	} else {
		r.ParseForm()
		saltedHash = r.FormValue("hash")
	}

	fPath := path.Join(h.uploadDir, saltedHash)

	_, err := os.Stat(fPath)
	if os.IsNotExist(err) {
		Error(w, r, http.StatusNotFound, "")
		return
	}

	err = os.Remove(fPath)
	if err != nil {
		log.Println("failed to remove a file:", err)
		Error(w, r, http.StatusInternalServerError, "Failed to remove a file.")
		return
	}

	netTyp, _ := utils.NetworkType(r.Host)

	h.logFile.Printf("| dt | %s | %s | %s | %s",
		r.Header.Get("X-Real-IP"), netTyp, saltedHash, r.UserAgent())

	if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
		fmt.Fprintln(w, "File was successfully deleted.")
	} else {
		web.Deleted(utils.MainSite(r.Host), w)
	}
}

// Error responds with a plain-text error for curl and Wget clients and with an
// HTML error page for everyone else.
func Error(w http.ResponseWriter, r *http.Request, code int, reason string) {
	if strings.Contains(r.UserAgent(), "curl") || strings.Contains(r.UserAgent(), "Wget") {
		http.Error(w, reason, code)
		return
	}
	web.ErrorXXX(utils.MainSite(r.Host), code, reason, w)
}

// RobotsTxt serves the embedded robots.txt.
func RobotsTxt(w http.ResponseWriter, r *http.Request) {
	data, _ := web.AssetsGetFile("robots.txt")
	w.Write(data)
}

// Favicon serves the embedded SVG favicon.
func Favicon(w http.ResponseWriter, r *http.Request) {
	data, _ := web.AssetsGetFile("img/favicon.svg")
	w.Write(data)
}
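
// Usage sketch (illustrative only): the handlers above are ordinary net/http
// handlers constructed via NewUploadHandlers (limits are given in MiB, the
// retention period in hours). Download and Delete read the "hash" and "name"
// parameters through httpr.Param, so they belong on the project's httpr
// router; the directory path, salt, limits, routes, and the uploadDirSize
// bookkeeping below are assumptions for illustration, not part of this package.
//
//	logFile := log.New(os.Stdout, "", log.LstdFlags)
//	var dirSize int64 // kept up to date elsewhere, e.g. by the inotify watcher
//	h := NewUploadHandlers(logFile, "/srv/upload", &dirSize, "long-random-salt",
//		24 /* hours */, 1024 /* MiB of storage */, 256 /* MiB per file */)
//
//	mux := http.NewServeMux()
//	mux.HandleFunc("/", h.Index)
//	mux.HandleFunc("/upload", h.Upload)
//	mux.HandleFunc("/robots.txt", RobotsTxt)
//	mux.HandleFunc("/favicon.svg", Favicon)
//	log.Fatal(http.ListenAndServe(":8080", mux))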