Enhancement: File Handling and Compression Support #4

Merged
delorean merged 2 commits from compression into main 2024-02-28 18:23:50 +00:00
2 changed files with 97 additions and 23 deletions

main.go

@@ -7,6 +7,9 @@ import (
 	"os"
 	"strconv"
 	"time"
+	"compress/gzip"
+	"strings"
+	"bytes"
 	"github.com/BurntSushi/toml"
 	"github.com/gabriel-vasile/mimetype"
@@ -109,31 +112,76 @@ func Exists(path string) bool {
 	return true
 }
 
+func isValidGzipHeader(data []byte) bool {
+	return len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b
+}
+
 func UploadHandler(w http.ResponseWriter, r *http.Request) {
 	// expiry time
 	var ttl int64
 	ttl = 0
-	file, _, err := r.FormFile("file")
+	var file, header, err = r.FormFile("file")
 	if err != nil {
 		w.WriteHeader(http.StatusBadRequest)
 		return
 	}
 	defer file.Close()
-	mtype, err := mimetype.DetectReader(file)
-	if err != nil {
-		w.Write([]byte("error detecting the mime type of your file\n"))
-		return
-	}
-	file.Seek(0, 0)
+	var buffer bytes.Buffer
+	_, err = io.Copy(&buffer, file)
+	if err != nil {
+		log.Error().Err(err).Msg("error reading file data")
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	var data bytes.Buffer
+	var name = header.Filename
+	println("file: " + name)
+	if strings.HasSuffix(name, ".5000") {
+		buffer.WriteTo(&data)
+		if isValidGzipHeader(data.Bytes()) {
+			gz, err := gzip.NewReader(&data)
+			if err != nil {
+				log.Error().Err(err).Msg("error creating gzip reader")
+				w.WriteHeader(http.StatusInternalServerError)
+				return
+			}
+			defer gz.Close()
+			_, err = io.Copy(&buffer, gz)
+			if err != nil {
+				log.Error().Err(err).Msg("error decompressing gzip file")
+				w.WriteHeader(http.StatusInternalServerError)
+				return
+			}
+			name = strings.TrimSuffix(name, ".5000")
+			data.Write(buffer.Bytes())
+		} else {
+			log.Error().Msg("invalid gzip file")
+			w.WriteHeader(http.StatusBadRequest)
+			return
+		}
+	} else {
+		data.Write(buffer.Bytes())
+	}
+	mtype, err := mimetype.DetectReader(&data)
+	if err != nil {
+		log.Error().Err(err).Msg("error detecting MIME type")
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
 	// Check if expiry time is present and length is too long
 	if r.PostFormValue("expiry") != "" {
 		ttl, err = strconv.ParseInt(r.PostFormValue("expiry"), 10, 64)
 		if err != nil {
 			log.Error().Err(err).Msg("expiry could not be parsed")
+			w.WriteHeader(http.StatusBadRequest)
 		} else {
 			// Get maximum ttl length from config and kill upload if specified ttl is too long, this can probably be handled better in the future
 			if ttl < 1 || ttl > int64(conf.MaxTTL) {
@@ -143,13 +191,12 @@ func UploadHandler(w http.ResponseWriter, r *http.Request) {
 		}
 	}
 	// Default to conf if not present
 	if ttl == 0 {
 		ttl = int64(conf.DefaultTTL)
 	}
 	// generate + check name
-	var name string
 	for {
 		id := NameGen()
 		name = id + mtype.Extension()
@@ -167,15 +214,22 @@ func UploadHandler(w http.ResponseWriter, r *http.Request) {
 		log.Error().Err(err).Msg("failed to put expiry")
 	}
-	f, err := os.OpenFile(conf.FileFolder+"/"+name, os.O_WRONLY|os.O_CREATE, 0644)
+	f, err := os.OpenFile(conf.FileFolder+"/"+name, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0644)
 	if err != nil {
 		log.Error().Err(err).Msg("error opening a file for write")
 		w.WriteHeader(http.StatusInternalServerError) // change to json
 		return
 	}
 	defer f.Close()
-	io.Copy(f, file)
+	_, err = io.Copy(f, &buffer)
+	if err != nil {
+		log.Error().Err(err).Msg("error copying file")
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	buffer.Reset()
+	data.Reset()
 	log.Info().Str("name", name).Int64("ttl", ttl).Msg("wrote new file")
 	hostedurl := "https://" + conf.VHost + "/uploads/" + name
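
Note: the handler above only treats an upload as compressed when its filename ends in ".5000" and the body starts with the gzip magic bytes 0x1f 0x8b; anything else is stored verbatim. The following is a minimal Go client sketch of that contract, not part of the change itself. The "/upload" route, base URL, sample filename, and expiry value are assumptions (the diff does not show where UploadHandler is registered); only the multipart field names "file"/"expiry" and the ".5000" suffix come from the code.

// Hedged sketch: gzip a local file and post it the way the new handler expects.
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"mime/multipart"
	"net/http"
	"os"
)

func main() {
	raw, err := os.ReadFile("example.txt") // sample input, hypothetical path
	if err != nil {
		panic(err)
	}

	// Compress the payload once; the server checks the 0x1f 0x8b magic bytes.
	var gzipped bytes.Buffer
	gw := gzip.NewWriter(&gzipped)
	gw.Write(raw)
	gw.Close()

	// Build the multipart body with the ".5000" filename suffix the handler strips.
	var body bytes.Buffer
	mw := multipart.NewWriter(&body)
	part, _ := mw.CreateFormFile("file", "example.txt.5000")
	part.Write(gzipped.Bytes())
	mw.WriteField("expiry", "3600") // optional TTL; units follow conf.MaxTTL (assumed)
	mw.Close()

	// "/upload" is an assumed route for UploadHandler.
	resp, err := http.Post("https://example.com/upload", mw.FormDataContentType(), &body)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}

Posting the same body without the ".5000" suffix should hit the passthrough branch, so the gzipped bytes would be stored as-is.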


@@ -1,6 +1,7 @@
 <!DOCTYPE html>
 <html style="overflow: hidden;">
 <head>
+    <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/pako/1.0.4/pako_deflate.min.js"></script>
     <script type="text/javascript" src="//code.jquery.com/jquery-1.10.2.min.js"></script>
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <link rel="icon" href="fist.ico">
@@ -241,16 +242,35 @@
 </div>
 </body>
 </html>
 <script>
-    $('#chooseFile').bind('change', function () {
-        var filename = $("#chooseFile").val();
-        if (/^\s*$/.test(filename)) {
-            $(".file-upload").removeClass('active');
-            $("#noFile").text("No file chosen...");
-        }
-        else {
-            $(".file-upload").addClass('active');
-            $("#noFile").text(filename.replace("C:\\fakepath\\", ""));
-        }
-    });
+    $(document).ready(function () {
+        $('#chooseFile').bind('change', function (e) {
+            var file = e.target.files[0];
+            if (!file) {
+                $(".file-upload").removeClass('active');
+                $("#noFile").text("No file chosen...");
+                return;
+            }
+            $(".file-upload").addClass('active');
+            $("#noFile").text(file.name.replace("C:\\fakepath\\", ""));
+            var reader = new FileReader();
+            reader.onload = function(event) {
+                var binaryData = event.target.result;
+                // Compress the file data
+                var compressed = pako.gzip(new Uint8Array(binaryData), { level:1});
+                var compressedFile = new Blob([compressed], { type: "application/gzip" });
+                updateFileInput(compressedFile, file.name);
+            };
+            reader.readAsArrayBuffer(file);
+        });
+        function updateFileInput(compressedFile, fileName) {
+            var dataTransfer = new DataTransfer();
+            dataTransfer.items.add(new File([compressedFile], fileName + '.5000'));
+            $('#chooseFile')[0].files = dataTransfer.files;
+        }
+    });
 </script>
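
The browser side compresses with pako's gzip mode (level 1) and renames the upload to "<original name>.5000", so the bytes the server receives are standard RFC 1952 gzip. Below is a small Go round-trip sketch of the server's validation path, not part of the change: isValidGzipHeader is copied from the main.go diff above, and the sample payload is made up.

// Hedged sketch: verify the magic-byte check and decompression round-trip.
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
)

func isValidGzipHeader(data []byte) bool {
	return len(data) >= 2 && data[0] == 0x1f && data[1] == 0x8b
}

func main() {
	// Compress a sample payload the way the browser side would.
	payload := []byte("hello from the browser")
	var compressed bytes.Buffer
	gw := gzip.NewWriter(&compressed)
	gw.Write(payload)
	gw.Close()

	// The handler first checks the two gzip magic bytes...
	fmt.Println("valid gzip header:", isValidGzipHeader(compressed.Bytes()))

	// ...then streams the body through compress/gzip to recover the original bytes.
	gr, err := gzip.NewReader(&compressed)
	if err != nil {
		panic(err)
	}
	defer gr.Close()
	out, _ := io.ReadAll(gr)
	fmt.Println("round-trip matches:", bytes.Equal(out, payload))
}

If the magic-byte check fails, the handler rejects the upload with 400 Bad Request instead of attempting to decompress.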