diff --git a/db/database.go b/db/database.go
index 9e76362..0ea68aa 100644
--- a/db/database.go
+++ b/db/database.go
@@ -235,27 +235,6 @@ func (db *mySQLdb) GetFiles(ownerID string) ([]*models.File, error) {
 	return files, err
 }
 
-func (db *mySQLdb) UpdateUploadedByte(byte int64, fileID string) {
-	var file models.File
-	db.DB.Table("files").Where("id = ?", fileID).First(&file)
-	file.UploadedByte = byte
-	db.Save(&file)
-}
-
-func (db *mySQLdb) UpdateUploadedChunk(index int64, fileID string) {
-	var file models.File
-	db.DB.Table("files").Where("id = ?", fileID).First(&file)
-	file.UploadedChunk = index
-	db.Save(&file)
-}
-
-func (db *mySQLdb) FinalizeFileUpload(fileID string) {
-	var file models.File
-	db.DB.Table("files").Where("id = ?", fileID).First(&file)
-	file.Done = true
-	db.Save(&file)
-}
-
 func (db *mySQLdb) InitializeTotp(email string, secret string) error {
 	var user models.User
 	err := db.DB.Table("users").Where("email = ?", email).First(&user).Error
@@ -316,7 +295,6 @@ func (db *postgresDB) GetAllUsers() ([]models.User, error) {
 	var users []models.User
 	err := db.DB.Table("users").Select("user_id, username, email").Find(&users).Error
 	if err != nil {
-		fmt.Println(err)
 		return nil, err
 	}
 	return users, nil
@@ -368,26 +346,6 @@ func (db *postgresDB) GetFiles(ownerID string) ([]*models.File, error) {
 	return files, err
 }
 
-func (db *postgresDB) UpdateUploadedByte(byte int64, fileID string) {
-	var file models.File
-	db.DB.Table("files").Where("id = $1", fileID).First(&file)
-	file.UploadedByte = byte
-	db.Save(&file)
-}
-func (db *postgresDB) UpdateUploadedChunk(index int64, fileID string) {
-	var file models.File
-	db.DB.Table("files").Where("id = $1", fileID).First(&file)
-	file.UploadedChunk = index
-	db.Save(&file)
-}
-
-func (db *postgresDB) FinalizeFileUpload(fileID string) {
-	var file models.File
-	db.DB.Table("files").Where("id = $1", fileID).First(&file)
-	file.Done = true
-	db.Save(&file)
-}
-
 func (db *postgresDB) InitializeTotp(email string, secret string) error {
 	var user models.User
 	err := db.DB.Table("users").Where("email = $1", email).First(&user).Error
diff --git a/handler/download/file/file.go b/handler/download/file/file.go
index e80c883..cf0c674 100644
--- a/handler/download/file/file.go
+++ b/handler/download/file/file.go
@@ -1,7 +1,9 @@
 package downloadFileHandler
 
 import (
+	"fmt"
 	"github.com/fossyy/filekeeper/app"
+	"io"
 	"net/http"
 	"os"
 	"path/filepath"
@@ -28,22 +30,21 @@ func GET(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	openFile, err := os.OpenFile(filepath.Join(saveFolder, file.Name), os.O_RDONLY, 0)
-	if err != nil {
-		w.WriteHeader(http.StatusInternalServerError)
-		app.Server.Logger.Error(err.Error())
-		return
-	}
-	defer openFile.Close()
+	w.Header().Set("Content-Disposition", "attachment; filename="+file.Name)
+	w.Header().Set("Content-Type", "application/octet-stream")
+	for i := 0; i <= int(file.TotalChunk); i++ {
+		chunkPath := filepath.Join(saveFolder, file.Name, fmt.Sprintf("chunk_%d", i))
 
-	stat, err := openFile.Stat()
-	if err != nil {
-		w.WriteHeader(http.StatusInternalServerError)
-		app.Server.Logger.Error(err.Error())
-		return
+		chunkFile, err := os.Open(chunkPath)
+		if err != nil {
+			http.Error(w, fmt.Sprintf("Error opening chunk: %v", err), http.StatusInternalServerError)
+			return
+		}
+		_, err = io.Copy(w, chunkFile)
+		chunkFile.Close()
+		if err != nil {
+			http.Error(w, fmt.Sprintf("Error writing chunk: %v", err), http.StatusInternalServerError)
+			return
+		}
 	}
-
-	w.Header().Set("Content-Disposition", "attachment; filename="+stat.Name())
-	http.ServeContent(w, r, stat.Name(), stat.ModTime(), openFile)
-	return
 }
diff --git a/handler/upload/initialisation/initialisation.go b/handler/upload/initialisation/initialisation.go
index bd302a7..15560b4 100644
--- a/handler/upload/initialisation/initialisation.go
+++ b/handler/upload/initialisation/initialisation.go
@@ -3,6 +3,7 @@ package initialisation
 import (
 	"encoding/json"
 	"errors"
+	"fmt"
 	"github.com/fossyy/filekeeper/app"
 	"io"
 	"net/http"
@@ -38,6 +39,26 @@ func POST(w http.ResponseWriter, r *http.Request) {
 				w.WriteHeader(http.StatusInternalServerError)
 				return
 			}
+			fileData = &types.FileWithDetail{
+				ID:         fileData.ID,
+				OwnerID:    fileData.OwnerID,
+				Name:       fileData.Name,
+				Size:       fileData.Size,
+				Downloaded: fileData.Downloaded,
+			}
+			fileData.Chunk = make(map[string]bool)
+			fileData.Done = true
+			saveFolder := filepath.Join("uploads", userSession.UserID.String(), fileData.ID.String(), fileData.Name)
+			for i := 0; i <= int(fileInfo.Chunk-1); i++ {
+				fileName := fmt.Sprintf("%s/chunk_%d", saveFolder, i)
+
+				if _, err := os.Stat(fileName); os.IsNotExist(err) {
+					fileData.Chunk[fmt.Sprintf("chunk_%d", i)] = false
+					fileData.Done = false
+				} else {
+					fileData.Chunk[fmt.Sprintf("chunk_%d", i)] = true
+				}
+			}
 			respondJSON(w, upload)
 			return
 		}
@@ -45,11 +66,19 @@ func POST(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	if fileData.Done {
-		respondJSON(w, map[string]bool{"Done": true})
-		return
-	}
+	fileData.Chunk = make(map[string]bool)
+	fileData.Done = true
+	saveFolder := filepath.Join("uploads", userSession.UserID.String(), fileData.ID.String(), fileData.Name)
+	for i := 0; i <= int(fileInfo.Chunk-1); i++ {
+		fileName := fmt.Sprintf("%s/chunk_%d", saveFolder, i)
 
+		if _, err := os.Stat(fileName); os.IsNotExist(err) {
+			fileData.Chunk[fmt.Sprintf("chunk_%d", i)] = false
+			fileData.Done = false
+		} else {
+			fileData.Chunk[fmt.Sprintf("chunk_%d", i)] = true
+		}
+	}
 	respondJSON(w, fileData)
 }
 
@@ -81,14 +110,12 @@ func handleNewUpload(user types.User, file types.FileInfo) (models.File, error)
 	}
 
 	newFile := models.File{
-		ID:            fileID,
-		OwnerID:       ownerID,
-		Name:          file.Name,
-		Size:          file.Size,
-		Downloaded:    0,
-		UploadedByte:  0,
-		UploadedChunk: -1,
-		Done:          false,
+		ID:         fileID,
+		OwnerID:    ownerID,
+		Name:       file.Name,
+		Size:       file.Size,
+		TotalChunk: file.Chunk - 1,
+		Downloaded: 0,
 	}
 
 	err = app.Server.Database.CreateFile(&newFile)
@@ -96,7 +123,6 @@ func handleNewUpload(user types.User, file types.FileInfo) (models.File, error)
 		app.Server.Logger.Error(err.Error())
 		return models.File{}, err
 	}
-
 	return newFile, nil
 }
 
diff --git a/handler/upload/upload.go b/handler/upload/upload.go
index 00304eb..97b5ade 100644
--- a/handler/upload/upload.go
+++ b/handler/upload/upload.go
@@ -1,8 +1,16 @@
 package uploadHandler
 
 import (
+	"fmt"
+	"github.com/fossyy/filekeeper/app"
+	"github.com/fossyy/filekeeper/types"
 	filesView "github.com/fossyy/filekeeper/view/client/upload"
+	"io"
 	"net/http"
+	"os"
+	"path/filepath"
+	"strconv"
+	"strings"
 )
 
 func GET(w http.ResponseWriter, r *http.Request) {
@@ -14,74 +22,85 @@ func GET(w http.ResponseWriter, r *http.Request) {
 }
 
 func POST(w http.ResponseWriter, r *http.Request) {
+	fileID := r.PathValue("id")
+	if err := r.ParseMultipartForm(32 << 20); err != nil {
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+
+	userSession := r.Context().Value("user").(types.User)
+
+	uploadDir := "uploads"
+	if _, err := os.Stat(uploadDir); os.IsNotExist(err) {
+		if err := os.Mkdir(uploadDir, os.ModePerm); err != nil {
+			app.Server.Logger.Error("error getting upload info: " + err.Error())
+			w.WriteHeader(http.StatusInternalServerError)
+			return
+		}
+	}
+
+	file, err := app.Server.Service.GetFile(fileID)
+	if err != nil {
+		app.Server.Logger.Error("error getting upload info: " + err.Error())
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+
+	rawIndex := r.FormValue("index")
+	index, err := strconv.Atoi(rawIndex)
+	if err != nil {
+		return
+	}
+
+	currentDir, err := os.Getwd()
+	if err != nil {
+		app.Server.Logger.Error("unable to get current directory")
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+
+	basePath := filepath.Join(currentDir, uploadDir)
+	cleanBasePath := filepath.Clean(basePath)
+
+	saveFolder := filepath.Join(cleanBasePath, userSession.UserID.String(), file.ID.String(), file.Name)
+
+	cleanSaveFolder := filepath.Clean(saveFolder)
+
+	if !strings.HasPrefix(cleanSaveFolder, cleanBasePath) {
+		app.Server.Logger.Error("invalid path")
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+
+	if _, err := os.Stat(saveFolder); os.IsNotExist(err) {
+		if err := os.MkdirAll(saveFolder, os.ModePerm); err != nil {
+			app.Server.Logger.Error("error creating save folder: " + err.Error())
+			w.WriteHeader(http.StatusInternalServerError)
+			return
+		}
+	}
+
+	fileByte, _, err := r.FormFile("chunk")
+	if err != nil {
+		app.Server.Logger.Error("error getting upload info: " + err.Error())
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	defer fileByte.Close()
+
+	dst, err := os.OpenFile(filepath.Join(saveFolder, fmt.Sprintf("chunk_%d", index)), os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666)
+	if err != nil {
+		app.Server.Logger.Error("error making upload folder: " + err.Error())
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+
+	defer dst.Close()
+	if _, err := io.Copy(dst, fileByte); err != nil {
+		app.Server.Logger.Error("error copying byte to file dst: " + err.Error())
+		w.WriteHeader(http.StatusInternalServerError)
+		return
+	}
+	return
-	//fileID := r.PathValue("id")
-	//if err := r.ParseMultipartForm(32 << 20); err != nil {
-	//	w.WriteHeader(http.StatusInternalServerError)
-	//	return
-	//}
-	//
-	//userSession := r.Context().Value("user").(types.User)
-	//
-	//uploadDir := "uploads"
-	//if _, err := os.Stat(uploadDir); os.IsNotExist(err) {
-	//	if err := os.Mkdir(uploadDir, os.ModePerm); err != nil {
-	//		app.Server.Logger.Error("error getting upload info: " + err.Error())
-	//		w.WriteHeader(http.StatusInternalServerError)
-	//		return
-	//	}
-	//}
-	//
-	//file, err := app.Server.Service.GetFile(fileID)
-	//if err != nil {
-	//	app.Server.Logger.Error("error getting upload info: " + err.Error())
-	//	w.WriteHeader(http.StatusInternalServerError)
-	//	return
-	//}
-	//
-	//currentDir, _ := os.Getwd()
-	//basePath := filepath.Join(currentDir, uploadDir)
-	//saveFolder := filepath.Join(basePath, userSession.UserID.String(), file.ID.String())
-	//
-	//if filepath.Dir(saveFolder) != filepath.Join(basePath, userSession.UserID.String()) {
-	//	app.Server.Logger.Error("invalid path")
-	//	w.WriteHeader(http.StatusInternalServerError)
-	//	return
-	//}
-	//
-	//fileByte, fileHeader, err := r.FormFile("chunk")
-	//if err != nil {
-	//	app.Server.Logger.Error("error getting upload info: " + err.Error())
-	//	w.WriteHeader(http.StatusInternalServerError)
-	//	return
-	//}
-	//defer fileByte.Close()
-	//
-	//rawIndex := r.FormValue("index")
-	//index, err := strconv.Atoi(rawIndex)
-	//if err != nil {
-	//	return
-	//}
-	//
-	//file.UpdateProgress(int64(index), file.UploadedByte+int64(fileHeader.Size))
-	//
-	//dst, err := os.OpenFile(filepath.Join(saveFolder, file.Name), os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666)
-	//if err != nil {
-	//	app.Server.Logger.Error("error making upload folder: " + err.Error())
-	//	w.WriteHeader(http.StatusInternalServerError)
-	//	return
-	//}
-	//
-	//defer dst.Close()
-	//if _, err := io.Copy(dst, fileByte); err != nil {
-	//	app.Server.Logger.Error("error copying byte to file dst: " + err.Error())
-	//	w.WriteHeader(http.StatusInternalServerError)
-	//	return
-	//}
-	//
-	//if file.UploadedByte >= file.Size {
-	//	file.FinalizeFileUpload()
-	//	return
-	//}
-	//return
 }
diff --git a/public/upload.js b/public/upload.js
index 94ae924..03e4241 100644
--- a/public/upload.js
+++ b/public/upload.js
@@ -32,7 +32,7 @@ async function handleFile(file){
     if (responseData.Done === false) {
         addNewUploadElement(file)
        const fileChunks = await splitFile(file, chunkSize);
-        await uploadChunks(file.name,file.size, fileChunks, responseData.UploadedChunk, responseData.ID);
+        await uploadChunks(file.name,file.size, fileChunks, responseData.Chunk, responseData.ID);
     } else {
         alert("file already uploaded")
     }
@@ -123,7 +123,7 @@ async function splitFile(file, chunkSize) {
     return fileChunks;
 }
 
-async function uploadChunks(name, size, chunks, uploadedChunk= -1, FileID) {
+async function uploadChunks(name, size, chunks, chunkArray, FileID) {
     let byteUploaded = 0
     let progress1 = document.getElementById(`progress-${name}-1`);
     let progress2 = document.getElementById(`progress-${name}-2`);
@@ -132,7 +132,7 @@ async function uploadChunks(name, size, chunks, uploadedChunk= -1, FileID) {
     for (let index = 0; index < chunks.length; index++) {
         const percentComplete = Math.round((index + 1) / chunks.length * 100);
         const chunk = chunks[index];
-        if (!(index <= uploadedChunk)) {
+        if (!(chunkArray["chunk_"+index])) {
             const formData = new FormData();
             formData.append('name', name);
             formData.append('chunk', chunk);
@@ -152,13 +152,19 @@ async function uploadChunks(name, size, chunks, uploadedChunk= -1, FileID) {
             const totalTime = (endTime - startTime) / 1000;
             const uploadSpeed = chunk.size / totalTime / 1024 / 1024;
             byteUploaded += chunk.size
-            console.log(byteUploaded)
             progress3.innerText = `${uploadSpeed.toFixed(2)} MB/s`;
             progress4.innerText = `Uploading ${percentComplete}% - ${convertFileSize(byteUploaded)} of ${ convertFileSize(size)}`;
         } else {
             progress1.setAttribute("aria-valuenow", percentComplete);
             progress2.style.width = `${percentComplete}%`;
+            progress3.innerText = `Fixing Missing Byte`;
+            progress4.innerText = `Uploading Missing Byte ${percentComplete}% - ${convertFileSize(byteUploaded)} of ${ convertFileSize(size)}`;
             byteUploaded += chunk.size
         }
     }
+    console.log(chunks)
+    console.log(chunkArray)
+
+    progress3.innerText = `Done`;
+    progress4.innerText = `File Uploaded 100% - ${convertFileSize(byteUploaded)} of ${ convertFileSize(size)}`;
 }
\ No newline at end of file
diff --git a/service/service.go b/service/service.go
index 39deec4..cde1412 100644
--- a/service/service.go
+++ b/service/service.go
@@ -5,6 +5,7 @@ import (
 	"encoding/json"
 	"github.com/fossyy/filekeeper/app"
 	"github.com/fossyy/filekeeper/types"
+	"github.com/fossyy/filekeeper/types/models"
 	"github.com/redis/go-redis/v9"
 	"time"
 )
@@ -21,7 +22,7 @@ func NewService(db types.Database, cache types.CachingServer) *Service {
 	}
 }
 
-func (r *Service) GetUser(ctx context.Context, email string) (*types.UserWithExpired, error) {
+func (r *Service) GetUser(ctx context.Context, email string) (*models.User, error) {
 	userJSON, err := app.Server.Cache.GetCache(ctx, "UserCache:"+email)
 	if err == redis.Nil {
 		userData, err := r.db.GetUser(email)
@@ -29,13 +30,12 @@ func (r *Service) GetUser(ctx context.Context, email string) (*types.UserWithExp
 			return nil, err
 		}
 
-		user := &types.UserWithExpired{
+		user := &models.User{
 			UserID:   userData.UserID,
 			Username: userData.Username,
 			Email:    userData.Email,
 			Password: userData.Password,
 			Totp:     userData.Totp,
-			AccessAt: time.Now(),
 		}
 
 		newUserJSON, _ := json.Marshal(user)
@@ -50,7 +50,7 @@ func (r *Service) GetUser(ctx context.Context, email string) (*types.UserWithExp
 		return nil, err
 	}
 
-	var user types.UserWithExpired
+	var user models.User
 	err = json.Unmarshal([]byte(userJSON), &user)
 	if err != nil {
 		return nil, err
@@ -66,7 +66,7 @@ func (r *Service) DeleteUser(email string) {
 	}
 }
 
-func (r *Service) GetFile(id string) (*types.FileWithExpired, error) {
+func (r *Service) GetFile(id string) (*models.File, error) {
 	fileJSON, err := r.cache.GetCache(context.Background(), "FileCache:"+id)
 	if err == redis.Nil {
 		uploadData, err := r.db.GetFile(id)
@@ -74,30 +74,18 @@ func (r *Service) GetFile(id string) (*types.FileWithExpired, error) {
 			return nil, err
 		}
 
-		fileCache := &types.FileWithExpired{
-			ID:            uploadData.ID,
-			OwnerID:       uploadData.OwnerID,
-			Name:          uploadData.Name,
-			Size:          uploadData.Size,
-			Downloaded:    uploadData.Downloaded,
-			UploadedByte:  uploadData.UploadedByte,
-			UploadedChunk: uploadData.UploadedChunk,
-			Done:          uploadData.Done,
-			AccessAt:      time.Now(),
-		}
-
-		newFileJSON, _ := json.Marshal(fileCache)
+		newFileJSON, _ := json.Marshal(uploadData)
 		err = r.cache.SetCache(context.Background(), "FileCache:"+id, newFileJSON, time.Hour*24)
 		if err != nil {
 			return nil, err
 		}
-		return fileCache, nil
+		return uploadData, nil
 	}
 	if err != nil {
 		return nil, err
 	}
 
-	var fileCache types.FileWithExpired
+	var fileCache models.File
 	err = json.Unmarshal([]byte(fileJSON), &fileCache)
 	if err != nil {
 		return nil, err
@@ -105,16 +93,18 @@ func (r *Service) GetFile(id string) (*types.FileWithExpired, error) {
 	return &fileCache, nil
 }
 
-func (r *Service) GetUserFile(name, ownerID string) (*types.FileWithExpired, error) {
+func (r *Service) GetUserFile(name, ownerID string) (*types.FileWithDetail, error) {
 	fileData, err := r.db.GetUserFile(name, ownerID)
 	if err != nil {
 		return nil, err
 	}
 
-	file, err := r.GetFile(fileData.ID.String())
-	if err != nil {
-		return nil, err
+	dada := &types.FileWithDetail{
+		ID:         fileData.ID,
+		OwnerID:    fileData.OwnerID,
+		Name:       fileData.Name,
+		Size:       fileData.Size,
+		Downloaded: fileData.Downloaded,
 	}
-
-	return file, nil
+	return dada, nil
 }
diff --git a/types/models/models.go b/types/models/models.go
index 804a953..d1dd8aa 100644
--- a/types/models/models.go
+++ b/types/models/models.go
@@ -11,12 +11,10 @@ type User struct {
 }
 
 type File struct {
-	ID            uuid.UUID `gorm:"primaryKey;not null;unique"`
-	OwnerID       uuid.UUID `gorm:"not null"`
-	Name          string    `gorm:"not null"`
-	Size          int64     `gorm:"not null"`
-	Downloaded    int64     `gorm:"not null;default=0"`
-	UploadedByte  int64     `gorm:"not null;default=0"`
-	UploadedChunk int64     `gorm:"not null;default=0"`
-	Done          bool      `gorm:"not null;default=false"`
+	ID         uuid.UUID `gorm:"primaryKey;not null;unique"`
+	OwnerID    uuid.UUID `gorm:"not null"`
+	Name       string    `gorm:"not null"`
+	Size       int64     `gorm:"not null"`
+	TotalChunk int64     `gorm:"not null"`
+	Downloaded int64     `gorm:"not null;default=0"`
 }
diff --git a/types/types.go b/types/types.go
index 1d71e8f..b16ca03 100644
--- a/types/types.go
+++ b/types/types.go
@@ -33,25 +33,14 @@ type FileData struct {
 	Downloaded int64
 }
 
-type UserWithExpired struct {
-	UserID   uuid.UUID
-	Username string
-	Email    string
-	Password string
-	Totp     string
-	AccessAt time.Time
-}
-
-type FileWithExpired struct {
-	ID            uuid.UUID
-	OwnerID       uuid.UUID
-	Name          string
-	Size          int64
-	Downloaded    int64
-	UploadedByte  int64
-	UploadedChunk int64
-	Done          bool
-	AccessAt      time.Time
+type FileWithDetail struct {
+	ID         uuid.UUID
+	OwnerID    uuid.UUID
+	Name       string
+	Size       int64
+	Downloaded int64
+	Chunk      map[string]bool
+	Done       bool
 }
 
 type Database interface {
@@ -68,10 +57,6 @@ type Database interface {
 	GetUserFile(name string, ownerID string) (*models.File, error)
 	GetFiles(ownerID string) ([]*models.File, error)
 
-	UpdateUploadedByte(index int64, fileID string)
-	UpdateUploadedChunk(index int64, fileID string)
-	FinalizeFileUpload(fileID string)
-
 	InitializeTotp(email string, secret string) error
 }
 
@@ -83,8 +68,8 @@ type CachingServer interface {
 }
 
 type Services interface {
-	GetUser(ctx context.Context, email string) (*UserWithExpired, error)
+	GetUser(ctx context.Context, email string) (*models.User, error)
 	DeleteUser(email string)
-	GetFile(id string) (*FileWithExpired, error)
-	GetUserFile(name, ownerID string) (*FileWithExpired, error)
+	GetFile(id string) (*models.File, error)
+	GetUserFile(name, ownerID string) (*FileWithDetail, error)
 }