package fic

import (
	"compress/gzip"
	"crypto"
	_ "crypto/sha1"
	"encoding/hex"
	"errors"
	"fmt"
	"hash"
	"io"
	"os"
	"path"
	"strings"

	_ "golang.org/x/crypto/blake2b"
)

// FilesDir is the directory where the files to be served are stored.
var FilesDir string = "./FILES/"

// OptionalDigest allows the import to succeed even when no digest is given.
var OptionalDigest bool = false

// StrongDigest forces the use of BLAKE2b hashes in place of SHA-1 (or mixed SHA-1/BLAKE2b).
var StrongDigest bool = false

// PlainDigest makes digest errors contain the whole calculated hash, so it can be pasted directly into the DIGESTS file.
var PlainDigest bool = false

// EFile represents a challenge file.
type EFile struct {
	Id int64 `json:"id,omitempty"`

	// origin holds the import-relative path of the file
	origin string

	// Path is the location where the file is stored, relative to FilesDir
	Path string `json:"path"`

	// IdExercice is the identifier of the underlying challenge
	IdExercice int64 `json:"idExercice,omitempty"`

	// Name is the title displayed to players
	Name string `json:"name"`

	// Checksum stores the cached hash of the file
	Checksum []byte `json:"checksum"`

	// ChecksumShown stores the hash of the downloaded file (in case of gzipped content)
	ChecksumShown []byte `json:"checksum_shown"`

	// Size contains the cached size of the file
	Size int64 `json:"size"`

	// Disclaimer contains a string to display before downloading the content
	Disclaimer string `json:"disclaimer"`

	// Published indicates whether the file should be shown or not
	Published bool `json:"published"`
}
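
// Given the tags above, an EFile marshals to JSON along these lines (illustrative
// placeholder values; []byte checksums are encoded as base64 strings):
//
//	{"id": 42, "path": "mychall/file.bin", "idExercice": 7, "name": "file.bin",
//	 "checksum": "...", "checksum_shown": "...", "size": 1234, "disclaimer": "", "published": true}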

// NewDummyFile creates an EFile, without any link to an actual Exercice file.
// It is used to check the file's validity.
func (e *Exercice) NewDummyFile(origin string, dest string, checksum []byte, checksumShown []byte, disclaimer string, size int64) *EFile {
	return &EFile{
		Id:            0,
		origin:        origin,
		Path:          dest,
		IdExercice:    e.Id,
		Name:          path.Base(origin),
		Checksum:      checksum,
		ChecksumShown: checksumShown,
		Size:          size,
		Disclaimer:    disclaimer,
		Published:     true,
	}
}

// GetFiles returns a list of all files living in the database.
func GetFiles() ([]*EFile, error) {
	if rows, err := DBQuery("SELECT id_file, id_exercice, origin, path, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files"); err != nil {
		return nil, err
	} else {
		defer rows.Close()

		files := []*EFile{}
		for rows.Next() {
			f := &EFile{}
			if err := rows.Scan(&f.Id, &f.IdExercice, &f.origin, &f.Path, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published); err != nil {
				return nil, err
			}
			files = append(files, f)
		}
		if err := rows.Err(); err != nil {
			return nil, err
		}

		return files, nil
	}
}

// GetFile retrieves the file with the given id.
func GetFile(id int64) (f *EFile, err error) {
	f = &EFile{}
	err = DBQueryRow("SELECT id_file, origin, path, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files WHERE id_file = ?", id).Scan(&f.Id, &f.origin, &f.Path, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published)
	return
}

// GetFile retrieves the challenge's file with the given id.
func (e *Exercice) GetFile(id int64) (f *EFile, err error) {
	f = &EFile{}
	err = DBQueryRow("SELECT id_file, origin, path, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files WHERE id_file = ? AND id_exercice = ?", id, e.Id).Scan(&f.Id, &f.origin, &f.Path, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published)
	return
}

// GetFileByPath retrieves the file that should be found at the given location.
func GetFileByPath(path string) (*EFile, error) {
	path = strings.TrimPrefix(path, FilesDir)

	f := &EFile{}
	if err := DBQueryRow("SELECT id_file, origin, path, id_exercice, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files WHERE path = ?", path).Scan(&f.Id, &f.origin, &f.Path, &f.IdExercice, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published); err != nil {
		return f, err
	}

	return f, nil
}

// GetFileByFilename retrieves the challenge file with the given file name.
func (e *Exercice) GetFileByFilename(filename string) (f *EFile, err error) {
	filename = path.Base(filename)

	f = &EFile{}
	// The file is looked up by the basename of its origin path.
	err = DBQueryRow("SELECT id_file, origin, path, id_exercice, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files WHERE id_exercice = ? AND origin LIKE ?", e.Id, "%/"+filename).Scan(&f.Id, &f.origin, &f.Path, &f.IdExercice, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published)
	return
}

// GetFiles returns a list of files coming with the challenge.
func (e *Exercice) GetFiles() ([]*EFile, error) {
	if rows, err := DBQuery("SELECT id_file, origin, path, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files WHERE id_exercice = ?", e.Id); err != nil {
		return nil, err
	} else {
		defer rows.Close()

		files := []*EFile{}
		for rows.Next() {
			f := &EFile{}
			f.IdExercice = e.Id
			if err := rows.Scan(&f.Id, &f.origin, &f.Path, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published); err != nil {
				return nil, err
			}
			files = append(files, f)
		}
		if err := rows.Err(); err != nil {
			return nil, err
		}

		return files, nil
	}
}

// GetFileByPath retrieves the file that should be found at the given location, limited to the challenge files.
func (e *Exercice) GetFileByPath(path string) (*EFile, error) {
	path = strings.TrimPrefix(path, FilesDir)

	f := &EFile{}
	if err := DBQueryRow("SELECT id_file, origin, path, name, cksum, cksum_shown, size, disclaimer, published FROM exercice_files WHERE id_exercice = ? AND path = ?", e.Id, path).Scan(&f.Id, &f.origin, &f.Path, &f.Name, &f.Checksum, &f.ChecksumShown, &f.Size, &f.Disclaimer, &f.Published); err != nil {
		return nil, err
	}

	return f, nil
}

// minifyHash returns only the beginning and the end of the given hash.
// Use this function to prevent people from simply pasting the calculated hash we report back into their DIGESTS file.
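// For instance, with PlainDigest disabled, a 40-character SHA-1 hex digest such as
// da39a3ee5e6b4b0d3255bfef95601890afd80709 is reduced to da39a3ee5e6b4...bfef95601890afd80709.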
func minifyHash(hash string) string {
	if PlainDigest {
		return hash
	} else {
		return hash[0:len(hash)/3] + "..." + hash[len(hash)/2:]
	}
}

// CreateHashBuffers computes both the SHA-1 and BLAKE2b-512 digests of the given reader.
func CreateHashBuffers(rd io.Reader) (*hash.Hash, *hash.Hash) {
	hash160 := crypto.SHA1.New()
	hash512 := crypto.BLAKE2b_512.New()
	w := io.MultiWriter(hash160, hash512)

	io.Copy(w, rd)

	return &hash160, &hash512
}

// CheckBufferHash checks whether the hashed content matches the given digest.
func CheckBufferHash(hash160 *hash.Hash, hash512 *hash.Hash, digest []byte) ([]byte, error) {
	result160 := (*hash160).Sum(nil)
	result512 := (*hash512).Sum(nil)

	if len(digest) != len(result512) {
		// The given digest is not a BLAKE2b-512 one: fall back to SHA-1 if its length matches.
		if len(digest) != len(result160) {
			return []byte{}, errors.New("Digests don't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
		} else if StrongDigest {
			return []byte{}, errors.New("Invalid digest: SHA-1 checksums are not accepted anymore. Calculated sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
		}

		for k := range result160 {
			if result160[k] != digest[k] {
				return []byte{}, errors.New("Digests don't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
			}
		}
	} else {
		for k := range result512 {
			if result512[k] != digest[k] {
				return []byte{}, errors.New("Digests don't match: calculated: " + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
			}
		}
	}

	// On success, always return the BLAKE2b-512 digest so callers can cache it.
	return result512, nil
}
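
// A typical verification flow chains both helpers, as checkFileHash does below
// (illustrative sketch; filePath and expectedDigest are placeholders):
//
//	fd, _ := os.Open(filePath)
//	defer fd.Close()
//	hash160, hash512 := CreateHashBuffers(fd)
//	dgst, err := CheckBufferHash(hash160, hash512, expectedDigest)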

// checkFileHash checks if the file at the given filePath has the given digest.
// It also returns the file's size.
func checkFileHash(filePath string, digest []byte) (dgst []byte, size int64, err error) {
	if digest == nil {
		return []byte{}, 0, errors.New("no digest given")
	} else if fi, errr := os.Stat(filePath); errr != nil {
		return []byte{}, 0, errr
	} else if fd, errr := os.Open(filePath); errr != nil {
		return []byte{}, fi.Size(), errr
	} else {
		defer fd.Close()
		size = fi.Size()

		hash160, hash512 := CreateHashBuffers(fd)

		dgst, err = CheckBufferHash(hash160, hash512, digest)

		return
	}
}

// ImportFile registers (or updates, if it already exists) the file in the database.
func (e *Exercice) ImportFile(filePath string, origin string, digest []byte, digestshown []byte, disclaimer string, published bool) (interface{}, error) {
	if result512, size, err := checkFileHash(filePath, digest); !OptionalDigest && err != nil {
		return nil, err
	} else {
		dPath := strings.TrimPrefix(filePath, FilesDir)

		if f, err := e.GetFileByPath(dPath); err != nil {
			return e.AddFile(dPath, origin, path.Base(filePath), result512, digestshown, size, disclaimer, published)
		} else {
			// No need to update Path and Name, as they are derived from dPath
			f.IdExercice = e.Id
			f.origin = origin
			f.Checksum = result512
			f.ChecksumShown = digestshown
			f.Size = size
			f.Disclaimer = disclaimer
			f.Published = published

			if _, err := f.Update(); err != nil {
				return nil, err
			} else {
				return f, nil
			}
		}
	}
}
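
// During a challenge import, a call would typically look like this (illustrative
// sketch; the paths, digest and exercice values are placeholders):
//
//	f, err := exercice.ImportFile(path.Join(FilesDir, "mychall/file.bin"),
//		"mychall/files/file.bin", digest, nil, "", true)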

// AddFile creates and fills a new EFile struct and registers it into the database.
func (e *Exercice) AddFile(path string, origin string, name string, checksum []byte, checksumshown []byte, size int64, disclaimer string, published bool) (*EFile, error) {
	if res, err := DBExec("INSERT INTO exercice_files (id_exercice, origin, path, name, cksum, cksum_shown, size, disclaimer, published) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", e.Id, origin, path, name, checksum, checksumshown, size, disclaimer, published); err != nil {
		return nil, err
	} else if fid, err := res.LastInsertId(); err != nil {
		return nil, err
	} else {
		return &EFile{fid, origin, path, e.Id, name, checksum, checksumshown, size, disclaimer, published}, nil
	}
}

// Update applies modifications back to the database.
func (f *EFile) Update() (int64, error) {
	if res, err := DBExec("UPDATE exercice_files SET id_exercice = ?, origin = ?, path = ?, name = ?, cksum = ?, cksum_shown = ?, size = ?, disclaimer = ?, published = ? WHERE id_file = ?", f.IdExercice, f.origin, f.Path, f.Name, f.Checksum, f.ChecksumShown, f.Size, f.Disclaimer, f.Published, f.Id); err != nil {
		return 0, err
	} else if nb, err := res.RowsAffected(); err != nil {
		return 0, err
	} else {
		return nb, err
	}
}

// Delete the file from the database.
func (f EFile) Delete() (int64, error) {
	if res, err := DBExec("DELETE FROM exercice_files WHERE id_file = ?", f.Id); err != nil {
		return 0, err
	} else if nb, err := res.RowsAffected(); err != nil {
		return 0, err
	} else {
		return nb, err
	}
}

// WipeFiles deletes (only in the database, not on disk) files coming with the challenge.
func (e Exercice) WipeFiles() (int64, error) {
	if _, err := DBExec("DELETE FROM exercice_files_okey_deps WHERE id_flag IN (SELECT id_flag FROM exercice_flags WHERE id_exercice = ?)", e.Id); err != nil {
		return 0, err
	} else if _, err := DBExec("DELETE FROM exercice_files_omcq_deps WHERE id_mcq IN (SELECT id_mcq FROM exercice_mcq WHERE id_exercice = ?)", e.Id); err != nil {
		return 0, err
	} else if res, err := DBExec("DELETE FROM exercice_files WHERE id_exercice = ?", e.Id); err != nil {
		return 0, err
	} else if nb, err := res.RowsAffected(); err != nil {
		return 0, err
	} else {
		return nb, err
	}
}

// ClearFiles removes all files from the database (but not the files on disk).
func ClearFiles() (int64, error) {
	if res, err := DBExec("DELETE FROM exercice_files"); err != nil {
		return 0, err
	} else if nb, err := res.RowsAffected(); err != nil {
		return 0, err
	} else {
		return nb, err
	}
}

// GetOrigin gives access to the file's private origin field.
func (f *EFile) GetOrigin() string {
	return f.origin
}

// AddDepend inserts a new dependency on the given flag.
func (f *EFile) AddDepend(j Flag) (err error) {
	if k, ok := j.(*FlagKey); ok {
		_, err = DBExec("INSERT INTO exercice_files_okey_deps (id_file, id_flag) VALUES (?, ?)", f.Id, k.Id)
	} else if m, ok := j.(*MCQ); ok {
		_, err = DBExec("INSERT INTO exercice_files_omcq_deps (id_file, id_mcq) VALUES (?, ?)", f.Id, m.Id)
	} else {
		err = fmt.Errorf("dependency type for flag (%T) not implemented for this file", j)
	}

	return
}

// DeleteDepend removes an existing dependency on the given flag.
func (f *EFile) DeleteDepend(j Flag) (err error) {
	if k, ok := j.(*FlagKey); ok {
		_, err = DBExec("DELETE FROM exercice_files_okey_deps WHERE id_file = ? AND id_flag = ?", f.Id, k.Id)
	} else if m, ok := j.(*MCQ); ok {
		_, err = DBExec("DELETE FROM exercice_files_omcq_deps WHERE id_file = ? AND id_mcq = ?", f.Id, m.Id)
	} else {
		err = fmt.Errorf("dependency type for flag (%T) not implemented for this file", j)
	}

	return
}

// GetDepends retrieves the file's dependency list.
func (f *EFile) GetDepends() ([]Flag, error) {
	var deps = make([]Flag, 0)

	if rows, err := DBQuery("SELECT id_flag FROM exercice_files_okey_deps WHERE id_file = ?", f.Id); err != nil {
		return nil, err
	} else {
		defer rows.Close()
		for rows.Next() {
			var d int
			if err := rows.Scan(&d); err != nil {
				return nil, err
			}
			deps = append(deps, &FlagKey{d, f.IdExercice, 0, "", "", "", "", "", false, false, false, nil, false, []byte{}, 0, 0})
		}
		if err := rows.Err(); err != nil {
			return nil, err
		}
	}

	if rows, err := DBQuery("SELECT id_mcq FROM exercice_files_omcq_deps WHERE id_file = ?", f.Id); err != nil {
		return nil, err
	} else {
		defer rows.Close()
		for rows.Next() {
			var d int
			if err := rows.Scan(&d); err != nil {
				return nil, err
			}
			deps = append(deps, &MCQ{d, f.IdExercice, 0, "", []*MCQ_entry{}})
		}
		if err := rows.Err(); err != nil {
			return nil, err
		}
	}

	return deps, nil
}

// CheckFileOnDisk recalculates the hash of the file on disk.
func (f *EFile) CheckFileOnDisk() error {
	if _, size, err := checkFileHash(path.Join(FilesDir, f.Path), f.Checksum); err != nil {
		return err
	} else if size == 0 {
		// An empty file may actually be stored gzipped on disk: check the .gz variant before failing.
		if _, _, err := checkFileHash(path.Join(FilesDir, f.Path+".gz"), f.Checksum); err != nil {
			return errors.New("empty file")
		} else {
			return nil
		}
	} else {
		return nil
	}
}

// GunzipFileOnDisk decompresses a gzip-compressed file on disk.
func (f *EFile) GunzipFileOnDisk() error {
	// Only plain ".gz" files are decompressed; tarballs (".tar.gz") are kept as-is.
	if !strings.HasSuffix(f.origin, ".gz") || strings.HasSuffix(f.origin, ".tar.gz") {
		return nil
	}

	fdIn, err := os.Open(path.Join(FilesDir, f.Path+".gz"))
	if err != nil {
		return err
	}
	defer fdIn.Close()

	fdOut, err := os.Create(path.Join(FilesDir, strings.TrimSuffix(f.Path, ".gz")))
	if err != nil {
		return err
	}
	defer fdOut.Close()

	gunzipfd, err := gzip.NewReader(fdIn)
	if err != nil {
		return err
	}
	defer gunzipfd.Close()

	_, err = io.Copy(fdOut, gunzipfd)
	if err != nil {
		return err
	}

	return nil
}