2017-12-09 00:21:58 +00:00
package sync
import (
2022-11-21 13:39:00 +00:00
"compress/gzip"
2017-12-09 00:21:58 +00:00
"encoding/hex"
"fmt"
2022-11-21 11:02:36 +00:00
"log"
2022-05-16 09:38:46 +00:00
"net/http"
2022-11-21 11:02:36 +00:00
"os"
2017-12-09 00:21:58 +00:00
"path"
"strings"
"unicode"
2022-05-16 09:38:46 +00:00
"github.com/gin-gonic/gin"
2020-01-18 23:54:00 +00:00
2017-12-09 00:21:58 +00:00
"srs.epita.fr/fic-server/libfic"
)
2022-07-01 22:01:05 +00:00
func BuildFilesListInto ( i Importer , exercice * fic . Exercice , into string ) ( files [ ] string , digests map [ string ] [ ] byte , errs [ ] error ) {
2018-01-06 15:50:47 +00:00
// If no files directory, don't display error
2019-07-05 17:07:28 +00:00
if ! i . exists ( path . Join ( exercice . Path , into ) ) {
2018-01-06 15:50:47 +00:00
return
}
2017-12-09 00:21:58 +00:00
2019-07-05 17:07:28 +00:00
// Parse DIGESTS.txt
2022-05-24 19:52:58 +00:00
if digs , err := GetFileContent ( i , path . Join ( exercice . Path , into , "DIGESTS.txt" ) ) ; err != nil {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewExerciceError ( exercice , fmt . Errorf ( "unable to read %s: %w" , path . Join ( into , "DIGESTS.txt" ) , err ) ) )
2017-12-09 00:21:58 +00:00
} else {
2019-07-05 17:07:28 +00:00
digests = map [ string ] [ ] byte { }
2017-12-09 00:21:58 +00:00
for nline , d := range strings . Split ( digs , "\n" ) {
2018-12-07 23:53:13 +00:00
if dsplt := strings . SplitN ( d , " " , 2 ) ; len ( dsplt ) < 2 {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewExerciceError ( exercice , fmt . Errorf ( "unable to parse %s line %d: invalid format" , path . Join ( into , "DIGESTS.txt" ) , nline + 1 ) ) )
2017-12-09 00:21:58 +00:00
continue
} else if hash , err := hex . DecodeString ( dsplt [ 0 ] ) ; err != nil {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewExerciceError ( exercice , fmt . Errorf ( "unable to parse %s line %d: %w" , path . Join ( into , "DIGESTS.txt" ) , nline + 1 , err ) ) )
2017-12-09 00:21:58 +00:00
continue
} else {
2018-12-07 23:53:13 +00:00
digests [ strings . TrimFunc ( dsplt [ 1 ] , unicode . IsSpace ) ] = hash
2017-12-09 00:21:58 +00:00
}
}
2019-07-05 17:07:28 +00:00
}
// Read file list
if flist , err := i . listDir ( path . Join ( exercice . Path , into ) ) ; err != nil {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewExerciceError ( exercice , err ) )
2019-07-05 17:07:28 +00:00
} else {
for _ , fname := range flist {
2021-07-30 09:32:23 +00:00
if fname == "DIGESTS.txt" || fname == ".gitattributes" {
2019-07-05 17:07:28 +00:00
continue
}
2017-12-09 00:21:58 +00:00
2020-01-19 00:06:03 +00:00
if matched , _ := path . Match ( "*.[0-9][0-9]" , fname ) ; matched {
2017-12-12 06:11:01 +00:00
fname = fname [ : len ( fname ) - 3 ]
2020-01-19 00:06:03 +00:00
} else if matched , _ := path . Match ( "*[0-9][0-9]" , fname ) ; matched {
2017-12-12 06:11:01 +00:00
fname = fname [ : len ( fname ) - 2 ]
} else if matched , _ := path . Match ( "*_MERGED" , fname ) ; matched {
continue
}
2019-07-05 17:07:28 +00:00
fileFound := false
for _ , f := range files {
if fname == f {
fileFound = true
break
2017-12-12 06:11:01 +00:00
}
}
2019-07-05 17:07:28 +00:00
if ! fileFound {
files = append ( files , fname )
2017-12-12 06:11:01 +00:00
}
2019-07-05 17:07:28 +00:00
}
}
2017-12-12 06:11:01 +00:00
2019-07-05 17:07:28 +00:00
return
}
2020-01-16 17:53:27 +00:00
// CheckExerciceFilesPresence limits remote checks to presence, don't get it to check digest.
2022-07-01 22:01:05 +00:00
func CheckExerciceFilesPresence ( i Importer , exercice * fic . Exercice ) ( files [ ] string , errs [ ] error ) {
2020-01-16 17:53:27 +00:00
flist , digests , berrs := BuildFilesListInto ( i , exercice , "files" )
errs = append ( errs , berrs ... )
for _ , fname := range flist {
if ! i . exists ( path . Join ( exercice . Path , "files" , fname ) ) {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "No such file or directory" ) ) )
2020-01-16 17:53:27 +00:00
} else if _ , ok := digests [ fname ] ; ! ok {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "unable to import file: No digest given" ) ) )
2020-01-16 17:53:27 +00:00
} else {
files = append ( files , fname )
}
}
for fname := range digests {
if ! i . exists ( path . Join ( exercice . Path , "files" , fname ) ) {
2022-07-11 17:57:33 +00:00
errs = append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "unable to read file: No such file or directory. Check your DIGESTS.txt for legacy entries." ) ) )
2020-01-16 17:53:27 +00:00
}
}
return
}
// CheckExerciceFiles checks that remote files have the right digest.
// For each file listed by BuildFilesListInto it verifies the stored digest,
// retrieves the file size, and — for ".gz" files that also have a digest
// registered under their uncompressed name — checks the gunzipped content
// against that digest too. It then builds a dummy EFile and runs the
// registered file hooks on it. Every file name is appended to `files`
// unless the initial GetFile call failed.
func CheckExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (files []string, errs []error) {
	flist, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = append(errs, berrs...)

	for _, fname := range flist {
		if fd, closer, err := GetFile(i, path.Join(exercice.Path, "files", fname)); err != nil {
			errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
			continue
		} else {
			// NOTE(review): defer inside a loop — every opened file stays
			// open until the whole function returns. Fine for small file
			// sets; worth extracting a per-file helper if exercices grow.
			defer closer()
			hash160, hash512 := fic.CreateHashBuffers(fd)
			// Compare the file's actual hashes against the DIGESTS.txt entry.
			if _, err := fic.CheckBufferHash(hash160, hash512, digests[fname]); err != nil {
				errs = append(errs, NewFileError(exercice, fname, err))
			} else if size, err := GetFileSize(i, path.Join(exercice.Path, "files", fname)); err != nil {
				errs = append(errs, NewFileError(exercice, fname, err))
			} else {
				// digest_shown is the digest of the *uncompressed* content,
				// present only when DIGESTS.txt has an entry for the name
				// without its ".gz" suffix.
				var digest_shown []byte
				if strings.HasSuffix(fname, ".gz") {
					if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
						digest_shown = d

						// Check that gunzipped file digest is correct
						// (the file is opened a second time here because the
						// first reader was consumed by CreateHashBuffers).
						if fd, closer, err := GetFile(i, path.Join(exercice.Path, "files", fname)); err != nil {
							errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
							continue
						} else if gunzipfd, err := gzip.NewReader(fd); err != nil {
							closer()
							errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to gunzip file: %w", err)))
							continue
						} else {
							defer gunzipfd.Close()
							defer closer()
							hash160_inflate, hash512_inflate := fic.CreateHashBuffers(gunzipfd)
							if _, err := fic.CheckBufferHash(hash160_inflate, hash512_inflate, digest_shown); err != nil {
								// Report under the uncompressed name so the
								// user knows which digest entry mismatched.
								errs = append(errs, NewFileError(exercice, strings.TrimSuffix(fname, ".gz"), err))
							}
						}
					}
				}

				// NOTE(review): GetExerciceFilesParams does not depend on
				// fname — this fetch is loop-invariant and could be hoisted
				// above the loop (it aborts the whole check on failure).
				paramsFiles, err := GetExerciceFilesParams(i, exercice)
				if err != nil {
					errs = append(errs, NewChallengeTxtError(exercice, 0, err))
					return
				}

				disclaimer := ""
				if f, exists := paramsFiles[fname]; exists {
					// Call checks hooks
					for _, hk := range hooks.mdTextHooks {
						for _, err := range hk(f.Disclaimer, exceptions) {
							errs = append(errs, NewFileError(exercice, fname, err))
						}
					}
					if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
						errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
					}
				}

				// Build an in-memory EFile (not persisted) so file hooks can
				// validate it; hash512 was filled by CreateHashBuffers above.
				file := exercice.NewDummyFile(path.Join(exercice.Path, "files", fname), getDestinationFilePath(path.Join(exercice.Path, "files", fname)), (*hash512).Sum(nil), digest_shown, disclaimer, size)

				// Call checks hooks
				for _, h := range hooks.fileHooks {
					for _, e := range h(file, exceptions) {
						errs = append(errs, NewFileError(exercice, fname, e))
					}
				}
			}
		}

		// Reached for every file whose initial GetFile succeeded, even when
		// digest/size checks above recorded errors.
		files = append(files, fname)
	}
	return
}
// SyncExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
// It takes care of DIGESTS.txt and ensure imported files match.
// Existing files for the exercice are wiped first; errors are accumulated
// and returned rather than aborting (except a files-params read failure,
// which returns early).
func SyncExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (errs []error) {
	// Start from a clean slate; a wipe failure is reported but not fatal.
	if _, err := exercice.WipeFiles(); err != nil {
		errs = append(errs, err)
	}

	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		errs = append(errs, NewChallengeTxtError(exercice, 0, err))
		return
	}

	files, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = append(errs, berrs...)

	// Import standard files
	for _, fname := range files {
		// The callback passed to importFile performs the actual DB import;
		// note it closes over `fname`, `errs` and the outer `err`.
		if f, err := i.importFile(path.Join(exercice.Path, "files", fname),
			func(filePath string, origin string) (interface{}, error) {
				// For gzipped files, also record the digest of the
				// uncompressed content when DIGESTS.txt provides one.
				var digest_shown []byte
				if strings.HasSuffix(fname, ".gz") {
					if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
						digest_shown = d
					}
				}

				published := true
				disclaimer := ""
				if f, exists := paramsFiles[fname]; exists {
					published = !f.Hidden

					// Call checks hooks
					for _, hk := range hooks.mdTextHooks {
						for _, err := range hk(f.Disclaimer, exceptions) {
							errs = append(errs, NewFileError(exercice, fname, err))
						}
					}
					// `disclaimer, err =` assigns the OUTER err (no :=):
					// a markdown failure is recorded but import proceeds
					// with an empty/partial disclaimer.
					if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
						errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
					}
				}

				return exercice.ImportFile(filePath, origin, digests[fname], digest_shown, disclaimer, published)
			}); err != nil {
			errs = append(errs, NewFileError(exercice, fname, err))
			continue
		} else if f.(*fic.EFile).Size == 0 {
			errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("imported file is empty!")))
		} else {
			file := f.(*fic.EFile)

			// Call checks hooks
			for _, h := range hooks.fileHooks {
				for _, e := range h(file, exceptions) {
					errs = append(errs, NewFileError(exercice, fname, e))
				}
			}

			// Create empty non-gziped file for nginx gzip-static module
			// (nginx serves name.gz when the plain name exists; the DB row
			// is updated to drop the .gz suffix accordingly).
			if len(file.ChecksumShown) > 0 && strings.HasSuffix(file.Name, ".gz") {
				file.Name = strings.TrimSuffix(file.Name, ".gz")
				file.Path = strings.TrimSuffix(file.Path, ".gz")
				fd, err := os.Create(path.Join(fic.FilesDir, file.Path))
				if err == nil {
					fd.Close()
					_, err = file.Update()
					if err != nil {
						log.Println("Unable to update file after .gz removal:", err.Error())
					}
				} else {
					log.Printf("Unable to create %q: %s", file.Path, err)
				}
			}
		}
	}

	return
}
2020-01-18 23:54:00 +00:00
// ApiGetRemoteExerciceFiles is an accessor to remote exercice files list.
2022-05-16 09:38:46 +00:00
func ApiGetRemoteExerciceFiles ( c * gin . Context ) {
2022-10-29 15:03:57 +00:00
theme , exceptions , errs := BuildTheme ( GlobalImporter , c . Params . ByName ( "thid" ) )
2020-01-18 23:54:00 +00:00
if theme != nil {
2022-10-29 15:03:57 +00:00
exercice , _ , _ , _ , errs := BuildExercice ( GlobalImporter , theme , path . Join ( theme . Path , c . Params . ByName ( "exid" ) ) , nil , exceptions )
2020-01-18 23:54:00 +00:00
if exercice != nil {
2021-11-22 14:35:07 +00:00
files , digests , errs := BuildFilesListInto ( GlobalImporter , exercice , "files" )
2020-01-18 23:54:00 +00:00
if files != nil {
2021-11-22 14:35:07 +00:00
var ret [ ] * fic . EFile
2020-01-18 23:54:00 +00:00
for _ , fname := range files {
fPath := path . Join ( exercice . Path , "files" , fname )
2022-11-21 17:55:38 +00:00
fSize , _ := GetFileSize ( GlobalImporter , fPath )
2021-11-22 14:35:07 +00:00
ret = append ( ret , & fic . EFile {
2020-04-15 05:39:38 +00:00
Path : fPath ,
Name : fname ,
2020-01-18 23:54:00 +00:00
Checksum : digests [ fname ] ,
2020-04-15 05:39:38 +00:00
Size : fSize ,
2020-01-18 23:54:00 +00:00
} )
}
2022-05-16 09:38:46 +00:00
c . JSON ( http . StatusOK , ret )
2020-01-18 23:54:00 +00:00
} else {
2022-05-16 09:38:46 +00:00
c . AbortWithStatusJSON ( http . StatusBadRequest , gin . H { "errmsg" : fmt . Errorf ( "%q" , errs ) } )
return
2020-01-18 23:54:00 +00:00
}
} else {
2022-05-16 09:38:46 +00:00
c . AbortWithStatusJSON ( http . StatusBadRequest , gin . H { "errmsg" : fmt . Errorf ( "%q" , errs ) } )
return
2020-01-18 23:54:00 +00:00
}
} else {
2022-05-16 09:38:46 +00:00
c . AbortWithStatusJSON ( http . StatusBadRequest , gin . H { "errmsg" : fmt . Errorf ( "%q" , errs ) } )
return
2020-01-18 23:54:00 +00:00
}
}