2017-12-09 00:21:58 +00:00
package sync
import (
2022-11-21 13:39:00 +00:00
"compress/gzip"
2017-12-09 00:21:58 +00:00
"encoding/hex"
"fmt"
2023-10-13 14:05:15 +00:00
"io"
2022-11-21 11:02:36 +00:00
"log"
2022-05-16 09:38:46 +00:00
"net/http"
2023-10-13 21:05:08 +00:00
"net/url"
2022-11-21 11:02:36 +00:00
"os"
2017-12-09 00:21:58 +00:00
"path"
"strings"
"unicode"
2022-05-16 09:38:46 +00:00
"github.com/gin-gonic/gin"
2023-11-22 11:16:53 +00:00
"go.uber.org/multierr"
2020-01-18 23:54:00 +00:00
2017-12-09 00:21:58 +00:00
"srs.epita.fr/fic-server/libfic"
)
// remoteFileDomainWhitelist holds the hostnames allowed as sources for
// remote exercice files. It implements flag.Value so the whitelist can be
// populated from repeated command-line options.
type remoteFileDomainWhitelist []string

// String renders the whitelist, for flag help and debugging output.
func (w *remoteFileDomainWhitelist) String() string {
	return fmt.Sprintf("%v", *w)
}

// Set appends one allowed hostname to the whitelist; it never fails.
func (w *remoteFileDomainWhitelist) Set(host string) error {
	*w = append(*w, host)
	return nil
}

// RemoteFileDomainWhitelist is the global whitelist consulted by isURLAllowed.
var RemoteFileDomainWhitelist remoteFileDomainWhitelist

// isURLAllowed reports whether the given URL points at a whitelisted host.
// An empty whitelist allows everything; an unparsable URL is always refused.
func isURLAllowed(in string) bool {
	if len(RemoteFileDomainWhitelist) == 0 {
		return true
	}

	u, err := url.Parse(in)
	if err != nil {
		return false
	}

	for _, allowed := range RemoteFileDomainWhitelist {
		if u.Host == allowed {
			return true
		}
	}
	return false
}
// BuildFilesListInto returns the names of the files shipped with the given
// exercice under the `into` subdirectory, together with the expected digests
// parsed from DIGESTS.txt (filename -> raw digest bytes).
//
// Chunked files ("name.NN"/"nameNN") are collapsed to their base name and
// deduplicated; "*_MERGED" artifacts are skipped. Remote files declared with
// a URL in the exercice parameters are appended as well. Problems are
// accumulated into errs via multierr; the function only bails out early when
// the directory does not exist at all (which is not an error).
func BuildFilesListInto(i Importer, exercice *fic.Exercice, into string) (files []string, digests map[string][]byte, errs error) {
	// If no files directory, don't display error
	if !i.Exists(path.Join(exercice.Path, into)) {
		return
	}

	// Parse DIGESTS.txt: each line is "<hex digest> <filename>".
	if digs, err := GetFileContent(i, path.Join(exercice.Path, into, "DIGESTS.txt")); err != nil {
		errs = multierr.Append(errs, NewExerciceError(exercice, fmt.Errorf("unable to read %s: %w", path.Join(into, "DIGESTS.txt"), err)))
	} else if len(digs) > 0 {
		digests = map[string][]byte{}
		for nline, d := range strings.Split(digs, "\n") {
			// Each line needs at least two space-separated fields.
			if dsplt := strings.SplitN(d, " ", 2); len(dsplt) < 2 {
				errs = multierr.Append(errs, NewExerciceError(exercice, fmt.Errorf("unable to parse %s line %d: invalid format", path.Join(into, "DIGESTS.txt"), nline+1)))
				continue
			} else if hash, err := hex.DecodeString(dsplt[0]); err != nil {
				errs = multierr.Append(errs, NewExerciceError(exercice, fmt.Errorf("unable to parse %s line %d: %w", path.Join(into, "DIGESTS.txt"), nline+1, err)))
				continue
			} else {
				// Filenames may carry trailing whitespace (e.g. CRLF): trim it.
				digests[strings.TrimFunc(dsplt[1], unicode.IsSpace)] = hash
			}
		}
	}

	// Read file list
	if flist, err := i.ListDir(path.Join(exercice.Path, into)); err != nil {
		errs = multierr.Append(errs, NewExerciceError(exercice, err))
	} else {
		for _, fname := range flist {
			// Metadata files are never exposed as exercice files.
			if fname == "DIGESTS.txt" || fname == ".gitattributes" {
				continue
			}

			// Collapse split files: strip a trailing ".NN" or "NN" chunk
			// suffix; skip pre-merged "*_MERGED" artifacts entirely.
			if matched, _ := path.Match("*.[0-9][0-9]", fname); matched {
				fname = fname[:len(fname)-3]
			} else if matched, _ := path.Match("*[0-9][0-9]", fname); matched {
				fname = fname[:len(fname)-2]
			} else if matched, _ := path.Match("*_MERGED", fname); matched {
				continue
			}

			// Deduplicate: several chunks collapse to the same base name.
			fileFound := false
			for _, f := range files {
				if fname == f {
					fileFound = true
					break
				}
			}

			if !fileFound {
				files = append(files, fname)
			}
		}
	}

	// Complete with remote file names
	if paramsFiles, err := GetExerciceFilesParams(i, exercice); err == nil {
		for _, pf := range paramsFiles {
			if pf.URL != "" {
				found := false
				for _, file := range files {
					if file == pf.Filename {
						found = true
						break
					}
				}
				if !found {
					files = append(files, pf.Filename)
				}
			}
		}
	}

	return
}
2020-01-16 17:53:27 +00:00
// CheckExerciceFilesPresence limits remote checks to presence, don't get it to check digest.
2023-11-22 11:16:53 +00:00
func CheckExerciceFilesPresence ( i Importer , exercice * fic . Exercice ) ( files [ ] string , errs error ) {
2020-01-16 17:53:27 +00:00
flist , digests , berrs := BuildFilesListInto ( i , exercice , "files" )
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , berrs )
2020-01-16 17:53:27 +00:00
2023-10-13 14:05:15 +00:00
paramsFiles , _ := GetExerciceFilesParams ( i , exercice )
2020-01-16 17:53:27 +00:00
for _ , fname := range flist {
2023-05-04 11:41:40 +00:00
if ! i . Exists ( path . Join ( exercice . Path , "files" , fname ) ) && ! i . Exists ( path . Join ( exercice . Path , "files" , fname + ".00" ) ) {
2023-10-13 14:05:15 +00:00
// File not found locally, is this a remote file?
if pf , exists := paramsFiles [ fname ] ; ! exists || pf . URL == "" {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "No such file or directory" ) ) )
2023-10-13 14:05:15 +00:00
continue
2023-10-13 21:05:08 +00:00
} else if ! isURLAllowed ( pf . URL ) {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "URL hostname is not whitelisted" ) ) )
2023-10-13 21:05:08 +00:00
continue
2023-10-13 14:05:15 +00:00
} else {
resp , err := http . Head ( pf . URL )
if err != nil {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , fname , err ) )
2023-10-13 14:05:15 +00:00
continue
}
defer resp . Body . Close ( )
if resp . StatusCode >= 300 {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "Unexpected status code for the HTTP response: %d %s" , resp . StatusCode , resp . Status ) ) )
2023-10-13 14:05:15 +00:00
continue
}
}
}
if _ , ok := digests [ fname ] ; ! ok {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "unable to import file: No digest given" ) ) )
2020-01-16 17:53:27 +00:00
} else {
files = append ( files , fname )
}
}
for fname := range digests {
2023-05-04 11:41:40 +00:00
if ! i . Exists ( path . Join ( exercice . Path , "files" , fname ) ) && ! i . Exists ( path . Join ( exercice . Path , "files" , fname + ".gz" ) ) && ! i . Exists ( path . Join ( exercice . Path , "files" , fname + ".00" ) ) && ! i . Exists ( path . Join ( exercice . Path , "files" , fname + ".gz.00" ) ) {
2023-10-13 14:05:15 +00:00
if pf , exists := paramsFiles [ fname ] ; ! exists || pf . URL == "" {
2023-10-22 18:16:51 +00:00
if pf , exists := paramsFiles [ fname + ".gz" ] ; ! exists || pf . URL == "" {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , fname , fmt . Errorf ( "unable to read file: No such file or directory. Check your DIGESTS.txt for legacy entries." ) ) )
2023-10-22 18:16:51 +00:00
}
2023-10-13 14:05:15 +00:00
}
2020-01-16 17:53:27 +00:00
}
}
return
}
// CheckExerciceFiles checks that remote files have the right digest.
//
// For every file listed by BuildFilesListInto it: downloads it first when a
// remote URL is declared, verifies its digest against DIGESTS.txt, checks a
// gunzipped companion digest for "*.gz" files, renders the disclaimer, builds
// a dummy EFile and runs the registered file hooks on it. All problems are
// accumulated into errs; files that could at least be opened are returned.
func CheckExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (files []string, errs error) {
	flist, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = multierr.Append(errs, berrs)

	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		errs = multierr.Append(errs, NewChallengeTxtError(exercice, 0, err))
	}

	for _, fname := range flist {
		dest := path.Join(exercice.Path, "files", fname)

		// Fetch remote files first so the digest check below sees fresh data.
		if pf, exists := paramsFiles[fname]; exists && pf.URL != "" {
			if li, ok := i.(LocalImporter); ok {
				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], li.GetLocalPath(dest), exercice, false))
			} else {
				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], dest, exercice, false))
			}
		}

		if fd, closer, err := GetFile(i, dest); err != nil {
			errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
			continue
		} else {
			// NOTE(review): deferred inside the loop, so every opened file
			// stays open until the whole function returns — confirm this is
			// acceptable for the expected number of files per exercice.
			defer closer()

			hash160, hash512 := fic.CreateHashBuffers(fd)

			if _, err := fic.CheckBufferHash(hash160, hash512, digests[fname]); err != nil {
				errs = multierr.Append(errs, NewFileError(exercice, fname, err))
			} else if size, err := GetFileSize(i, path.Join(exercice.Path, "files", fname)); err != nil {
				errs = multierr.Append(errs, NewFileError(exercice, fname, err))
			} else {
				// For "x.gz", DIGESTS.txt may also carry a digest for the
				// inflated "x": that digest is shown to players and checked
				// against the decompressed content.
				var digest_shown []byte
				if strings.HasSuffix(fname, ".gz") {
					if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
						digest_shown = d

						// Check that gunzipped file digest is correct
						if fd, closer, err := GetFile(i, path.Join(exercice.Path, "files", fname)); err != nil {
							errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
							continue
						} else if gunzipfd, err := gzip.NewReader(fd); err != nil {
							closer()
							errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to gunzip file: %w", err)))
							continue
						} else {
							// NOTE(review): also deferred in-loop, see above.
							defer gunzipfd.Close()
							defer closer()

							hash160_inflate, hash512_inflate := fic.CreateHashBuffers(gunzipfd)

							if _, err := fic.CheckBufferHash(hash160_inflate, hash512_inflate, digest_shown); err != nil {
								// Report under the inflated name, matching DIGESTS.txt.
								errs = multierr.Append(errs, NewFileError(exercice, strings.TrimSuffix(fname, ".gz"), err))
							}
						}
					}
				}

				// Render the file's disclaimer (if declared) and run the
				// markdown text hooks over it.
				disclaimer := ""
				if f, exists := paramsFiles[fname]; exists {
					// Call checks hooks
					for _, hk := range hooks.mdTextHooks {
						for _, err := range multierr.Errors(hk(f.Disclaimer, exercice.Language, exceptions)) {
							errs = multierr.Append(errs, NewFileError(exercice, fname, err))
						}
					}

					if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
						errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
					}
				}

				file := exercice.NewDummyFile(path.Join(exercice.Path, "files", fname), GetDestinationFilePath(path.Join(exercice.Path, "files", fname), nil), (*hash512).Sum(nil), digest_shown, disclaimer, size)

				// Call checks hooks
				for _, h := range hooks.fileHooks {
					for _, e := range multierr.Errors(h(file, exercice, exceptions)) {
						errs = multierr.Append(errs, NewFileError(exercice, fname, e))
					}
				}
			}
		}

		// Reached whenever the file could be opened, even if a digest or
		// hook check above recorded an error.
		files = append(files, fname)
	}

	return
}
2023-11-25 17:38:12 +00:00
// DownloadExerciceFile is responsible to fetch remote files.
func DownloadExerciceFile ( pf ExerciceFile , dest string , exercice * fic . Exercice , force bool ) ( errs error ) {
2023-10-18 09:42:48 +00:00
if st , err := os . Stat ( dest ) ; ! force && ! os . IsNotExist ( err ) {
resp , err := http . Head ( pf . URL )
if err == nil && resp . ContentLength == st . Size ( ) {
return
}
}
if ! isURLAllowed ( pf . URL ) {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , path . Base ( dest ) , fmt . Errorf ( "URL hostname is not whitelisted" ) ) )
2023-10-18 09:42:48 +00:00
return
}
2023-11-22 11:16:53 +00:00
log . Println ( "Download exercice file: " , pf . URL )
2023-10-18 09:42:48 +00:00
resp , err := http . Get ( pf . URL )
if err != nil {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , path . Base ( dest ) , err ) )
2023-10-18 09:42:48 +00:00
return
}
defer resp . Body . Close ( )
if err = os . MkdirAll ( path . Dir ( dest ) , 0751 ) ; err != nil {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , path . Base ( dest ) , err ) )
2023-10-18 09:42:48 +00:00
return
}
// Write file
var fdto * os . File
if fdto , err = os . Create ( dest ) ; err != nil {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , path . Base ( dest ) , err ) )
2023-10-18 09:42:48 +00:00
return
} else {
defer fdto . Close ( )
_ , err = io . Copy ( fdto , resp . Body )
if err != nil {
2023-11-22 11:16:53 +00:00
errs = multierr . Append ( errs , NewFileError ( exercice , path . Base ( dest ) , err ) )
2023-10-18 09:42:48 +00:00
return
}
}
return
}
// SyncExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
// It takes care of DIGESTS.txt and ensure imported files match.
func SyncExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (errs error) {
	// Start from a clean slate: drop files previously registered for this exercice.
	if _, err := exercice.WipeFiles(); err != nil {
		errs = multierr.Append(errs, err)
	}

	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		// Without the file parameters nothing sensible can be imported.
		errs = multierr.Append(errs, NewChallengeTxtError(exercice, 0, err))
		return
	}

	files, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = multierr.Append(errs, berrs)

	// Import standard files
	for _, fname := range files {
		// actionAfterImport registers the (possibly downloaded) file with the
		// exercice: it resolves the shown digest for gzipped files, applies
		// the Hidden flag and renders the disclaimer, then calls ImportFile.
		// It captures fname and errs from the enclosing loop iteration.
		actionAfterImport := func(filePath string, origin string) (interface{}, error) {
			// For "x.gz", the digest of the inflated "x" (when present in
			// DIGESTS.txt) is the one displayed to players.
			var digest_shown []byte
			if strings.HasSuffix(fname, ".gz") {
				if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
					digest_shown = d
				}
			}

			published := true
			disclaimer := ""
			if f, exists := paramsFiles[fname]; exists {
				published = !f.Hidden

				// Call checks hooks
				for _, hk := range hooks.mdTextHooks {
					for _, err := range multierr.Errors(hk(f.Disclaimer, exercice.Language, exceptions)) {
						errs = multierr.Append(errs, NewFileError(exercice, fname, err))
					}
				}

				if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
					errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
				}
			}

			return exercice.ImportFile(filePath, origin, digests[fname], digest_shown, disclaimer, published)
		}

		var f interface{}
		if pf, exists := paramsFiles[fname]; exists && pf.URL != "" {
			// Remote file: try to reuse an already-downloaded copy first,
			// and only download when that import did not succeed.
			dest := GetDestinationFilePath(pf.URL, &pf.Filename)

			if _, err := os.Stat(dest); !os.IsNotExist(err) {
				if d, err := actionAfterImport(dest, pf.URL); err == nil {
					f = d
				}
			}

			if f == nil {
				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], dest, exercice, false))

				f, err = actionAfterImport(dest, pf.URL)
			}
		} else {
			// Local file: delegate chunk merging & co to the importer.
			f, err = i.importFile(path.Join(exercice.Path, "files", fname), actionAfterImport)
		}

		// NOTE(review): when f was set by the first actionAfterImport above,
		// err here is still nil from GetExerciceFilesParams — verify that is
		// the intended success path.
		if err != nil {
			errs = multierr.Append(errs, NewFileError(exercice, fname, err))
			continue
		}

		if f.(*fic.EFile).Size == 0 {
			errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("imported file is empty!")))
		} else {
			file := f.(*fic.EFile)

			// Call checks hooks
			for _, h := range hooks.fileHooks {
				for _, e := range multierr.Errors(h(file, exercice, exceptions)) {
					errs = multierr.Append(errs, NewFileError(exercice, fname, e))
				}
			}

			// Create empty non-gziped file for nginx gzip-static module
			if len(file.ChecksumShown) > 0 && strings.HasSuffix(file.Name, ".gz") {
				file.Name = strings.TrimSuffix(file.Name, ".gz")
				file.Path = strings.TrimSuffix(file.Path, ".gz")
				fd, err := os.Create(path.Join(fic.FilesDir, file.Path))
				if err == nil {
					fd.Close()
					_, err = file.Update()
					if err != nil {
						log.Println("Unable to update file after .gz removal:", err.Error())
					}
				} else {
					log.Printf("Unable to create %q: %s", file.Path, err)
				}
			}
		}
	}

	return
}
2020-01-18 23:54:00 +00:00
// ApiGetRemoteExerciceFiles is an accessor to remote exercice files list.
2022-05-16 09:38:46 +00:00
func ApiGetRemoteExerciceFiles ( c * gin . Context ) {
2022-10-29 15:03:57 +00:00
theme , exceptions , errs := BuildTheme ( GlobalImporter , c . Params . ByName ( "thid" ) )
2020-01-18 23:54:00 +00:00
if theme != nil {
2023-07-09 17:05:58 +00:00
exercice , _ , _ , _ , _ , errs := BuildExercice ( GlobalImporter , theme , path . Join ( theme . Path , c . Params . ByName ( "exid" ) ) , nil , exceptions )
2020-01-18 23:54:00 +00:00
if exercice != nil {
2021-11-22 14:35:07 +00:00
files , digests , errs := BuildFilesListInto ( GlobalImporter , exercice , "files" )
2020-01-18 23:54:00 +00:00
if files != nil {
2021-11-22 14:35:07 +00:00
var ret [ ] * fic . EFile
2020-01-18 23:54:00 +00:00
for _ , fname := range files {
fPath := path . Join ( exercice . Path , "files" , fname )
2022-11-21 17:55:38 +00:00
fSize , _ := GetFileSize ( GlobalImporter , fPath )
2021-11-22 14:35:07 +00:00
ret = append ( ret , & fic . EFile {
2020-04-15 05:39:38 +00:00
Path : fPath ,
Name : fname ,
2020-01-18 23:54:00 +00:00
Checksum : digests [ fname ] ,
2020-04-15 05:39:38 +00:00
Size : fSize ,
2020-01-18 23:54:00 +00:00
} )
}
2022-05-16 09:38:46 +00:00
c . JSON ( http . StatusOK , ret )
2020-01-18 23:54:00 +00:00
} else {
2022-05-16 09:38:46 +00:00
c . AbortWithStatusJSON ( http . StatusBadRequest , gin . H { "errmsg" : fmt . Errorf ( "%q" , errs ) } )
return
2020-01-18 23:54:00 +00:00
}
} else {
2022-05-16 09:38:46 +00:00
c . AbortWithStatusJSON ( http . StatusBadRequest , gin . H { "errmsg" : fmt . Errorf ( "%q" , errs ) } )
return
2020-01-18 23:54:00 +00:00
}
} else {
2022-05-16 09:38:46 +00:00
c . AbortWithStatusJSON ( http . StatusBadRequest , gin . H { "errmsg" : fmt . Errorf ( "%q" , errs ) } )
return
2020-01-18 23:54:00 +00:00
}
}