server/admin/sync/exercice_files.go

297 lines
9.7 KiB
Go

package sync
import (
"compress/gzip"
"encoding/hex"
"fmt"
"log"
"net/http"
"os"
"path"
"strings"
"unicode"
"github.com/gin-gonic/gin"
"srs.epita.fr/fic-server/libfic"
)
// BuildFilesListInto builds the list of file names to import for the given
// exercice from its "into" subdirectory, along with the digests parsed from
// DIGESTS.txt (file name -> raw digest bytes). All encountered problems are
// accumulated into errs; the returned lists may be partially filled.
func BuildFilesListInto(i Importer, exercice *fic.Exercice, into string) (files []string, digests map[string][]byte, errs []error) {
	// If no files directory, don't display error
	if !i.exists(path.Join(exercice.Path, into)) {
		return
	}

	// Parse DIGESTS.txt: each line is "<hex digest> <file name>".
	if digs, err := GetFileContent(i, path.Join(exercice.Path, into, "DIGESTS.txt")); err != nil {
		errs = append(errs, NewExerciceError(exercice, fmt.Errorf("unable to read %s: %w", path.Join(into, "DIGESTS.txt"), err)))
	} else {
		digests = map[string][]byte{}
		for nline, d := range strings.Split(digs, "\n") {
			// Skip blank lines — in particular the empty trailing element
			// produced by the file's final newline — instead of reporting
			// a spurious "invalid format" error for them.
			if strings.TrimSpace(d) == "" {
				continue
			}

			if dsplt := strings.SplitN(d, " ", 2); len(dsplt) < 2 {
				errs = append(errs, NewExerciceError(exercice, fmt.Errorf("unable to parse %s line %d: invalid format", path.Join(into, "DIGESTS.txt"), nline+1)))
				continue
			} else if hash, err := hex.DecodeString(dsplt[0]); err != nil {
				errs = append(errs, NewExerciceError(exercice, fmt.Errorf("unable to parse %s line %d: %w", path.Join(into, "DIGESTS.txt"), nline+1, err)))
				continue
			} else {
				digests[strings.TrimFunc(dsplt[1], unicode.IsSpace)] = hash
			}
		}
	}

	// Read file list
	if flist, err := i.listDir(path.Join(exercice.Path, into)); err != nil {
		errs = append(errs, NewExerciceError(exercice, err))
	} else {
		for _, fname := range flist {
			if fname == "DIGESTS.txt" || fname == ".gitattributes" {
				continue
			}

			// Names ending in two digits (with or without a dot, eg.
			// "archive.tar.00") are treated as split-file parts and
			// collapsed onto their base name; "*_MERGED" artifacts are
			// skipped entirely.
			if matched, _ := path.Match("*.[0-9][0-9]", fname); matched {
				fname = fname[:len(fname)-3]
			} else if matched, _ := path.Match("*[0-9][0-9]", fname); matched {
				fname = fname[:len(fname)-2]
			} else if matched, _ := path.Match("*_MERGED", fname); matched {
				continue
			}

			// Deduplicate while preserving first-seen order.
			fileFound := false
			for _, f := range files {
				if fname == f {
					fileFound = true
					break
				}
			}
			if !fileFound {
				files = append(files, fname)
			}
		}
	}
	return
}
// CheckExerciceFilesPresence limits remote checks to presence, don't get it to check digest.
func CheckExerciceFilesPresence(i Importer, exercice *fic.Exercice) (files []string, errs []error) {
	flist, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = append(errs, berrs...)

	// A file is retained only when it both exists remotely and has a
	// matching entry in DIGESTS.txt.
	for _, fname := range flist {
		fpath := path.Join(exercice.Path, "files", fname)

		if !i.exists(fpath) {
			errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("No such file or directory")))
			continue
		}

		if _, hasDigest := digests[fname]; !hasDigest {
			errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to import file: No digest given")))
			continue
		}

		files = append(files, fname)
	}

	// Conversely, each digest entry has to point at an existing file.
	for fname := range digests {
		if !i.exists(path.Join(exercice.Path, "files", fname)) {
			errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: No such file or directory. Check your DIGESTS.txt for legacy entries.")))
		}
	}
	return
}
// CheckExerciceFiles checks that remote files have the right digest.
// It also verifies gunzipped content of .gz files when DIGESTS.txt carries a
// digest for the uncompressed name, renders each file's disclaimer and runs
// the registered file check hooks.
func CheckExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (files []string, errs []error) {
	flist, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = append(errs, berrs...)

	// Files parameters are per-exercice, not per-file: fetch them once
	// instead of on each loop iteration (same as SyncExerciceFiles does).
	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		errs = append(errs, NewChallengeTxtError(exercice, 0, err))
		return
	}

	for _, fname := range flist {
		// Each file is processed in its own closure so that the deferred
		// closers run at the end of the iteration. Previously they were
		// deferred in the loop body directly, keeping every file handle
		// open until the whole function returned.
		keep := func() bool {
			fd, closer, err := GetFile(i, path.Join(exercice.Path, "files", fname))
			if err != nil {
				errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
				return false
			}
			defer closer()

			hash160, hash512 := fic.CreateHashBuffers(fd)
			if _, err := fic.CheckBufferHash(hash160, hash512, digests[fname]); err != nil {
				// Digest mismatch: report it but still list the file.
				errs = append(errs, NewFileError(exercice, fname, err))
				return true
			}

			size, err := GetFileSize(i, path.Join(exercice.Path, "files", fname))
			if err != nil {
				errs = append(errs, NewFileError(exercice, fname, err))
				return true
			}

			// When a .gz file ships a digest for its uncompressed name,
			// that digest is the one shown to players; check it against
			// the inflated content.
			var digestShown []byte
			if strings.HasSuffix(fname, ".gz") {
				if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
					digestShown = d

					gzfd, gzcloser, err := GetFile(i, path.Join(exercice.Path, "files", fname))
					if err != nil {
						errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
						return false
					}
					gunzipfd, err := gzip.NewReader(gzfd)
					if err != nil {
						gzcloser()
						errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to gunzip file: %w", err)))
						return false
					}
					defer gunzipfd.Close()
					defer gzcloser()

					hash160Inflate, hash512Inflate := fic.CreateHashBuffers(gunzipfd)
					if _, err := fic.CheckBufferHash(hash160Inflate, hash512Inflate, digestShown); err != nil {
						errs = append(errs, NewFileError(exercice, strings.TrimSuffix(fname, ".gz"), err))
					}
				}
			}

			disclaimer := ""
			if f, exists := paramsFiles[fname]; exists {
				// Call checks hooks on the raw disclaimer text.
				for _, hk := range hooks.mdTextHooks {
					for _, err := range hk(f.Disclaimer, exercice.Language, exceptions) {
						errs = append(errs, NewFileError(exercice, fname, err))
					}
				}
				if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
					errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
				}
			}

			// Build a transient EFile to feed the file check hooks.
			file := exercice.NewDummyFile(path.Join(exercice.Path, "files", fname), getDestinationFilePath(path.Join(exercice.Path, "files", fname)), (*hash512).Sum(nil), digestShown, disclaimer, size)
			for _, h := range hooks.fileHooks {
				for _, e := range h(file, exercice, exceptions) {
					errs = append(errs, NewFileError(exercice, fname, e))
				}
			}
			return true
		}()

		if keep {
			files = append(files, fname)
		}
	}
	return
}
// SyncExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
// It takes care of DIGESTS.txt and ensure imported files match.
func SyncExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (errs []error) {
	// Start from a clean slate: drop any file imported by a previous sync.
	if _, err := exercice.WipeFiles(); err != nil {
		errs = append(errs, err)
	}

	// Per-file parameters (hidden flag, disclaimer) from the exercice
	// definition; a parse failure aborts the whole sync.
	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		errs = append(errs, NewChallengeTxtError(exercice, 0, err))
		return
	}

	files, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = append(errs, berrs...)

	// Import standard files
	for _, fname := range files {
		if f, err := i.importFile(path.Join(exercice.Path, "files", fname),
			// Callback invoked by the importer once the file content is
			// available locally; it registers the file on the exercice.
			func(filePath string, origin string) (interface{}, error) {
				// For a gzipped file whose DIGESTS.txt also carries a digest
				// under the uncompressed name, that digest is the one shown
				// (it applies to the inflated content).
				var digest_shown []byte
				if strings.HasSuffix(fname, ".gz") {
					if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
						digest_shown = d
					}
				}

				published := true
				disclaimer := ""
				if f, exists := paramsFiles[fname]; exists {
					published = !f.Hidden

					// Call checks hooks
					for _, hk := range hooks.mdTextHooks {
						for _, err := range hk(f.Disclaimer, exercice.Language, exceptions) {
							errs = append(errs, NewFileError(exercice, fname, err))
						}
					}

					// NOTE(review): assigns the enclosing function's err on
					// purpose; a markdown failure is reported but does not
					// abort the import of this file.
					if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
						errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
					}
				}

				return exercice.ImportFile(filePath, origin, digests[fname], digest_shown, disclaimer, published)
			}); err != nil {
			errs = append(errs, NewFileError(exercice, fname, err))
			continue
		} else if f.(*fic.EFile).Size == 0 {
			// An empty file almost certainly means a broken import.
			errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("imported file is empty!")))
		} else {
			file := f.(*fic.EFile)

			// Call checks hooks
			for _, h := range hooks.fileHooks {
				for _, e := range h(file, exercice, exceptions) {
					errs = append(errs, NewFileError(exercice, fname, e))
				}
			}

			// Create empty non-gziped file for nginx gzip-static module
			// (the entry is renamed without its .gz suffix, and an empty
			// placeholder is created at the uncompressed path so nginx
			// serves the .gz variant transparently).
			if len(file.ChecksumShown) > 0 && strings.HasSuffix(file.Name, ".gz") {
				file.Name = strings.TrimSuffix(file.Name, ".gz")
				file.Path = strings.TrimSuffix(file.Path, ".gz")
				fd, err := os.Create(path.Join(fic.FilesDir, file.Path))
				if err == nil {
					fd.Close()

					// Persist the renamed Name/Path in the database.
					_, err = file.Update()
					if err != nil {
						log.Println("Unable to update file after .gz removal:", err.Error())
					}
				} else {
					log.Printf("Unable to create %q: %s", file.Path, err)
				}
			}
		}
	}
	return
}
// ApiGetRemoteExerciceFiles is an accessor to remote exercice files list.
// It answers with the files (path, name, checksum, size) of the exercice
// designated by the "thid" and "exid" URL parameters, or a 400 carrying the
// accumulated build errors.
func ApiGetRemoteExerciceFiles(c *gin.Context) {
	theme, exceptions, errs := BuildTheme(GlobalImporter, c.Params.ByName("thid"))
	if theme == nil {
		// BUG FIX: the error list was previously wrapped with fmt.Errorf,
		// and an error value serializes to "{}" in JSON; use a string so
		// the client actually sees the messages.
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("%q", errs)})
		return
	}

	exercice, _, _, _, errs := BuildExercice(GlobalImporter, theme, path.Join(theme.Path, c.Params.ByName("exid")), nil, exceptions)
	if exercice == nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("%q", errs)})
		return
	}

	files, digests, errs := BuildFilesListInto(GlobalImporter, exercice, "files")
	if files == nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("%q", errs)})
		return
	}

	ret := make([]*fic.EFile, 0, len(files))
	for _, fname := range files {
		fPath := path.Join(exercice.Path, "files", fname)
		// Size errors are deliberately ignored: the entry is still listed
		// with a zero size.
		fSize, _ := GetFileSize(GlobalImporter, fPath)
		ret = append(ret, &fic.EFile{
			Path:     fPath,
			Name:     fname,
			Checksum: digests[fname],
			Size:     fSize,
		})
	}
	c.JSON(http.StatusOK, ret)
}