package sync

import (
	"compress/gzip"
	"encoding/hex"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"path"
	"strings"
	"unicode"

	"github.com/gin-gonic/gin"
	"go.uber.org/multierr"

	"srs.epita.fr/fic-server/libfic"
)

type remoteFileDomainWhitelist []string

func (l *remoteFileDomainWhitelist) String() string {
	return fmt.Sprintf("%v", *l)
}

func (l *remoteFileDomainWhitelist) Set(value string) error {
	*l = append(*l, value)
	return nil
}

var RemoteFileDomainWhitelist remoteFileDomainWhitelist

func isURLAllowed(in string) bool {
	if len(RemoteFileDomainWhitelist) == 0 {
		return true
	}

	u, err := url.Parse(in)
	if err != nil {
		return false
	}

	for _, t := range RemoteFileDomainWhitelist {
		if t == u.Host {
			return true
		}
	}

	return false
}

func BuildFilesListInto(i Importer, exercice *fic.Exercice, into string) (files []string, digests map[string][]byte, errs error) {
	// If no files directory, don't display error
	if !i.Exists(path.Join(exercice.Path, into)) {
		return
	}

	// Parse DIGESTS.txt
	if digs, err := GetFileContent(i, path.Join(exercice.Path, into, "DIGESTS.txt")); err != nil {
		errs = multierr.Append(errs, NewExerciceError(exercice, fmt.Errorf("unable to read %s: %w", path.Join(into, "DIGESTS.txt"), err)))
	} else {
		digests = map[string][]byte{}
		for nline, d := range strings.Split(digs, "\n") {
			if dsplt := strings.SplitN(d, " ", 2); len(dsplt) < 2 {
				errs = multierr.Append(errs, NewExerciceError(exercice, fmt.Errorf("unable to parse %s line %d: invalid format", path.Join(into, "DIGESTS.txt"), nline+1)))
				continue
			} else if hash, err := hex.DecodeString(dsplt[0]); err != nil {
				errs = multierr.Append(errs, NewExerciceError(exercice, fmt.Errorf("unable to parse %s line %d: %w", path.Join(into, "DIGESTS.txt"), nline+1, err)))
				continue
			} else {
				digests[strings.TrimFunc(dsplt[1], unicode.IsSpace)] = hash
			}
		}
	}

	// Read file list
	if flist, err := i.ListDir(path.Join(exercice.Path, into)); err != nil {
		errs = multierr.Append(errs, NewExerciceError(exercice, err))
	} else {
		for _, fname := range flist {
			if fname == "DIGESTS.txt" || fname == ".gitattributes" {
				continue
			}

			if matched, _ := path.Match("*.[0-9][0-9]", fname); matched {
				fname = fname[:len(fname)-3]
			} else if matched, _ := path.Match("*[0-9][0-9]", fname); matched {
				fname = fname[:len(fname)-2]
			} else if matched, _ := path.Match("*_MERGED", fname); matched {
				continue
			}

			fileFound := false
			for _, f := range files {
				if fname == f {
					fileFound = true
					break
				}
			}

			if !fileFound {
				files = append(files, fname)
			}
		}
	}

	// Complete with remote file names
	if paramsFiles, err := GetExerciceFilesParams(i, exercice); err == nil {
		for _, pf := range paramsFiles {
			if pf.URL != "" {
				found := false
				for _, file := range files {
					if file == pf.Filename {
						found = true
						break
					}
				}

				if !found {
					files = append(files, pf.Filename)
				}
			}
		}
	}

	return
}

// CheckExerciceFilesPresence limits remote checks to file presence; it does not
// download remote files to verify their digests.
func CheckExerciceFilesPresence(i Importer, exercice *fic.Exercice) (files []string, errs error) {
	flist, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = multierr.Append(errs, berrs)

	paramsFiles, _ := GetExerciceFilesParams(i, exercice)

	for _, fname := range flist {
		if !i.Exists(path.Join(exercice.Path, "files", fname)) && !i.Exists(path.Join(exercice.Path, "files", fname+".00")) {
			// File not found locally, is this a remote file?
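			// The file is absent from the files/ directory: fall back to the remote
			// entry declared for it in the exercice parameters. The entry has to
			// provide a URL, the URL host has to be whitelisted, and a HEAD request
			// has to answer with a status code below 300.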
			if pf, exists := paramsFiles[fname]; !exists || pf.URL == "" {
				errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("No such file or directory")))
				continue
			} else if !isURLAllowed(pf.URL) {
				errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("URL hostname is not whitelisted")))
				continue
			} else {
				resp, err := http.Head(pf.URL)
				if err != nil {
					errs = multierr.Append(errs, NewFileError(exercice, fname, err))
					continue
				}
				defer resp.Body.Close()

				if resp.StatusCode >= 300 {
					errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("Unexpected status code for the HTTP response: %d %s", resp.StatusCode, resp.Status)))
					continue
				}
			}
		}

		if _, ok := digests[fname]; !ok {
			errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to import file: No digest given")))
		} else {
			files = append(files, fname)
		}
	}

	for fname := range digests {
		if !i.Exists(path.Join(exercice.Path, "files", fname)) && !i.Exists(path.Join(exercice.Path, "files", fname+".gz")) && !i.Exists(path.Join(exercice.Path, "files", fname+".00")) && !i.Exists(path.Join(exercice.Path, "files", fname+".gz.00")) {
			if pf, exists := paramsFiles[fname]; !exists || pf.URL == "" {
				if pf, exists := paramsFiles[fname+".gz"]; !exists || pf.URL == "" {
					errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: No such file or directory. Check your DIGESTS.txt for legacy entries.")))
				}
			}
		}
	}

	return
}

// CheckExerciceFiles checks that remote files have the right digest.
func CheckExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (files []string, errs error) {
	flist, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = multierr.Append(errs, berrs)

	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		errs = multierr.Append(errs, NewChallengeTxtError(exercice, 0, err))
	}

	for _, fname := range flist {
		dest := path.Join(exercice.Path, "files", fname)

		if pf, exists := paramsFiles[fname]; exists && pf.URL != "" {
			if li, ok := i.(LocalImporter); ok {
				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], li.GetLocalPath(dest), exercice, false))
			} else {
				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], dest, exercice, false))
			}
		}

		if fd, closer, err := GetFile(i, dest); err != nil {
			errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
			continue
		} else {
			defer closer()

			hash160, hash512 := fic.CreateHashBuffers(fd)

			if _, err := fic.CheckBufferHash(hash160, hash512, digests[fname]); err != nil {
				errs = multierr.Append(errs, NewFileError(exercice, fname, err))
			} else if size, err := GetFileSize(i, path.Join(exercice.Path, "files", fname)); err != nil {
				errs = multierr.Append(errs, NewFileError(exercice, fname, err))
			} else {
				var digest_shown []byte
				if strings.HasSuffix(fname, ".gz") {
					if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
						digest_shown = d

						// Check that gunzipped file digest is correct
						if fd, closer, err := GetFile(i, path.Join(exercice.Path, "files", fname)); err != nil {
							errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
							continue
						} else if gunzipfd, err := gzip.NewReader(fd); err != nil {
							closer()
							errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to gunzip file: %w", err)))
							continue
						} else {
							defer gunzipfd.Close()
							defer closer()

							hash160_inflate, hash512_inflate := fic.CreateHashBuffers(gunzipfd)
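							// The inflated content is hashed and compared against the
							// digest recorded under the file name without its ".gz" suffix.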
							if _, err := fic.CheckBufferHash(hash160_inflate, hash512_inflate, digest_shown); err != nil {
								errs = multierr.Append(errs, NewFileError(exercice, strings.TrimSuffix(fname, ".gz"), err))
							}
						}
					}
				}

				disclaimer := ""
				if f, exists := paramsFiles[fname]; exists {
					// Call checks hooks
					for _, hk := range hooks.mdTextHooks {
						for _, err := range multierr.Errors(hk(f.Disclaimer, exercice.Language, exceptions)) {
							errs = multierr.Append(errs, NewFileError(exercice, fname, err))
						}
					}

					if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
						errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formatting of disclaimer: %w", err)))
					}
				}

				file := exercice.NewDummyFile(path.Join(exercice.Path, "files", fname), GetDestinationFilePath(path.Join(exercice.Path, "files", fname), nil), (*hash512).Sum(nil), digest_shown, disclaimer, size)

				// Call checks hooks
				for _, h := range hooks.fileHooks {
					for _, e := range multierr.Errors(h(file, exercice, exceptions)) {
						errs = multierr.Append(errs, NewFileError(exercice, fname, e))
					}
				}
			}
		}

		files = append(files, fname)
	}

	return
}

// DownloadExerciceFile is responsible for fetching remote files.
func DownloadExerciceFile(pf ExerciceFile, dest string, exercice *fic.Exercice, force bool) (errs error) {
	if st, err := os.Stat(dest); !force && !os.IsNotExist(err) {
		resp, err := http.Head(pf.URL)
		if err == nil && resp.ContentLength == st.Size() {
			return
		}
	}

	if !isURLAllowed(pf.URL) {
		errs = multierr.Append(errs, NewFileError(exercice, path.Base(dest), fmt.Errorf("URL hostname is not whitelisted")))
		return
	}

	log.Println("Download exercice file: ", pf.URL)

	resp, err := http.Get(pf.URL)
	if err != nil {
		errs = multierr.Append(errs, NewFileError(exercice, path.Base(dest), err))
		return
	}
	defer resp.Body.Close()

	if err = os.MkdirAll(path.Dir(dest), 0751); err != nil {
		errs = multierr.Append(errs, NewFileError(exercice, path.Base(dest), err))
		return
	}

	// Write file
	var fdto *os.File
	if fdto, err = os.Create(dest); err != nil {
		errs = multierr.Append(errs, NewFileError(exercice, path.Base(dest), err))
		return
	} else {
		defer fdto.Close()

		_, err = io.Copy(fdto, resp.Body)
		if err != nil {
			errs = multierr.Append(errs, NewFileError(exercice, path.Base(dest), err))
			return
		}
	}

	return
}
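// A minimal usage sketch (illustrative only; it assumes an ExerciceFile value pf and a
// destination path dest are already known). With force set to true, DownloadExerciceFile
// skips the Content-Length shortcut above and always re-fetches the remote file:
//
//	if err := DownloadExerciceFile(pf, dest, exercice, true); err != nil {
//		log.Println("forced re-download failed:", err)
//	}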

// SyncExerciceFiles reads the content of the files/ directory and imports it as EFile
// for the given challenge. It takes care of DIGESTS.txt and ensures imported files match.
func SyncExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (errs error) {
	if _, err := exercice.WipeFiles(); err != nil {
		errs = multierr.Append(errs, err)
	}

	paramsFiles, err := GetExerciceFilesParams(i, exercice)
	if err != nil {
		errs = multierr.Append(errs, NewChallengeTxtError(exercice, 0, err))
		return
	}

	files, digests, berrs := BuildFilesListInto(i, exercice, "files")
	errs = multierr.Append(errs, berrs)

	// Import standard files
	for _, fname := range files {
		actionAfterImport := func(filePath string, origin string) (interface{}, error) {
			var digest_shown []byte
			if strings.HasSuffix(fname, ".gz") {
				if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
					digest_shown = d
				}
			}

			published := true
			disclaimer := ""
			if f, exists := paramsFiles[fname]; exists {
				published = !f.Hidden

				// Call checks hooks
				for _, hk := range hooks.mdTextHooks {
					for _, err := range multierr.Errors(hk(f.Disclaimer, exercice.Language, exceptions)) {
						errs = multierr.Append(errs, NewFileError(exercice, fname, err))
					}
				}

				if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
					errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formatting of disclaimer: %w", err)))
				}
			}

			return exercice.ImportFile(filePath, origin, digests[fname], digest_shown, disclaimer, published)
		}

		var f interface{}
		if pf, exists := paramsFiles[fname]; exists && pf.URL != "" {
			dest := GetDestinationFilePath(pf.URL, &pf.Filename)

			if _, err := os.Stat(dest); !os.IsNotExist(err) {
				if d, err := actionAfterImport(dest, pf.URL); err == nil {
					f = d
				}
			}

			if f == nil {
				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], dest, exercice, false))
				f, err = actionAfterImport(dest, pf.URL)
			}
		} else {
			f, err = i.importFile(path.Join(exercice.Path, "files", fname), actionAfterImport)
		}

		if err != nil {
			errs = multierr.Append(errs, NewFileError(exercice, fname, err))
			continue
		}

		if f.(*fic.EFile).Size == 0 {
			errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("imported file is empty!")))
		} else {
			file := f.(*fic.EFile)

			// Call checks hooks
			for _, h := range hooks.fileHooks {
				for _, e := range multierr.Errors(h(file, exercice, exceptions)) {
					errs = multierr.Append(errs, NewFileError(exercice, fname, e))
				}
			}

			// Create empty non-gzipped file for nginx gzip-static module
			if len(file.ChecksumShown) > 0 && strings.HasSuffix(file.Name, ".gz") {
				file.Name = strings.TrimSuffix(file.Name, ".gz")
				file.Path = strings.TrimSuffix(file.Path, ".gz")

				fd, err := os.Create(path.Join(fic.FilesDir, file.Path))
				if err == nil {
					fd.Close()

					_, err = file.Update()
					if err != nil {
						log.Println("Unable to update file after .gz removal:", err.Error())
					}
				} else {
					log.Printf("Unable to create %q: %s", file.Path, err)
				}
			}
		}
	}

	return
}
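// For reference, BuildFilesListInto expects one entry per DIGESTS.txt line: a
// hex-encoded digest, a single space, then the file name (the digest algorithm is
// whichever one fic.CheckBufferHash accepts). Layout shown with placeholder values only:
//
//	<hex digest> my-archive.tar.gz
//	<hex digest> notes.txt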

// ApiGetRemoteExerciceFiles is an accessor to the remote exercice files list.
func ApiGetRemoteExerciceFiles(c *gin.Context) {
	theme, exceptions, errs := BuildTheme(GlobalImporter, c.Params.ByName("thid"))
	if theme != nil {
		exercice, _, _, _, _, errs := BuildExercice(GlobalImporter, theme, path.Join(theme.Path, c.Params.ByName("exid")), nil, exceptions)
		if exercice != nil {
			files, digests, errs := BuildFilesListInto(GlobalImporter, exercice, "files")
			if files != nil {
				var ret []*fic.EFile

				for _, fname := range files {
					fPath := path.Join(exercice.Path, "files", fname)
					fSize, _ := GetFileSize(GlobalImporter, fPath)

					ret = append(ret, &fic.EFile{
						Path:     fPath,
						Name:     fname,
						Checksum: digests[fname],
						Size:     fSize,
					})
				}

				c.JSON(http.StatusOK, ret)
			} else {
				c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("%q", errs)})
				return
			}
		} else {
			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("%q", errs)})
			return
		}
	} else {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"errmsg": fmt.Sprintf("%q", errs)})
		return
	}
}
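// remoteFileDomainWhitelist implements flag.Value (String and Set), so the binary that
// embeds this package can expose the whitelist as a repeatable command-line flag. A
// minimal sketch, with a hypothetical flag name chosen only for illustration:
//
//	flag.Var(&RemoteFileDomainWhitelist, "remote-file-domain", "hostname allowed for remote exercice files (repeatable)")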