package main

import (
    "bufio"
    "bytes"
    "errors"
    "flag"
    "fmt"
    "io/ioutil"
    "log"
    "os"
    "os/exec"
    "path"
    "path/filepath"
    "strconv"
    "strings"

    "go.uber.org/multierr"

    "srs.epita.fr/fic-server/admin/sync"
    "srs.epita.fr/fic-server/libfic"
)
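
// Defaults for the checker tunables; each of them can be overridden through the
// command-line flags defined in main.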
var (
    ignoreBinaryFileUnder = 1500000
    skipFileChecks        = false
    skipBinaryFileCheck   = false
    logMissingResolution  = false
)
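
// formatFileSize renders a size in bytes as a short human-readable string
// (B, KiB, MiB, GiB or TiB).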
func formatFileSize(size int) string {
    if size > 1000000000000 {
        return fmt.Sprintf("%.1f TiB", float64(size)/float64(1<<40))
    } else if size > 1000000000 {
        return fmt.Sprintf("%.1f GiB", float64(size)/float64(1<<30))
    } else if size > 1000000 {
        return fmt.Sprintf("%.1f MiB", float64(size)/float64(1<<20))
    } else if size > 1000 {
        return fmt.Sprintf("%.1f KiB", float64(size)/float64(1<<10))
    }
    return fmt.Sprintf("%d B", size)
}
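
// searchBinaryInGit walks the whole git history of edir (git log --all --numstat)
// looking for binary files committed directly to the repository, and returns a
// human-readable description of each offending file so it can be moved to Git-LFS.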
func searchBinaryInGit(edir string) (ret []string) {
    // Check if git exists and if we are in a git repo
    err := exec.Command("git", "-C", edir, "remote").Run()

    if err == nil {
        cmd := exec.Command("git", "-C", edir, "log", "--all", "--numstat", "--no-renames", "-z")
        var out bytes.Buffer
        cmd.Stdout = &out
        err := cmd.Run()

        if err == nil {
            alreadySeen := map[string]string{}
            commit := ""

            scanner := bufio.NewScanner(&out)
            // Split on \n and \0 (-z option)
            scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
                for i := 0; i < len(data); i++ {
                    if data[i] == '\n' || data[i] == '\000' {
                        return i + 1, data[:i], nil
                    }
                }
                if !atEOF {
                    return 0, nil, nil
                }
                return 0, data, bufio.ErrFinalToken
            })
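
            // Each numstat entry of the form "-\t-\t<path>" denotes a binary file;
            // its blob size at that commit is retrieved with `git ls-tree -r -l`.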
            for scanner.Scan() {
                if strings.HasPrefix(scanner.Text(), "commit ") {
                    commit = strings.TrimPrefix(scanner.Text(), "commit ")
                } else if strings.HasPrefix(scanner.Text(), "-\t-\t") {
                    fname := strings.TrimPrefix(scanner.Text(), "-\t-\t")

                    cmdfile := exec.Command("git", "-C", edir, "ls-tree", "-r", "-l", commit, fname)
                    var outfile bytes.Buffer
                    cmdfile.Stdout = &outfile
                    err = cmdfile.Run()
                    var fsize int = -1024
                    if err == nil {
                        fields := strings.Fields(outfile.String())
                        if len(fields) < 4 {
                            // This should be a file deletion
                            if _, ok := alreadySeen[fname]; !ok {
                                alreadySeen[fname] = fmt.Sprintf("%s (commit %s) deleted", fname, commit[:7])
                            }
                            continue
                        } else if fsize, err = strconv.Atoi(fields[3]); err == nil && fsize < ignoreBinaryFileUnder {
                            if _, ok := alreadySeen[fname]; !ok || skipBinaryFileCheck {
                                continue
                            }
                        } else if _, ok := alreadySeen[fname]; !ok && skipBinaryFileCheck {
                            alreadySeen[fname] = fmt.Sprintf("%s (commit %s) (size %s)", fname, commit[:7], formatFileSize(fsize))
                            continue
                        }
                    }
                    if as, ok := alreadySeen[fname]; ok && as != "" {
                        ret = append(ret, as)
                        alreadySeen[fname] = ""
                    }
                    ret = append(ret, fmt.Sprintf("%s (commit %s) (size %s)", fname, commit[:7], formatFileSize(fsize)))
                }
            }
        }
    }

    return
}
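
// checkExercice builds the exercice located in edir (inside the given theme),
// then runs the files, flags and hints checks on it, accumulating every error.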
func checkExercice(theme *fic.Theme, edir string, dmap *map[int64]*fic.Exercice, exceptions *sync.CheckExceptions) (errs error) {
    e, _, eid, exceptions, _, berrs := sync.BuildExercice(sync.GlobalImporter, theme, path.Join(theme.Path, edir), dmap, nil)
    errs = multierr.Append(errs, berrs)

    if e != nil {
        // Files
        var files []string
        var cerrs error
        if !skipFileChecks {
            files, cerrs = sync.CheckExerciceFiles(sync.GlobalImporter, e, exceptions)
            log.Printf("%d files checked.\n", len(files))
        } else {
            files, cerrs = sync.CheckExerciceFilesPresence(sync.GlobalImporter, e)
            log.Printf("%d files present but not checked (please check digests yourself).\n", len(files))
        }
        errs = multierr.Append(errs, cerrs)

        // Flags
        flags, cerrs := sync.CheckExerciceFlags(sync.GlobalImporter, e, files, exceptions)
        errs = multierr.Append(errs, cerrs)
        log.Printf("%d flags checked.\n", len(flags))

        // Hints
        hints, cerrs := sync.CheckExerciceHints(sync.GlobalImporter, e, exceptions)
        errs = multierr.Append(errs, cerrs)
        log.Printf("%d hints checked.\n", len(hints))

        if dmap != nil {
            (*dmap)[int64(eid)] = e
        }
    }

    return
}
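
// main parses the options, instantiates the importer, then checks every theme or
// exercice directory given on the command line, exiting non-zero if any error is
// reported.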
func main() {
    cloudDAVBase := ""
    cloudUsername := "fic"
    cloudPassword := ""
    localImporterDirectory := ""
    checkplugins := sync.CheckPluginList{}

    // Read parameters from environment
    if v, exists := os.LookupEnv("FICCLOUD_URL"); exists {
        cloudDAVBase = v
    }
    if v, exists := os.LookupEnv("FICCLOUD_USER"); exists {
        cloudUsername = v
    }
    if v, exists := os.LookupEnv("FICCLOUD_PASS"); exists {
        cloudPassword = v
    }

    // Read parameters from command line
    flag.StringVar(&localImporterDirectory, "localimport", localImporterDirectory,
        "Base directory where to find challenge files to import, local part")
    flag.StringVar(&cloudDAVBase, "clouddav", cloudDAVBase,
        "Base directory where to find challenge files to import, cloud part")
    flag.StringVar(&cloudUsername, "clouduser", cloudUsername, "Username used to sync")
    flag.StringVar(&cloudPassword, "cloudpass", cloudPassword, "Password used to sync")
    flag.BoolVar(&sync.AllowWIPExercice, "allow-wip-exercices", sync.AllowWIPExercice, "Are WIP exercices allowed?")
    flag.BoolVar(&fic.OptionalDigest, "optionaldigest", fic.OptionalDigest, "Is the digest required when importing files?")
    flag.BoolVar(&fic.StrongDigest, "strongdigest", fic.StrongDigest, "Are BLAKE2b digests required or is SHA-1 good enough?")
    flag.BoolVar(&skipFileChecks, "skipfiledigests", skipFileChecks, "Don't perform DIGESTS checks on files to speed up the checks")
    flag.BoolVar(&logMissingResolution, "skipresolution", logMissingResolution, "Don't fail if resolution.mp4 is absent")
    flag.BoolVar(&skipBinaryFileCheck, "skip-binary-file", skipBinaryFileCheck, "In Git-LFS check, don't warn about binary files")
    flag.IntVar(&ignoreBinaryFileUnder, "skip-binary-files-under", ignoreBinaryFileUnder, "In Git-LFS check, don't warn about files under this size")
    flag.Var(&checkplugins, "rules-plugins", "List of libraries containing other rules to check")
    flag.Var(&sync.RemoteFileDomainWhitelist, "remote-file-domain-whitelist", "List of domains which are allowed to store remote files")
    flag.Parse()

    // Do not display timestamp
    log.SetFlags(0)

    // Instantiate importer
    regenImporter := false
    if localImporterDirectory != "" {
        sync.GlobalImporter = sync.LocalImporter{Base: localImporterDirectory, Symlink: true}
    } else if cloudDAVBase != "" {
        sync.GlobalImporter, _ = sync.NewCloudImporter(cloudDAVBase, cloudUsername, cloudPassword)
    } else {
        // In this case, we want to treat the entire path given
        regenImporter = true
    }

    var err error

    // Create temporary directory for storing FILES/ content
    fic.FilesDir, err = ioutil.TempDir("", "fic-repochecker.")
    if err != nil {
        log.Fatal(err)
    }
    defer os.RemoveAll(fic.FilesDir)

    if sync.GlobalImporter != nil {
        log.Println("Using", sync.GlobalImporter.Kind())

        if themes, err := sync.GetThemes(sync.GlobalImporter); err != nil {
            log.Fatal(err)
        } else if len(flag.Args()) == 0 {
            log.Println("Existing themes:")
            for _, th := range themes {
                log.Println("-", th)
            }
            os.Exit(1)
        }
    } else if len(flag.Args()) == 0 {
        log.Fatal("No importer nor path given!")
    }

    // Load rules plugins
    for _, p := range checkplugins {
        if err := sync.LoadChecksPlugin(p); err != nil {
            log.Fatalf("Unable to load rule plugin %q: %s", p, err.Error())
        } else {
            log.Printf("Rules plugin %q successfully loaded", p)
        }
    }

    // Variable that holds the exit status
    hasError := false
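
    // Each remaining argument is a path to either a theme directory or a single
    // exercice directory.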
    for _, p := range flag.Args() {
        if regenImporter {
            var err error
            p, err = filepath.Abs(p)
            if err != nil {
                p = path.Clean(p)
            }
            sync.GlobalImporter = sync.LocalImporter{
                Base:    path.Dir(p),
                Symlink: true,
            }
            p = path.Base(p)
        }

        nberr := 0
        theme, exceptions, errs := sync.BuildTheme(sync.GlobalImporter, p)
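
        // A directory providing challenge.toml or challenge.txt is a single
        // exercice, not a theme.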
        if theme != nil && !sync.GlobalImporter.Exists(path.Join(p, "challenge.toml")) && !sync.GlobalImporter.Exists(path.Join(p, "challenge.txt")) {
            thiserrors := multierr.Errors(errs)
            nberr += len(thiserrors)
            for _, err := range thiserrors {
                log.Println(err)
            }

            exercices, err := sync.GetExercices(sync.GlobalImporter, theme)
            if err != nil {
                nberr += 1
                log.Println(err)
                continue
            }

            dmap := map[int64]*fic.Exercice{}

            for _, edir := range exercices {
                ex_exceptions := exceptions.GetFileExceptions(edir)

                for _, err := range multierr.Errors(checkExercice(theme, edir, &dmap, ex_exceptions)) {
                    log.Println(err.Error())

                    if logMissingResolution {
                        if e, ok := err.(*sync.ExerciceError); ok {
                            if errors.Is(e.GetError(), sync.ErrResolutionNotFound) {
                                continue
                            }
                        }
                    }

                    nberr += 1
                }
                log.Printf("================================== Exercice %q treated\n", edir)
            }

            bfile := searchBinaryInGit(path.Join(sync.GlobalImporter.(sync.LocalImporter).Base, p))
            if len(bfile) > 0 {
                fmt.Printf("\n")
                log.Println("There are some binary files in your git repository; they HAVE TO use LFS instead:")
                for _, f := range bfile {
                    log.Println(" -", f)
                }
            }

            fmt.Printf("\n")
            log.Printf("Theme %q treated. %d error(s) reported.\n\n", p, nberr)
        } else {
            log.Printf("This is not a theme directory; running checks as an exercice.\n\n")

            for _, err := range multierr.Errors(checkExercice(&fic.Theme{}, p, &map[int64]*fic.Exercice{}, nil)) {
                nberr += 1
                log.Println(err)
            }
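
            // Look for binary files committed in the repository history, but only
            // report those under the checked exercice directory.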
            bfile := searchBinaryInGit(path.Join(sync.GlobalImporter.(sync.LocalImporter).Base, p))
            if len(bfile) > 0 {
                fmt.Printf("\n")
                log.Println("There are some binary files in your git repository; they HAVE TO use LFS instead:")
                for _, f := range bfile {
                    if strings.HasPrefix(f, p) {
                        log.Println(" -", f)
                    }
                }
            }

            fmt.Printf("\n")
            log.Printf("Exercice %q treated. %d error(s) reported.\n", p, nberr)
        }

        if nberr > 0 {
            hasError = true
        }
    }

    if hasError {
        os.Exit(1)
    }
}