// server/repochecker/main.go
package main
import (
"bufio"
"bytes"
"errors"
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path"
"path/filepath"
"strconv"
"strings"
"go.uber.org/multierr"
"srs.epita.fr/fic-server/admin/sync"
"srs.epita.fr/fic-server/libfic"
)
var (
	// ignoreBinaryFileUnder is the size threshold, in bytes, below which
	// binary files found in the git history are not reported
	// (flag: -skip-binary-files-under).
	ignoreBinaryFileUnder = 1500000
	// skipFileChecks disables DIGESTS verification of exercice files
	// (flag: -skipfiledigests); only their presence is then checked.
	skipFileChecks = false
	// skipBinaryFileCheck alters how binary files are reported by the
	// Git-LFS scan (flag: -skip-binary-file). NOTE(review): its exact
	// effect inside searchBinaryInGit is subtle — confirm intent there.
	skipBinaryFileCheck = false
	// logMissingResolution, when set (flag: -skipresolution), logs a
	// missing resolution.mp4 without counting it as an error.
	logMissingResolution = false
)
// formatFileSize renders a byte count as a human-readable string using
// binary units (KiB, MiB, GiB, TiB), keeping one decimal digit.
//
// Fix: the thresholds now match the binary divisors (1<<10, 1<<20, ...).
// Previously decimal thresholds (1000, 1000000, ...) were mixed with
// binary divisors, so e.g. 1023 bytes was rendered as "1.0 KiB".
func formatFileSize(size int) string {
	switch {
	case size >= 1<<40:
		return fmt.Sprintf("%.1f TiB", float64(size)/float64(1<<40))
	case size >= 1<<30:
		return fmt.Sprintf("%.1f GiB", float64(size)/float64(1<<30))
	case size >= 1<<20:
		return fmt.Sprintf("%.1f MiB", float64(size)/float64(1<<20))
	case size >= 1<<10:
		return fmt.Sprintf("%.1f KiB", float64(size)/float64(1<<10))
	default:
		return fmt.Sprintf("%d B", size)
	}
}
// searchBinaryInGit scans the whole git history of edir for binary files
// (files for which git cannot compute line stats) and returns one
// descriptive string per offending file, so the caller can suggest moving
// them to Git-LFS.
//
// If git is unavailable, edir is not a repository, or any git command
// fails, the function silently returns what it has gathered so far.
func searchBinaryInGit(edir string) (ret []string) {
	// Check if git exists and if we are in a git repo
	err := exec.Command("git", "-C", edir, "remote").Run()
	if err == nil {
		cmd := exec.Command("git", "-C", edir, "log", "--all", "--numstat", "--no-renames", "-z")
		var out bytes.Buffer
		cmd.Stdout = &out
		err := cmd.Run()
		if err == nil {
			// alreadySeen maps file name -> pending report line; an entry
			// is set to "" once it has been flushed into ret, so each file
			// is only introduced once.
			alreadySeen := map[string]string{}
			// commit holds the hash of the commit currently being parsed.
			commit := ""
			scanner := bufio.NewScanner(&out)
			// Split on \n and \0 (-z option)
			scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
				for i := 0; i < len(data); i++ {
					if data[i] == '\n' || data[i] == '\000' {
						return i + 1, data[:i], nil
					}
				}
				if !atEOF {
					// Need more data to find a separator.
					return 0, nil, nil
				}
				// Emit whatever remains as the last token.
				return 0, data, bufio.ErrFinalToken
			})
			for scanner.Scan() {
				if strings.HasPrefix(scanner.Text(), "commit ") {
					// Header line of the next commit: remember its hash for
					// the ls-tree lookups below.
					commit = strings.TrimPrefix(scanner.Text(), "commit ")
				} else if strings.HasPrefix(scanner.Text(), "-\t-\t") {
					// "-\t-\t<path>" is how --numstat reports a binary file
					// (no added/deleted line counts).
					fname := strings.TrimPrefix(scanner.Text(), "-\t-\t")
					// Ask git for the blob size of this file at this commit
					// (ls-tree -l appends the object size column).
					cmdfile := exec.Command("git", "-C", edir, "ls-tree", "-r", "-l", commit, fname)
					var outfile bytes.Buffer
					cmdfile.Stdout = &outfile
					err = cmdfile.Run()
					// Sentinel size used when the size could not be read.
					var fsize int = -1024
					if err == nil {
						fields := strings.Fields(outfile.String())
						if len(fields) < 4 {
							// This should be a file deletion
							if _, ok := alreadySeen[fname]; !ok {
								alreadySeen[fname] = fmt.Sprintf("%s (commit %s) deleted", fname, commit[:7])
							}
							continue
						} else if fsize, err = strconv.Atoi(fields[3]); err == nil && fsize < ignoreBinaryFileUnder {
							// Small binary file: skip unless it was already
							// recorded and we are not in skip mode.
							if _, ok := alreadySeen[fname]; !ok || skipBinaryFileCheck {
								continue
							}
						} else if _, ok := alreadySeen[fname]; !ok && skipBinaryFileCheck {
							// Skip mode: remember the first big occurrence but
							// do not report it yet.
							alreadySeen[fname] = fmt.Sprintf("%s (commit %s) (size %s)", fname, commit[:7], formatFileSize(fsize))
							continue
						}
					}
					// Flush the pending report for this file, if any, before
					// appending the current occurrence.
					if as, ok := alreadySeen[fname]; ok && as != "" {
						ret = append(ret, as)
						alreadySeen[fname] = ""
					}
					ret = append(ret, fmt.Sprintf("%s (commit %s) (size %s)", fname, commit[:7], formatFileSize(fsize)))
				}
			}
		}
	}
	return
}
// checkExercice builds the exercice located in edir (inside the given
// theme) and runs all file, flag and hint checks on it, returning the
// accumulated errors as a multierr.
//
// dmap, when non-nil, receives the built exercice indexed by its id so
// later exercices can reference it.
//
// Fix: the exceptions parameter was previously ignored — nil was passed
// to BuildExercice and the parameter immediately overwritten by its
// return value. It is now forwarded, so per-exercice check exceptions
// computed by the caller take effect.
func checkExercice(theme *fic.Theme, edir string, dmap *map[int64]*fic.Exercice, exceptions *sync.CheckExceptions) (errs error) {
	// BuildExercice may refine the exception list; the refined version is
	// used for all subsequent checks.
	e, _, eid, exceptions, _, berrs := sync.BuildExercice(sync.GlobalImporter, theme, path.Join(theme.Path, edir), dmap, exceptions)
	errs = multierr.Append(errs, berrs)

	if e == nil {
		return
	}

	// Files
	var files []string
	var cerrs error
	if !skipFileChecks {
		files, cerrs = sync.CheckExerciceFiles(sync.GlobalImporter, e, exceptions)
		log.Printf("%d files checked.\n", len(files))
	} else {
		files, cerrs = sync.CheckExerciceFilesPresence(sync.GlobalImporter, e)
		log.Printf("%d files presents but not checked (please check digest yourself).\n", len(files))
	}
	errs = multierr.Append(errs, cerrs)

	// Flags
	flags, cerrs := sync.CheckExerciceFlags(sync.GlobalImporter, e, files, exceptions)
	errs = multierr.Append(errs, cerrs)
	log.Printf("%d flags checked.\n", len(flags))

	// Hints
	hints, cerrs := sync.CheckExerciceHints(sync.GlobalImporter, e, exceptions)
	errs = multierr.Append(errs, cerrs)
	log.Printf("%d hints checked.\n", len(hints))

	if dmap != nil {
		(*dmap)[int64(eid)] = e
	}
	return
}
// main drives the repository checker: it reads its configuration from the
// environment and the command line, instantiates an importer (local
// directory or cloud/WebDAV), then validates each theme or exercice
// directory passed as argument. The process exits with status 1 when any
// check reported an error.
func main() {
	cloudDAVBase := ""
	cloudUsername := "fic"
	cloudPassword := ""
	localImporterDirectory := ""
	checkplugins := sync.CheckPluginList{}

	// Read parameters from environment
	if v, exists := os.LookupEnv("FICCLOUD_URL"); exists {
		cloudDAVBase = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_USER"); exists {
		cloudUsername = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_PASS"); exists {
		cloudPassword = v
	}

	// Read parameters from command line
	flag.StringVar(&localImporterDirectory, "localimport", localImporterDirectory,
		"Base directory where to find challenges files to import, local part")
	flag.StringVar(&cloudDAVBase, "clouddav", cloudDAVBase,
		"Base directory where to find challenges files to import, cloud part")
	flag.StringVar(&cloudUsername, "clouduser", cloudUsername, "Username used to sync")
	flag.StringVar(&cloudPassword, "cloudpass", cloudPassword, "Password used to sync")
	flag.BoolVar(&sync.AllowWIPExercice, "allow-wip-exercices", sync.AllowWIPExercice, "Are WIP exercice allowed?")
	flag.BoolVar(&fic.OptionalDigest, "optionaldigest", fic.OptionalDigest, "Is the digest required when importing files?")
	flag.BoolVar(&fic.StrongDigest, "strongdigest", fic.StrongDigest, "Are BLAKE2b digests required or is SHA-1 good enough?")
	flag.BoolVar(&skipFileChecks, "skipfiledigests", skipFileChecks, "Don't perform DIGESTS checks on file to speed up the checks")
	// NOTE(review): flag name says "skip" but the variable only downgrades
	// the missing-resolution error to a log line — confirm naming intent.
	flag.BoolVar(&logMissingResolution, "skipresolution", logMissingResolution, "Don't fail if resolution.mp4 is absent")
	flag.BoolVar(&skipBinaryFileCheck, "skip-binary-file", skipBinaryFileCheck, "In Git-LFS check, don't warn files")
	flag.IntVar(&ignoreBinaryFileUnder, "skip-binary-files-under", ignoreBinaryFileUnder, "In Git-LFS check, don't warn files under this size")
	flag.Var(&checkplugins, "rules-plugins", "List of libraries containing others rules to checks")
	flag.Var(&sync.RemoteFileDomainWhitelist, "remote-file-domain-whitelist", "List of domains which are allowed to store remote files")
	flag.Parse()

	// Do not display timestamp
	log.SetFlags(0)

	// Instantiate importer
	regenImporter := false
	if localImporterDirectory != "" {
		sync.GlobalImporter = sync.LocalImporter{Base: localImporterDirectory, Symlink: true}
	} else if cloudDAVBase != "" {
		// Fix: the error from NewCloudImporter used to be discarded, which
		// later surfaced as a confusing "No importer nor path given!".
		var cerr error
		if sync.GlobalImporter, cerr = sync.NewCloudImporter(cloudDAVBase, cloudUsername, cloudPassword); cerr != nil {
			log.Fatal(cerr)
		}
	} else {
		// In this case, we want to treat the entire path given
		regenImporter = true
	}

	var err error
	// Create temporary directory for storing FILES/ content
	fic.FilesDir, err = ioutil.TempDir("", "fic-repochecker.")
	if err != nil {
		// Fix: this error was previously checked but silently ignored
		// (empty if body); fail fast instead.
		log.Fatal(err)
	}
	defer os.RemoveAll(fic.FilesDir)

	if sync.GlobalImporter != nil {
		log.Println("Using", sync.GlobalImporter.Kind())

		if themes, err := sync.GetThemes(sync.GlobalImporter); err != nil {
			log.Fatal(err)
		} else if len(flag.Args()) == 0 {
			// Without arguments, just list the available themes and exit.
			log.Println("Existing themes:")
			for _, th := range themes {
				log.Println("-", th)
			}
			os.Exit(1)
		}
	} else if len(flag.Args()) == 0 {
		log.Fatal("No importer nor path given!")
	}

	// Load rules plugins
	for _, p := range checkplugins {
		if err := sync.LoadChecksPlugin(p); err != nil {
			log.Fatalf("Unable to load rule plugin %q: %s", p, err.Error())
		} else {
			log.Printf("Rules plugin %q successfully loaded", p)
		}
	}

	// Variable that handles the exit status
	hasError := false

	for _, p := range flag.Args() {
		if regenImporter {
			// Build a LocalImporter rooted at the parent directory of the
			// given path; only the last path element is then checked.
			var err error
			p, err = filepath.Abs(p)
			if err != nil {
				// Fall back to the cleaned relative path.
				p = path.Clean(p)
			}
			sync.GlobalImporter = sync.LocalImporter{
				Base:    path.Dir(p),
				Symlink: true,
			}
			p = path.Base(p)
		}

		nberr := 0

		theme, exceptions, errs := sync.BuildTheme(sync.GlobalImporter, p)
		if theme != nil && !sync.GlobalImporter.Exists(path.Join(p, "challenge.txt")) && !sync.GlobalImporter.Exists(path.Join(p, "challenge.toml")) {
			thiserrors := multierr.Errors(errs)
			nberr += len(thiserrors)
			for _, err := range thiserrors {
				log.Println(err)
			}

			exercices, err := sync.GetExercices(sync.GlobalImporter, theme)
			if err != nil {
				nberr += 1
				log.Println(err)
				continue
			}

			dmap := map[int64]*fic.Exercice{}
			for _, edir := range exercices {
				ex_exceptions := exceptions.GetFileExceptions(edir)
				for _, err := range multierr.Errors(checkExercice(theme, edir, &dmap, ex_exceptions)) {
					log.Println(err.Error())

					if logMissingResolution {
						// A missing resolution.mp4 is logged above but not
						// counted as an error when -skipresolution is set.
						if e, ok := err.(*sync.ExerciceError); ok {
							if errors.Is(e.GetError(), sync.ErrResolutionNotFound) {
								continue
							}
						}
					}

					nberr += 1
				}
				log.Printf("================================== Exercice %q treated\n", edir)
			}

			// Fix: guard the type assertion — with a cloud importer this
			// used to panic. The Git-LFS scan only makes sense locally.
			if li, ok := sync.GlobalImporter.(sync.LocalImporter); ok {
				bfile := searchBinaryInGit(path.Join(li.Base, p))
				if len(bfile) > 0 {
					fmt.Printf("\n")
					log.Println("There are some binary files in your git repository, they HAVE TO use LFS instead:")
					for _, f := range bfile {
						log.Println(" -", f)
					}
				}
			}

			fmt.Printf("\n")
			log.Printf("Theme %q treated. %d error(s) reported.\n\n", p, nberr)
		} else {
			log.Printf("This is not a theme directory, run checks for exercice.\n\n")

			for _, err := range multierr.Errors(checkExercice(&fic.Theme{}, p, &map[int64]*fic.Exercice{}, nil)) {
				nberr += 1
				log.Println(err)
			}

			// Fix: same unchecked type assertion guard as above.
			if li, ok := sync.GlobalImporter.(sync.LocalImporter); ok {
				bfile := searchBinaryInGit(path.Join(li.Base, p))
				if len(bfile) > 0 {
					fmt.Printf("\n")
					log.Println("There are some binary files in your git repository, they HAVE TO use LFS instead:")
					for _, f := range bfile {
						if strings.HasPrefix(f, p) {
							log.Println(" -", f)
						}
					}
				}
			}

			fmt.Printf("\n")
			log.Printf("Exercice %q treated. %d error(s) reported.\n", p, nberr)
		}

		if nberr > 0 {
			hasError = true
		}
	}

	if hasError {
		os.Exit(1)
	}
}