// server/repochecker/main.go
package main
import (
"bufio"
"bytes"
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path"
"path/filepath"
"strconv"
"strings"
"srs.epita.fr/fic-server/admin/sync"
"srs.epita.fr/fic-server/libfic"
)
var (
	// ignoreBinaryFileUnder is the size, in bytes, under which binary files
	// found in the git history are not reported. Tunable through the
	// -skip-binary-files-under command-line flag.
	ignoreBinaryFileUnder = 1500000
	// skipFileChecks disables digest verification of exercice files
	// (-skipfiledigests flag): only their presence is then checked.
	skipFileChecks = false
	// skipBinaryFileCheck silences per-file warnings in the Git-LFS check
	// (-skip-binary-file flag).
	skipBinaryFileCheck = false
)
// formatFileSize renders a byte count as a human-readable IEC string
// ("B", "KiB", "MiB", "GiB", "TiB") with one decimal place above 1 KiB.
func formatFileSize(size int) string {
	// The thresholds use the same binary (IEC) boundaries as the divisors:
	// the previous decimal thresholds (1000, 1000000, ...) made values such
	// as 1,200,000 B print as "1.1 MiB" while still below one real MiB.
	switch {
	case size >= 1<<40:
		return fmt.Sprintf("%.1f TiB", float64(size)/float64(1<<40))
	case size >= 1<<30:
		return fmt.Sprintf("%.1f GiB", float64(size)/float64(1<<30))
	case size >= 1<<20:
		return fmt.Sprintf("%.1f MiB", float64(size)/float64(1<<20))
	case size >= 1<<10:
		return fmt.Sprintf("%.1f KiB", float64(size)/float64(1<<10))
	default:
		return fmt.Sprintf("%d B", size)
	}
}
// searchBinaryInGit scans the whole git history (every ref, via --all) of
// the repository rooted at edir and returns one report line per binary
// file found, so the caller can tell the author to use Git-LFS instead.
// Files smaller than ignoreBinaryFileUnder are skipped, and when
// skipBinaryFileCheck is set findings are remembered but not reported.
// If git is unavailable or edir is not a git repository, nil is returned.
func searchBinaryInGit(edir string) (ret []string) {
	// Check if git exists and if we are in a git repo
	err := exec.Command("git", "-C", edir, "remote").Run()
	if err == nil {
		// --numstat marks binary files with "-\t-\t" counters; -z switches
		// to NUL separators so unusual file names are not quoted/escaped;
		// --no-renames keeps a single path per entry.
		cmd := exec.Command("git", "-C", edir, "log", "--all", "--numstat", "--no-renames", "-z")
		var out bytes.Buffer
		cmd.Stdout = &out
		err := cmd.Run()
		if err == nil {
			// alreadySeen maps file name -> pending report line, reset to ""
			// once flushed into ret, so each file is reported at most twice
			// (its pending line plus the current occurrence).
			alreadySeen := map[string]string{}
			commit := ""
			scanner := bufio.NewScanner(&out)
			// Split on \n and \0 (-z option)
			scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
				for i := 0; i < len(data); i++ {
					if data[i] == '\n' || data[i] == '\000' {
						return i + 1, data[:i], nil
					}
				}
				if !atEOF {
					return 0, nil, nil
				}
				return 0, data, bufio.ErrFinalToken
			})
			for scanner.Scan() {
				if strings.HasPrefix(scanner.Text(), "commit ") {
					// Remember which commit the following numstat lines belong to.
					commit = strings.TrimPrefix(scanner.Text(), "commit ")
				} else if strings.HasPrefix(scanner.Text(), "-\t-\t") {
					// "-\t-\t<name>": git could not count lines => binary file.
					fname := strings.TrimPrefix(scanner.Text(), "-\t-\t")
					// Ask ls-tree (-l) for the blob size of this file in that commit.
					cmdfile := exec.Command("git", "-C", edir, "ls-tree", "-r", "-l", commit, fname)
					var outfile bytes.Buffer
					cmdfile.Stdout = &outfile
					err = cmdfile.Run()
					// Sentinel: stays negative if the size could not be obtained,
					// which shows up clearly in the formatted report line.
					var fsize int = -1024
					if err == nil {
						fields := strings.Fields(outfile.String())
						if len(fields) < 4 {
							// This should be a file deletion
							if _, ok := alreadySeen[fname]; !ok {
								alreadySeen[fname] = fmt.Sprintf("%s (commit %s) deleted", fname, commit[:7])
							}
							continue
						} else if fsize, err = strconv.Atoi(fields[3]); err == nil && fsize < ignoreBinaryFileUnder {
							// Below the reporting threshold: skip, unless the file
							// was seen before and warnings are enabled.
							if _, ok := alreadySeen[fname]; !ok || skipBinaryFileCheck {
								continue
							}
						} else if _, ok := alreadySeen[fname]; !ok && skipBinaryFileCheck {
							// Warnings disabled: record the finding silently.
							alreadySeen[fname] = fmt.Sprintf("%s (commit %s) (size %s)", fname, commit[:7], formatFileSize(fsize))
							continue
						}
					}
					// Flush the pending (earlier) report line for this file first.
					if as, ok := alreadySeen[fname]; ok && as != "" {
						ret = append(ret, as)
						alreadySeen[fname] = ""
					}
					ret = append(ret, fmt.Sprintf("%s (commit %s) (size %s)", fname, commit[:7], formatFileSize(fsize)))
				}
			}
		}
	}
	return
}
// checkExercice builds the exercice stored in edir under the given theme,
// then runs the file, flag and hint checks on it. Every error string
// collected along the way is returned; on success the built exercice is
// registered in dmap under its identifier.
func checkExercice(theme *fic.Theme, edir string, dmap *map[int64]*fic.Exercice) (errs []string) {
	exercice, _, eid, _, buildErrs := sync.BuildExercice(sync.GlobalImporter, theme, path.Join(theme.Path, edir), dmap)
	errs = append(errs, buildErrs...)
	if exercice == nil {
		// Nothing buildable here: only the build errors are reported.
		return
	}

	// Files: either fully verify digests, or just check they exist.
	var files, fileErrs []string
	if skipFileChecks {
		files, fileErrs = sync.CheckExerciceFilesPresence(sync.GlobalImporter, exercice)
		log.Printf("%d files presents but not checked (please check digest yourself).\n", len(files))
	} else {
		files, fileErrs = sync.CheckExerciceFiles(sync.GlobalImporter, exercice)
		log.Printf("%d files checked.\n", len(files))
	}
	errs = append(errs, fileErrs...)

	// Flags
	flags, flagErrs := sync.CheckExerciceFlags(sync.GlobalImporter, exercice, files)
	errs = append(errs, flagErrs...)
	log.Printf("%d flags checked.\n", len(flags))

	// Hints
	hints, hintErrs := sync.CheckExerciceHints(sync.GlobalImporter, exercice)
	errs = append(errs, hintErrs...)
	log.Printf("%d hints checked.\n", len(hints))

	if dmap != nil {
		(*dmap)[int64(eid)] = exercice
	}
	return
}
// main drives the repository checker: it selects an importer from the
// environment and command line, then runs the theme/exercice checks on
// every path given as argument. The process exits with status 1 when any
// check reported an error (or when no argument was given).
func main() {
	cloudDAVBase := ""
	cloudUsername := "fic"
	cloudPassword := ""
	localImporterDirectory := ""

	// Read parameters from environment
	if v, exists := os.LookupEnv("FICCLOUD_URL"); exists {
		cloudDAVBase = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_USER"); exists {
		cloudUsername = v
	}
	if v, exists := os.LookupEnv("FICCLOUD_PASS"); exists {
		cloudPassword = v
	}

	// Read parameters from command line (these take precedence over env)
	flag.StringVar(&localImporterDirectory, "localimport", localImporterDirectory,
		"Base directory where to find challenges files to import, local part")
	flag.StringVar(&cloudDAVBase, "clouddav", cloudDAVBase,
		"Base directory where to find challenges files to import, cloud part")
	flag.StringVar(&cloudUsername, "clouduser", cloudUsername, "Username used to sync")
	flag.StringVar(&cloudPassword, "cloudpass", cloudPassword, "Password used to sync")
	flag.BoolVar(&fic.OptionalDigest, "optionaldigest", fic.OptionalDigest, "Is the digest required when importing files?")
	flag.BoolVar(&fic.StrongDigest, "strongdigest", fic.StrongDigest, "Are BLAKE2b digests required or is SHA-1 good enough?")
	flag.BoolVar(&skipFileChecks, "skipfiledigests", skipFileChecks, "Don't perform DIGESTS checks on file to speed up the checks")
	flag.BoolVar(&sync.LogMissingResolution, "skipresolution", sync.LogMissingResolution, "Don't fail if resolution.mp4 is absent")
	flag.BoolVar(&skipBinaryFileCheck, "skip-binary-file", skipBinaryFileCheck, "In Git-LFS check, don't warn files")
	flag.IntVar(&ignoreBinaryFileUnder, "skip-binary-files-under", ignoreBinaryFileUnder, "In Git-LFS check, don't warn files under this size")
	flag.Parse()

	log.SetPrefix("[repochecker] ")

	// Instantiate importer
	regenImporter := false
	if localImporterDirectory != "" {
		sync.GlobalImporter = sync.LocalImporter{Base: localImporterDirectory, Symlink: true}
	} else if cloudDAVBase != "" {
		var cerr error
		sync.GlobalImporter, cerr = sync.NewCloudImporter(cloudDAVBase, cloudUsername, cloudPassword)
		if cerr != nil {
			// Previously ignored: a bad DAV URL led to confusing failures later.
			log.Fatal(cerr)
		}
	} else {
		// In this case, we want to treat the entire path given
		regenImporter = true
	}

	var err error

	// Create temporary directory for storing FILES/ content
	fic.FilesDir, err = ioutil.TempDir("", "fic-repochecker.")
	if err != nil {
		// This branch used to be empty, letting later file operations fail
		// obscurely. Abort right away instead.
		log.Fatal(err)
	}
	defer os.RemoveAll(fic.FilesDir)

	if sync.GlobalImporter != nil {
		log.Println("Using", sync.GlobalImporter.Kind())
		if themes, err := sync.GetThemes(sync.GlobalImporter); err != nil {
			log.Fatal(err)
		} else if len(flag.Args()) == 0 {
			// No path given: just list what the importer can see and bail out.
			log.Println("Existing themes:")
			for _, th := range themes {
				log.Println("-", th)
			}
			os.Exit(1)
		}
	} else if len(flag.Args()) == 0 {
		log.Fatal("No importer nor path given!")
	}

	// Variable that handles the exit status
	hasError := false

	for _, p := range flag.Args() {
		if regenImporter {
			// Root a local importer at the parent of each given path, so p
			// becomes a name relative to the importer base.
			var err error
			p, err = filepath.Abs(p)
			if err != nil {
				p = path.Clean(p)
			}
			sync.GlobalImporter = sync.LocalImporter{
				Base:    path.Dir(p),
				Symlink: true,
			}
			p = path.Base(p)
		}

		nberr := 0

		theme, errs := sync.BuildTheme(sync.GlobalImporter, p)
		if theme != nil {
			nberr += len(errs)
			for _, err := range errs {
				log.Println(err)
			}

			exercices, err := sync.GetExercices(sync.GlobalImporter, theme)
			if err != nil {
				nberr += 1
				log.Println(err)
				// Record the failure now: `continue` skips the nberr check
				// at the bottom of the loop, which used to lose this error
				// from the exit status.
				hasError = true
				continue
			}

			dmap := map[int64]*fic.Exercice{}
			for _, edir := range exercices {
				for _, err := range checkExercice(theme, edir, &dmap) {
					nberr += 1
					log.Println(err)
				}
				log.Printf("================================== Exercice %q treated\n", edir)
			}

			// The Git-LFS check only makes sense on a local clone: the bare
			// type assertion used here before panicked with the cloud importer.
			if li, ok := sync.GlobalImporter.(sync.LocalImporter); ok {
				bfile := searchBinaryInGit(path.Join(li.Base, p))
				if len(bfile) > 0 {
					fmt.Printf("\n")
					log.Println("There are some binary files in your git repository, they HAVE TO use LFS instead:")
					for _, f := range bfile {
						log.Println(" -", f)
					}
				}
			}

			fmt.Printf("\n")
			log.Printf("Theme %q treated. %d error(s) reported.\n\n", p, nberr)
		} else {
			log.Printf("This is not a theme directory, run checks for exercice.\n\n")
			for _, err := range checkExercice(&fic.Theme{}, p, &map[int64]*fic.Exercice{}) {
				nberr += 1
				log.Println(err)
			}

			// Same Git-LFS check, restricted to files under the exercice path.
			if li, ok := sync.GlobalImporter.(sync.LocalImporter); ok {
				bfile := searchBinaryInGit(path.Join(li.Base, p))
				if len(bfile) > 0 {
					fmt.Printf("\n")
					log.Println("There are some binary files in your git repository, they HAVE TO use LFS instead:")
					for _, f := range bfile {
						if strings.HasPrefix(f, p) {
							log.Println(" -", f)
						}
					}
				}
			}

			fmt.Printf("\n")
			log.Printf("Exercice %q treated. %d error(s) reported.\n", p, nberr)
		}

		if nberr > 0 {
			hasError = true
		}
	}

	if hasError {
		os.Exit(1)
	}
}