sync: Extract function that imports files from importer

This commit is contained in:
nemunaire 2019-07-05 19:07:28 +02:00
parent 3104bf4e65
commit 3f55845374
3 changed files with 136 additions and 90 deletions

View file

@@ -1,6 +1,7 @@
package sync
import (
"bufio"
"encoding/hex"
"fmt"
"path"
@@ -10,76 +11,104 @@ import (
"srs.epita.fr/fic-server/libfic"
)
// SyncExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
// It takes care of DIGESTS.txt and ensure imported files match.
func SyncExerciceFiles(i Importer, exercice fic.Exercice) (errs []string) {
func buildFilesListInto(i Importer, exercice fic.Exercice, into string) (files []string, digests map[string][]byte, errs []string) {
// If no files directory, don't display error
if ! i.exists(path.Join(exercice.Path, "files")) {
if !i.exists(path.Join(exercice.Path, into)) {
return
}
if digs, err := getFileContent(i, path.Join(exercice.Path, "files", "DIGESTS.txt")); err != nil {
// Parse DIGESTS.txt
if digs, err := getFileContent(i, path.Join(exercice.Path, into, "DIGESTS.txt")); err != nil {
errs = append(errs, fmt.Sprintf("%q: unable to read DIGESTS.txt: %s", path.Base(exercice.Path), err))
} else if _, err := exercice.WipeFiles(); err != nil {
errs = append(errs, err.Error())
} else if files, err := i.listDir(path.Join(exercice.Path, "files")); err != nil {
errs = append(errs, err.Error())
} else {
// Parse DIGESTS.txt
digests := map[string][]byte{}
digests = map[string][]byte{}
for nline, d := range strings.Split(digs, "\n") {
if dsplt := strings.SplitN(d, " ", 2); len(dsplt) < 2 {
errs = append(errs, fmt.Sprintf("%q: unable to parse DIGESTS.txt line %d: invalid format", path.Base(exercice.Path), nline + 1))
errs = append(errs, fmt.Sprintf("%q: unable to parse DIGESTS.txt line %d: invalid format", path.Base(exercice.Path), nline+1))
continue
} else if hash, err := hex.DecodeString(dsplt[0]); err != nil {
errs = append(errs, fmt.Sprintf("%q: unable to parse DIGESTS.txt line %d: %s", path.Base(exercice.Path), nline + 1, err))
errs = append(errs, fmt.Sprintf("%q: unable to parse DIGESTS.txt line %d: %s", path.Base(exercice.Path), nline+1, err))
continue
} else {
digests[strings.TrimFunc(dsplt[1], unicode.IsSpace)] = hash
}
}
}
// Import splitted files
var splittedFiles []string
for _, fname := range files {
if matched, _ := path.Match("*.00", fname); matched {
fname = fname[:len(fname)-3]
splittedFiles = append(splittedFiles, fname + ".[0-9][0-9]")
files = append(files, fname)
} else if matched, _ := path.Match("*00", fname); matched {
fname = fname[:len(fname)-2]
splittedFiles = append(splittedFiles, fname + "[0-9][0-9]")
files = append(files, fname)
} else if matched, _ := path.Match("*_MERGED", fname); matched {
splittedFiles = append(splittedFiles, fname)
}
}
// Import standard files
for _, fname := range files {
// Read file list
if flist, err := i.listDir(path.Join(exercice.Path, into)); err != nil {
errs = append(errs, err.Error())
} else {
for _, fname := range flist {
if fname == "DIGESTS.txt" {
continue
}
foundSplitted := false
for _, sf := range splittedFiles {
if matched, _ := path.Match(sf, fname); matched {
foundSplitted = true
}
}
if foundSplitted {
if matched, _ := path.Match("*.00", fname); matched {
fname = fname[:len(fname)-3]
} else if matched, _ := path.Match("*00", fname); matched {
fname = fname[:len(fname)-2]
} else if matched, _ := path.Match("*_MERGED", fname); matched {
continue
}
if f, err := i.importFile(path.Join(exercice.Path, "files", fname),
func(filePath string, origin string) (interface{}, error) {
return exercice.ImportFile(filePath, origin, digests[fname])
}); err != nil {
errs = append(errs, fmt.Sprintf("%q: unable to import file %q: %s", path.Base(exercice.Path), fname, err))
continue
} else if f.(fic.EFile).Size == 0 {
errs = append(errs, fmt.Sprintf("%q: WARNING imported file %q is empty!", path.Base(exercice.Path), fname))
fileFound := false
for _, f := range files {
if fname == f {
fileFound = true
break
}
}
if !fileFound {
files = append(files, fname)
}
}
}
return
}
// CheckExerciceFiles checks that remote files have the right digest.
func CheckExerciceFiles(i Importer, exercice fic.Exercice) (files []fic.EFile, errs []string) {
	// Gather file names and expected digests from files/DIGESTS.txt.
	names, digests, listErrs := buildFilesListInto(i, exercice, "files")
	errs = append(errs, listErrs...)

	exoName := path.Base(exercice.Path)
	for _, name := range names {
		// Stream the remote file through both hashers at once.
		sink, h160, h512 := fic.CreateHashBuffers()

		err := i.getFile(path.Join(exercice.Path, "files", name), bufio.NewWriter(sink))
		if err != nil {
			errs = append(errs, fmt.Sprintf("%q: unable to read file %q: %s", exoName, name, err))
			continue
		}
		// Compare the computed sums against the digest recorded for this file.
		if _, err = fic.CheckBufferHash(h160, h512, digests[name]); err != nil {
			errs = append(errs, fmt.Sprintf("%q: %s: %s", exoName, name, err))
		}
		files = append(files, fic.EFile{Name: name})
	}
	return
}
// SyncExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
// It takes care of DIGESTS.txt and ensure imported files match.
func SyncExerciceFiles(i Importer, exercice fic.Exercice) (errs []string) {
// Drop any file previously registered for this challenge before re-importing.
if _, err := exercice.WipeFiles(); err != nil {
errs = append(errs, err.Error())
}
// Collect file names and their expected digests from files/DIGESTS.txt.
files, digests, berrs := buildFilesListInto(i, exercice, "files")
errs = append(errs, berrs...)
// Import standard files
for _, fname := range files {
if f, err := i.importFile(path.Join(exercice.Path, "files", fname),
func(filePath string, origin string) (interface{}, error) {
// Register the file, checking it against the digest recorded in DIGESTS.txt.
return exercice.ImportFile(filePath, origin, digests[fname])
}); err != nil {
errs = append(errs, fmt.Sprintf("%q: unable to import file %q: %s", path.Base(exercice.Path), fname, err))
continue
} else if f.(fic.EFile).Size == 0 {
// An empty imported file is almost certainly a packaging mistake: warn but keep going.
errs = append(errs, fmt.Sprintf("%q: WARNING imported file %q is empty!", path.Base(exercice.Path), fname))
}
}
return

View file

@@ -58,7 +58,7 @@ func getFileSize(i Importer, URI string) (size int64, err error) {
for _, fname := range []string{filename, filename + "."} {
found := false
for _, file := range files {
if matched, _ := path.Match(fname + "[0-9][0-9]", file); matched {
if matched, _ := path.Match(fname+"[0-9][0-9]", file); matched {
found = true
if fi, err := i.stat(path.Join(dirname, file)); err != nil {
return size, err
@@ -95,7 +95,7 @@ func getFile(i Importer, URI string, writer *bufio.Writer) error {
for _, fname := range []string{filename, filename + "."} {
found := false
for _, file := range files {
if matched, _ := path.Match(fname + "[0-9][0-9]", file); matched {
if matched, _ := path.Match(fname+"[0-9][0-9]", file); matched {
found = true
if err := i.getFile(path.Join(dirname, file), writer); err != nil {
return err

View file

@@ -6,6 +6,7 @@ import (
_ "crypto/sha1"
"encoding/hex"
"errors"
"hash"
"io"
"os"
"path"
@@ -28,19 +29,19 @@ var PlainDigest bool = false
// EFile represents a challenge file.
type EFile struct {
Id int64 `json:"id"`
Id int64 `json:"id"`
// origin holds the import relative path of the file
origin string
origin string
// Path is the location where the file is stored, relatively to FilesDir
Path string `json:"path"`
Path string `json:"path"`
// IdExercice is the identifier of the underlying challenge
IdExercice int64 `json:"idExercice"`
IdExercice int64 `json:"idExercice"`
// Name is the title displayed to players
Name string `json:"name"`
Name string `json:"name"`
// Checksum stores the cached hash of the file
Checksum []byte `json:"checksum"`
Checksum []byte `json:"checksum"`
// Size contains the cached size of the file
Size int64 `json:"size"`
Size int64 `json:"size"`
}
// GetFiles returns a list of all files living in the database.
@@ -88,7 +89,7 @@ func GetFileByPath(path string) (EFile, error) {
func (e Exercice) GetFileByFilename(filename string) (f EFile, err error) {
filename = path.Base(filename)
err = DBQueryRow("SELECT id_file, origin, path, id_exercice, name, cksum, size FROM exercice_files WHERE id_exercice = ? AND origin LIKE ?", e.Id, "%/" + filename).Scan(&f.Id, &f.origin, &f.Path, &f.IdExercice, &f.Name, &f.Checksum, &f.Size)
err = DBQueryRow("SELECT id_file, origin, path, id_exercice, name, cksum, size FROM exercice_files WHERE id_exercice = ? AND origin LIKE ?", e.Id, "%/"+filename).Scan(&f.Id, &f.origin, &f.Path, &f.IdExercice, &f.Name, &f.Checksum, &f.Size)
return
}
@@ -139,50 +140,66 @@ func minifyHash(hash string) string {
}
}
// CreateHashBuffers initializes a SHA-1 and a BLAKE2b-512 hasher and returns
// an io.Writer feeding both at once, along with pointers to each underlying
// hash so the computed digests can be read back afterwards.
// NOTE(review): the previous comment was copy-pasted from CheckBufferHash.
func CreateHashBuffers() (io.Writer, *hash.Hash, *hash.Hash) {
hash160 := crypto.SHA1.New()
hash512 := crypto.BLAKE2b_512.New()
w := io.MultiWriter(hash160, hash512)
return w, &hash160, &hash512
}
// CheckBufferHash compares the sums accumulated in the given hash buffers
// (SHA-1 and BLAKE2b-512, as produced by CreateHashBuffers) against the
// expected digest, and returns the BLAKE2b-512 sum on success.
// The algorithm to compare against is selected by the digest's length:
// a BLAKE2b-512-sized digest is matched against the BLAKE2b sum, a
// SHA-1-sized one against the SHA-1 sum (unless StrongDigest refuses
// legacy SHA-1 checksums).
func CheckBufferHash(hash160 *hash.Hash, hash512 *hash.Hash, digest []byte) ([]byte, error) {
result160 := (*hash160).Sum(nil)
result512 := (*hash512).Sum(nil)
// The digest length tells which algorithm the DIGESTS.txt entry used.
if len(digest) != len(result512) {
if len(digest) != len(result160) {
// Length matches neither algorithm: report both calculated sums.
return []byte{}, errors.New("Digests doesn't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
} else if StrongDigest {
// SHA-1-sized digest, but legacy checksums are refused in strict mode.
return []byte{}, errors.New("Invalid digests: SHA-1 checksums are no more accepted. Calculated sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
}
// Byte-wise comparison against the SHA-1 sum.
for k := range result160 {
if result160[k] != digest[k] {
return []byte{}, errors.New("Digests doesn't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
}
}
} else {
// Byte-wise comparison against the BLAKE2b-512 sum.
for k := range result512 {
if result512[k] != digest[k] {
return []byte{}, errors.New("Digests doesn't match: calculated: " + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
}
}
}
// On success, always hand back the strong (BLAKE2b-512) sum.
return result512, nil
}
// checkFileHash checks if the file at the given filePath has the given digest.
// It also returns the file's size.
func checkFileHash(filePath string, digest []byte) ([]byte, int64, error) {
func checkFileHash(filePath string, digest []byte) (dgst []byte, size int64, err error) {
if digest == nil {
return []byte{}, 0, errors.New("No digest given.")
} else if fi, err := os.Stat(filePath); err != nil {
return []byte{}, 0, err
} else if fd, err := os.Open(filePath); err != nil {
return []byte{}, fi.Size(), err
} else if fi, errr := os.Stat(filePath); err != nil {
return []byte{}, 0, errr
} else if fd, errr := os.Open(filePath); err != nil {
return []byte{}, fi.Size(), errr
} else {
defer fd.Close()
size = fi.Size()
reader := bufio.NewReader(fd)
hash160 := crypto.SHA1.New()
hash512 := crypto.BLAKE2b_512.New()
w, hash160, hash512 := CreateHashBuffers()
w := io.MultiWriter(hash160, hash512)
if _, err := io.Copy(w, reader); err != nil {
return []byte{}, fi.Size(), err
}
result160 := hash160.Sum(nil)
result512 := hash512.Sum(nil)
if len(digest) != len(result512) {
if len(digest) != len(result160) {
return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
} else if StrongDigest {
return []byte{}, fi.Size(), errors.New("Invalid digests: SHA-1 checksums are no more accepted. Calculated sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
}
for k := range result160 {
if result160[k] != digest[k] {
return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
}
}
} else {
for k := range result512 {
if result512[k] != digest[k] {
return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: " + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
}
}
if _, err = io.Copy(w, bufio.NewReader(fd)); err != nil {
return
}
return result512, fi.Size(), nil
dgst, err = CheckBufferHash(hash160, hash512, digest)
return
}
}