Refactor sync file reading

parent 541e32e10b
commit 6ca71230c1

4 changed files with 74 additions and 62 deletions

@@ -1,7 +1,6 @@
 package sync
 
 import (
-    "bufio"
     "encoding/hex"
     "fmt"
     "log"

@@ -104,29 +103,33 @@ func CheckExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExc
     errs = append(errs, berrs...)
 
     for _, fname := range flist {
-        w, hash160, hash512 := fic.CreateHashBuffers()
-
-        if err := GetFile(i, path.Join(exercice.Path, "files", fname), bufio.NewWriter(w)); err != nil {
+        if fd, closer, err := GetFile(i, path.Join(exercice.Path, "files", fname)); err != nil {
             errs = append(errs, NewFileError(exercice, fname, fmt.Errorf("unable to read file: %w", err)))
             continue
-        } else if _, err := fic.CheckBufferHash(hash160, hash512, digests[fname]); err != nil {
-            errs = append(errs, NewFileError(exercice, fname, err))
-        } else if size, err := getFileSize(i, path.Join(exercice.Path, "files", fname)); err != nil {
-            errs = append(errs, NewFileError(exercice, fname, err))
         } else {
-            var digest_shown []byte
-            if strings.HasSuffix(fname, ".gz") {
-                if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
-                    digest_shown = d
+            defer closer()
+
+            hash160, hash512 := fic.CreateHashBuffers(fd)
+
+            if _, err := fic.CheckBufferHash(hash160, hash512, digests[fname]); err != nil {
+                errs = append(errs, NewFileError(exercice, fname, err))
+            } else if size, err := getFileSize(i, path.Join(exercice.Path, "files", fname)); err != nil {
+                errs = append(errs, NewFileError(exercice, fname, err))
+            } else {
+                var digest_shown []byte
+                if strings.HasSuffix(fname, ".gz") {
+                    if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
+                        digest_shown = d
+                    }
                 }
-            }
 
-            file := exercice.NewDummyFile(path.Join(exercice.Path, "files", fname), getDestinationFilePath(path.Join(exercice.Path, "files", fname)), (*hash512).Sum(nil), digest_shown, size)
+                file := exercice.NewDummyFile(path.Join(exercice.Path, "files", fname), getDestinationFilePath(path.Join(exercice.Path, "files", fname)), (*hash512).Sum(nil), digest_shown, size)
 
-            // Call checks hooks
-            for _, h := range hooks.fileHooks {
-                for _, e := range h(file, exceptions) {
-                    errs = append(errs, NewFileError(exercice, fname, e))
+                // Call checks hooks
+                for _, h := range hooks.fileHooks {
+                    for _, e := range h(file, exceptions) {
+                        errs = append(errs, NewFileError(exercice, fname, e))
+                    }
                 }
             }
         }
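
Note on the hunk above: GetFile no longer fills a caller-supplied bufio.Writer; it now hands back the reader together with a cleanup function, and hashing happens on the caller's side by passing that reader to fic.CreateHashBuffers. A minimal caller sketch of the new contract, assuming it sits in the same sync package (readAndDiscard is an illustrative name, not part of this commit):

package sync

import "io"

// readAndDiscard drains a file obtained through the refactored GetFile.
// The returned closer must be called once the reader is no longer
// needed, otherwise an underlying io.ReadCloser could stay open.
func readAndDiscard(i Importer, uri string) (int64, error) {
    fd, closer, err := GetFile(i, uri)
    if err != nil {
        return 0, err
    }
    defer closer()

    // Consume the whole stream, the same way CheckExerciceFiles feeds
    // fd into fic.CreateHashBuffers above.
    return io.Copy(io.Discard, fd)
}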

@@ -102,21 +102,15 @@ func getFileSize(i Importer, URI string) (size int64, err error) {
 }
 
 // GetFile helps to manage huge file transfert by concatenating splitted (with split(1)) files.
-func GetFile(i Importer, URI string, writer *bufio.Writer) error {
+func GetFile(i Importer, URI string) (io.Reader, func(), error) {
     // Import file if it exists
     if i.exists(URI) {
         fd, err := i.getFile(URI)
-        if err != nil {
-            return err
-        }
-
-        if fdc, ok := fd.(io.ReadCloser); ok {
-            defer fdc.Close()
-        }
-        writer.ReadFrom(fd)
-
-        writer.Flush()
-        return nil
+        return fd, func() {
+            if fdc, ok := fd.(io.ReadCloser); ok {
+                fdc.Close()
+            }
+        }, err
     }
 
     // Try to find file parts
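
The closer returned here only calls Close when the value obtained from the importer actually implements io.ReadCloser; plain readers get a no-op cleanup. A standalone sketch of that type-assertion pattern (closerFor is a hypothetical helper, not code from this repository):

package main

import (
    "io"
    "os"
    "strings"
)

// closerFor mirrors the cleanup functions built by GetFile: it closes
// rd when it is an io.ReadCloser and does nothing otherwise.
func closerFor(rd io.Reader) func() {
    return func() {
        if rdc, ok := rd.(io.ReadCloser); ok {
            rdc.Close()
        }
    }
}

func main() {
    closerFor(strings.NewReader("in-memory, nothing to close"))()

    if fd, err := os.Open(os.DevNull); err == nil {
        closerFor(fd)() // closes the *os.File
    }
}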

@@ -124,47 +118,57 @@ func GetFile(i Importer, URI string, writer *bufio.Writer) error {
     if i.exists(dirname) {
         filename := path.Base(URI)
         if files, err := i.listDir(dirname); err != nil {
-            return err
+            return nil, nil, err
         } else {
+            var readers []io.Reader
+
             for _, fname := range []string{filename, filename + "."} {
-                found := false
                 for _, file := range files {
                     if matched, _ := path.Match(fname+"[0-9][0-9]", file); matched {
-                        found = true
-
                         fd, err := i.getFile(path.Join(dirname, file))
                         if err != nil {
-                            return err
+                            // Close already opened files to avoid leaks
+                            for _, rd := range readers {
+                                if rdc, ok := rd.(io.ReadCloser); ok {
+                                    rdc.Close()
+                                }
+                            }
+
+                            return nil, nil, err
                         }
 
-                        if fdc, ok := fd.(io.ReadCloser); ok {
-                            defer fdc.Close()
-                        }
-                        writer.ReadFrom(fd)
+                        readers = append(readers, fd)
                     }
                 }
 
-                if found {
-                    writer.Flush()
-                    return nil
+                if len(readers) > 0 {
+                    return io.MultiReader(readers...), func() {
+                        for _, rd := range readers {
+                            if rdc, ok := rd.(io.ReadCloser); ok {
+                                rdc.Close()
+                            }
+                        }
+                    }, nil
                 }
             }
         }
     }
 
-    return fmt.Errorf("%q: no such file or directory", URI)
+    return nil, nil, fmt.Errorf("%q: no such file or directory", URI)
 }
 
 // GetFileContent retrieves the content of the given text file.
 func GetFileContent(i Importer, URI string) (string, error) {
-    cnt := bytes.Buffer{}
-
-    if err := GetFile(i, URI, bufio.NewWriter(io.Writer(&cnt))); err != nil {
+    if fd, closer, err := GetFile(i, URI); err != nil {
         return "", err
     } else {
+        defer closer()
+
+        buffd := bufio.NewReader(fd)
+
         // Ensure we read UTF-8 content.
         buf := make([]rune, 0)
-        for b, _, err := cnt.ReadRune(); err == nil; b, _, err = cnt.ReadRune() {
+        for b, _, err := buffd.ReadRune(); err == nil; b, _, err = buffd.ReadRune() {
             buf = append(buf, b)
         }
 
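
For files stored as split(1) parts (fname.00, fname.01, ...), the parts are now collected into a slice of readers and exposed as a single stream through io.MultiReader, with one closer that closes every part. A small self-contained illustration of the standard-library behaviour this relies on (the data below is made up):

package main

import (
    "fmt"
    "io"
    "strings"
)

func main() {
    // Three fake split(1) parts of one logical file.
    parts := []io.Reader{
        strings.NewReader("first part, "),
        strings.NewReader("second part, "),
        strings.NewReader("last part"),
    }

    // io.MultiReader reads each part in order, so the caller sees one
    // continuous stream, which is what the refactored GetFile returns.
    whole, err := io.ReadAll(io.MultiReader(parts...))
    if err != nil {
        panic(err)
    }
    fmt.Println(string(whole))
}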

@@ -194,14 +198,17 @@ func importFile(i Importer, URI string, dest string) error {
         return err
     } else {
         defer fdto.Close()
-        writer := bufio.NewWriter(fdto)
-        if err := GetFile(i, URI, writer); err != nil {
+
+        if fdfrom, closer, err := GetFile(i, URI); err != nil {
             os.Remove(dest)
             return err
+        } else {
+            defer closer()
+
+            _, err = io.Copy(fdto, fdfrom)
+            return err
         }
     }
-
-    return nil
 }
 
 // ImportFile imports the file at the given URI, using helpers of the given Importer.
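
importFile now streams the source into the destination with io.Copy and removes the partially written file when the copy fails. The same copy-then-cleanup shape, detached from the Importer plumbing, looks roughly like this (copyToFile is a hypothetical helper, not code from this commit):

package main

import (
    "io"
    "os"
    "path/filepath"
    "strings"
)

// copyToFile writes everything from rd into dest and deletes the
// partial file when the copy fails, as importFile above now does.
func copyToFile(rd io.Reader, dest string) error {
    fdto, err := os.Create(dest)
    if err != nil {
        return err
    }
    defer fdto.Close()

    if _, err := io.Copy(fdto, rd); err != nil {
        os.Remove(dest)
        return err
    }
    return nil
}

func main() {
    dest := filepath.Join(os.TempDir(), "import-example.txt")
    if err := copyToFile(strings.NewReader("example content"), dest); err != nil {
        panic(err)
    }
}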

@@ -1,9 +1,9 @@
 package sync
 
 import (
-    "bufio"
     "bytes"
     "encoding/base32"
+    "io"
     "os"
     "path"
     "strings"

@@ -87,10 +87,15 @@ func (t *imageImporterTransformer) Transform(doc *ast.Document, reader text.Read
             return ast.WalkStop, err
         } else {
             defer fdto.Close()
-            writer := bufio.NewWriter(fdto)
-            if err := GetFile(t.importer, path.Join(t.rootDir, iPath), writer); err != nil {
+
+            if fd, closer, err := GetFile(t.importer, path.Join(t.rootDir, iPath)); err != nil {
                 os.Remove(dPath)
                 return ast.WalkStop, err
+            } else {
+                defer closer()
+
+                _, err = io.Copy(fdto, fd)
+                return ast.WalkStop, err
             }
         }
     }

@@ -1,7 +1,6 @@
 package fic
 
 import (
-    "bufio"
     "crypto"
     _ "crypto/sha1"
     "encoding/hex"

@@ -169,13 +168,15 @@ func minifyHash(hash string) string {
 }
 
 // CheckBufferHash checks if the bufio has the given digest.
-func CreateHashBuffers() (io.Writer, *hash.Hash, *hash.Hash) {
+func CreateHashBuffers(rd io.Reader) (*hash.Hash, *hash.Hash) {
     hash160 := crypto.SHA1.New()
     hash512 := crypto.BLAKE2b_512.New()
 
     w := io.MultiWriter(hash160, hash512)
 
-    return w, &hash160, &hash512
+    io.Copy(w, rd)
+
+    return &hash160, &hash512
 }
 
 // CheckBufferHash checks if the bufio has the given digest.
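
CreateHashBuffers now consumes the reader itself: the stream is copied once through an io.MultiWriter into both a SHA-1 and a BLAKE2b-512 state, and only the two hash.Hash pointers are returned. A minimal usage sketch, assuming it is called from within the fic package (digestsOf is an illustrative name, not part of the commit):

package fic

import "io"

// digestsOf hashes rd once and returns the SHA-1 and BLAKE2b-512 sums,
// using the CreateHashBuffers signature introduced above.
func digestsOf(rd io.Reader) (sha1Sum, blake512Sum []byte) {
    hash160, hash512 := CreateHashBuffers(rd)
    return (*hash160).Sum(nil), (*hash512).Sum(nil)
}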

@@ -219,11 +220,7 @@ func checkFileHash(filePath string, digest []byte) (dgst []byte, size int64, err
     defer fd.Close()
     size = fi.Size()
 
-    w, hash160, hash512 := CreateHashBuffers()
-
-    if _, err = io.Copy(w, bufio.NewReader(fd)); err != nil {
-        return
-    }
+    hash160, hash512 := CreateHashBuffers(fd)
 
     dgst, err = CheckBufferHash(hash160, hash512, digest)
 