Compare commits

7 commits: e68025f017 ... af97e1317f

Author | SHA1 | Date
---|---|---
 | af97e1317f |
 | 7f38911bbb |
 | c2996b9f0a |
 | 8723f500cc |
 | b55151623c |
 | c7d1d7ce4c |
 | c5d0616896 |

15 changed files with 414 additions and 137 deletions
@@ -241,7 +241,7 @@ func declareSyncExercicesRoutes(router *gin.RouterGroup) {

 		exceptions := sync.LoadExerciceException(sync.GlobalImporter, theme, exercice, nil)

-		c.JSON(http.StatusOK, flatifySyncErrors(sync.SyncExerciceFiles(sync.GlobalImporter, exercice, exceptions)))
+		c.JSON(http.StatusOK, flatifySyncErrors(sync.ImportExerciceFiles(sync.GlobalImporter, exercice, exceptions)))
 	})
 	apiSyncExercicesRoutes.POST("/fixurlid", func(c *gin.Context) {
 		exercice := c.MustGet("exercice").(*fic.Exercice)
@@ -315,9 +315,57 @@ func DownloadExerciceFile(pf ExerciceFile, dest string, exercice *fic.Exercice,
 	return
 }

-// SyncExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
+type importedFile struct {
+	file interface{}
+	Name string
+}
+
+func SyncExerciceFiles(i Importer, exercice *fic.Exercice, paramsFiles map[string]ExerciceFile, actionAfterImport func(fname string, digests map[string][]byte, filePath, origin string) (interface{}, error)) (ret []*importedFile, errs error) {
+	files, digests, berrs := BuildFilesListInto(i, exercice, "files")
+	errs = multierr.Append(errs, berrs)
+
+	// Import standard files
+	for _, fname := range files {
+		var f interface{}
+		var err error
+
+		if pf, exists := paramsFiles[fname]; exists && pf.URL != "" && !i.Exists(path.Join(exercice.Path, "files", fname)) {
+			dest := GetDestinationFilePath(pf.URL, &pf.Filename)
+
+			if _, err := os.Stat(dest); !os.IsNotExist(err) {
+				if d, err := actionAfterImport(fname, digests, dest, pf.URL); err == nil {
+					f = d
+				}
+			}
+
+			if f == nil {
+				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], dest, exercice, false))
+
+				f, err = actionAfterImport(fname, digests, dest, pf.URL)
+			}
+		} else {
+			f, err = i.importFile(path.Join(exercice.Path, "files", fname), func(filePath, origin string) (interface{}, error) {
+				return actionAfterImport(fname, digests, filePath, origin)
+			})
+		}
+
+		if err != nil {
+			errs = multierr.Append(errs, NewFileError(exercice, fname, err))
+			continue
+		}
+
+		ret = append(ret, &importedFile{
+			f,
+			fname,
+		})
+	}
+
+	return
+}
+
+// ImportExerciceFiles reads the content of files/ directory and import it as EFile for the given challenge.
 // It takes care of DIGESTS.txt and ensure imported files match.
-func SyncExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (errs error) {
+func ImportExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExceptions) (errs error) {
 	if _, err := exercice.WipeFiles(); err != nil {
 		errs = multierr.Append(errs, err)
 	}
@@ -328,63 +376,41 @@ func SyncExerciceFiles(i Importer, exercice *fic.Exercice, exceptions *CheckExce
 		return
 	}

-	files, digests, berrs := BuildFilesListInto(i, exercice, "files")
+	actionAfterImport := func(fname string, digests map[string][]byte, filePath, origin string) (interface{}, error) {
+		var digest_shown []byte
+		if strings.HasSuffix(fname, ".gz") {
+			if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
+				digest_shown = d
+			}
+		}
+
+		published := true
+		disclaimer := ""
+		if f, exists := paramsFiles[fname]; exists {
+			published = !f.Hidden
+
+			// Call checks hooks
+			for _, hk := range hooks.mdTextHooks {
+				for _, err := range multierr.Errors(hk(f.Disclaimer, exercice.Language, exceptions)) {
+					errs = multierr.Append(errs, NewFileError(exercice, fname, err))
+				}
+			}
+
+			if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
+				errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
+			}
+		}
+
+		return exercice.ImportFile(filePath, origin, digests[fname], digest_shown, disclaimer, published)
+	}
+
+	files, berrs := SyncExerciceFiles(i, exercice, paramsFiles, actionAfterImport)
 	errs = multierr.Append(errs, berrs)

-	// Import standard files
-	for _, fname := range files {
-		actionAfterImport := func(filePath string, origin string) (interface{}, error) {
-			var digest_shown []byte
-			if strings.HasSuffix(fname, ".gz") {
-				if d, exists := digests[strings.TrimSuffix(fname, ".gz")]; exists {
-					digest_shown = d
-				}
-			}
-
-			published := true
-			disclaimer := ""
-			if f, exists := paramsFiles[fname]; exists {
-				published = !f.Hidden
-
-				// Call checks hooks
-				for _, hk := range hooks.mdTextHooks {
-					for _, err := range multierr.Errors(hk(f.Disclaimer, exercice.Language, exceptions)) {
-						errs = multierr.Append(errs, NewFileError(exercice, fname, err))
-					}
-				}
-
-				if disclaimer, err = ProcessMarkdown(i, fixnbsp(f.Disclaimer), exercice.Path); err != nil {
-					errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("error during markdown formating of disclaimer: %w", err)))
-				}
-			}
-
-			return exercice.ImportFile(filePath, origin, digests[fname], digest_shown, disclaimer, published)
-		}
-
-		var f interface{}
-
-		if pf, exists := paramsFiles[fname]; exists && pf.URL != "" && !i.Exists(path.Join(exercice.Path, "files", fname)) {
-			dest := GetDestinationFilePath(pf.URL, &pf.Filename)
-
-			if _, err := os.Stat(dest); !os.IsNotExist(err) {
-				if d, err := actionAfterImport(dest, pf.URL); err == nil {
-					f = d
-				}
-			}
-
-			if f == nil {
-				errs = multierr.Append(errs, DownloadExerciceFile(paramsFiles[fname], dest, exercice, false))
-
-				f, err = actionAfterImport(dest, pf.URL)
-			}
-		} else {
-			f, err = i.importFile(path.Join(exercice.Path, "files", fname), actionAfterImport)
-		}
-
-		if err != nil {
-			errs = multierr.Append(errs, NewFileError(exercice, fname, err))
-			continue
-		}
+	// Import files in db
+	for _, file := range files {
+		fname := file.Name
+		f := file.file

 		if f.(*fic.EFile).Size == 0 {
 			errs = multierr.Append(errs, NewFileError(exercice, fname, fmt.Errorf("imported file is empty!")))
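Editor's aside, not part of the diff: the two hunks above split the former all-in-one routine in two. The new SyncExerciceFiles only enumerates, downloads and resolves the entries of files/, handing each one to a caller-supplied actionAfterImport callback, while ImportExerciceFiles keeps the old behaviour by wiring that callback to exercice.ImportFile and the digest/disclaimer checks. A minimal sketch of a caller that only wants the files fetched, assuming the admin/sync API exactly as it appears in this changeset (the helper name fetchExerciceFilesOnly is illustrative):

package main

import (
	"log"

	"srs.epita.fr/fic-server/admin/sync"
	"srs.epita.fr/fic-server/libfic"
)

// fetchExerciceFilesOnly resolves and downloads an exercice's files without
// touching the database: the no-op callback decides what "importing" means.
func fetchExerciceFilesOnly(exercice *fic.Exercice) error {
	paramsFiles, err := sync.GetExerciceFilesParams(sync.GlobalImporter, exercice)
	if err != nil {
		return err
	}

	files, errs := sync.SyncExerciceFiles(sync.GlobalImporter, exercice, paramsFiles,
		func(fname string, digests map[string][]byte, filePath, origin string) (interface{}, error) {
			return nil, nil // skip the EFile/database step entirely
		})
	log.Printf("resolved %d files", len(files))
	return errs
}

This is the same pattern the new fileexporter command, added later in this changeset, relies on for its export-only run.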
@@ -398,7 +398,7 @@ func SyncExercice(i Importer, theme *fic.Theme, epath string, dmap *map[int64]*f
 	if len(e.Image) > 0 {
 		if _, err := i.importFile(e.Image,
 			func(filePath string, origin string) (interface{}, error) {
-				if err := resizePicture(filePath, image.Rect(0, 0, 500, 300)); err != nil {
+				if err := resizePicture(i, origin, filePath, image.Rect(0, 0, 500, 300)); err != nil {
 					return nil, err
 				}

@@ -188,26 +188,35 @@ func GetDestinationFilePath(URI string, filename *string) string {
 	return path.Join(fic.FilesDir, strings.ToLower(base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(hash[:])), *filename)
 }

-func importFile(i Importer, URI string, dest string) error {
+var fileWriter = fileWriterToFS
+
+func SetWriteFileFunc(writerFunc func(dest string) (io.WriteCloser, error)) {
+	fileWriter = writerFunc
+}
+
+func fileWriterToFS(dest string) (io.WriteCloser, error) {
 	if err := os.MkdirAll(path.Dir(dest), 0751); err != nil {
-		return err
+		return nil, err
 	}

-	// Write file
-	if fdto, err := os.Create(dest); err != nil {
+	return os.Create(dest)
+}
+
+func importFile(i Importer, URI string, dest string) error {
+	if fdfrom, closer, err := GetFile(i, URI); err != nil {
+		os.Remove(dest)
 		return err
 	} else {
-		defer fdto.Close()
+		defer closer()

-		if fdfrom, closer, err := GetFile(i, URI); err != nil {
-			os.Remove(dest)
-			return err
-		} else {
-			defer closer()
-
-			_, err = io.Copy(fdto, fdfrom)
+		fdto, err := fileWriter(dest)
+		if err != nil {
 			return err
 		}
+		defer fdto.Close()
+
+		_, err = io.Copy(fdto, fdfrom)
+		return err
 	}
 }

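Editor's aside, not part of the diff: with the fileWriter variable and SetWriteFileFunc above, everything the sync package writes through importFile (and the thumbnails produced by resizePicture) goes through a pluggable io.WriteCloser factory instead of a hard-coded os.Create. A minimal sketch of a consumer that suppresses all writes, mirroring the nullFileWriter that repochecker gains at the end of this changeset (the discardCloser type is illustrative):

package main

import (
	"io"
	"log"

	"srs.epita.fr/fic-server/admin/sync"
)

// discardCloser satisfies io.WriteCloser while throwing every byte away.
type discardCloser struct{}

func (discardCloser) Write(p []byte) (int, error) { return len(p), nil }
func (discardCloser) Close() error                { return nil }

func main() {
	// From here on, files are "written" through this hook instead of the
	// local filesystem.
	sync.SetWriteFileFunc(func(dest string) (io.WriteCloser, error) {
		log.Println("skipping write of", dest)
		return discardCloser{}, nil
	})
}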
@@ -246,7 +246,7 @@ func SyncThemeDeep(i Importer, theme *fic.Theme, tid int, themeStep uint8, excep
 			log.Printf("Deep synchronization in progress: %d/255 - doing Theme %q, Exercice %q: %q\n", DeepSyncProgress, theme.Name, exercice.Title, exercice.Path)

 			DeepSyncProgress = 3 + uint8(tid)*themeStep + uint8(eid)*exerciceStep
-			errs = multierr.Append(errs, SyncExerciceFiles(i, exercice, ex_exceptions[eid]))
+			errs = multierr.Append(errs, ImportExerciceFiles(i, exercice, ex_exceptions[eid]))

 			DeepSyncProgress += exerciceStep / 3
 			flagsBindings, ferrs := SyncExerciceFlags(i, exercice, ex_exceptions[eid])
@@ -3,9 +3,7 @@ package sync
 import (
 	"bytes"
 	"encoding/base32"
-	"io"
 	"net/url"
-	"os"
 	"path"
 	"strings"

@@ -89,27 +87,10 @@ func (t *imageImporterTransformer) Transform(doc *ast.Document, reader text.Read
 		dPath := path.Join(fic.FilesDir, strings.ToLower(base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(t.hash[:])), iPath)
 		child.Destination = []byte(path.Join(t.absPath, string(child.Destination)))

-		if err := os.MkdirAll(path.Dir(dPath), 0755); err != nil {
+		err := importFile(t.importer, path.Join(t.rootDir, iPath), dPath)
+		if err != nil {
 			return ast.WalkStop, err
 		}
-
-		if fdto, err := os.Create(dPath); err != nil {
-			return ast.WalkStop, err
-		} else {
-			defer fdto.Close()
-
-			if fd, closer, err := GetFile(t.importer, path.Join(t.rootDir, iPath)); err != nil {
-				os.Remove(dPath)
-				return ast.WalkStop, err
-			} else {
-				defer closer()
-
-				_, err = io.Copy(fdto, fd)
-				if err != nil {
-					return ast.WalkStop, err
-				}
-			}
-		}
 	}

 	return ast.WalkContinue, nil
@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"image"
 	"image/jpeg"
+	"io"
 	"math/rand"
 	"net/http"
 	"os"
@@ -40,15 +41,19 @@ func GetThemes(i Importer) (themes []string, err error) {
 }

 // resizePicture makes the given image just fill the given rectangle.
-func resizePicture(importedPath string, rect image.Rectangle) error {
-	if fl, err := os.Open(importedPath); err != nil {
+func resizePicture(i Importer, imgPath string, importedPath string, rect image.Rectangle) error {
+	if fl, err := i.GetFile(imgPath); err != nil {
 		return err
 	} else {
 		if src, _, err := image.Decode(fl); err != nil {
-			fl.Close()
+			if flc, ok := fl.(io.ReadCloser); ok {
+				flc.Close()
+			}
 			return err
 		} else if src.Bounds().Max.X > rect.Max.X && src.Bounds().Max.Y > rect.Max.Y {
-			fl.Close()
+			if flc, ok := fl.(io.ReadCloser); ok {
+				flc.Close()
+			}

 			mWidth := rect.Max.Y * src.Bounds().Max.X / src.Bounds().Max.Y
 			mHeight := rect.Max.X * src.Bounds().Max.Y / src.Bounds().Max.X
@@ -61,7 +66,7 @@ func resizePicture(importedPath string, rect image.Rectangle) error {
 			dst := image.NewRGBA(rect)
 			draw.CatmullRom.Scale(dst, rect, src, src.Bounds(), draw.Over, nil)

-			dstFile, err := os.Create(strings.TrimSuffix(importedPath, ".jpg") + ".thumb.jpg")
+			dstFile, err := fileWriter(strings.TrimSuffix(importedPath, ".jpg") + ".thumb.jpg")
 			if err != nil {
 				return err
 			}
@@ -71,7 +76,7 @@ func resizePicture(importedPath string, rect image.Rectangle) error {
 				return err
 			}
 		} else {
-			dstFile, err := os.Create(strings.TrimSuffix(importedPath, ".jpg") + ".thumb.jpg")
+			dstFile, err := fileWriter(strings.TrimSuffix(importedPath, ".jpg") + ".thumb.jpg")
 			if err != nil {
 				return err
 			}
@@ -273,6 +278,36 @@ func BuildTheme(i Importer, tdir string) (th *fic.Theme, exceptions *CheckExcept
 	return
 }

+// SyncThemeFiles import all theme's related files
+func SyncThemeFiles(i Importer, btheme *fic.Theme) (errs error) {
+	if len(btheme.Image) > 0 {
+		if _, err := i.importFile(btheme.Image,
+			func(filePath string, origin string) (interface{}, error) {
+				if err := resizePicture(i, origin, filePath, image.Rect(0, 0, 500, 300)); err != nil {
+					return nil, err
+				}
+
+				btheme.Image = strings.TrimPrefix(filePath, fic.FilesDir)
+				btheme.BackgroundColor, _ = getBackgroundColor(filePath)
+				return nil, nil
+			}); err != nil {
+			errs = multierr.Append(errs, NewThemeError(btheme, fmt.Errorf("unable to import heading image: %w", err)))
+		}
+	}
+
+	if len(btheme.PartnerImage) > 0 {
+		if _, err := i.importFile(btheme.PartnerImage,
+			func(filePath string, origin string) (interface{}, error) {
+				btheme.PartnerImage = strings.TrimPrefix(filePath, fic.FilesDir)
+				return nil, nil
+			}); err != nil {
+			errs = multierr.Append(errs, NewThemeError(btheme, fmt.Errorf("unable to import partner image: %w", err)))
+		}
+	}
+
+	return
+}
+
 // SyncThemes imports new or updates existing themes.
 func SyncThemes(i Importer) (exceptions map[string]*CheckExceptions, errs error) {
 	if themes, err := GetThemes(i); err != nil {
@@ -294,29 +329,9 @@ func SyncThemes(i Importer) (exceptions map[string]*CheckExceptions, errs error)

 			exceptions[tdir] = excepts

-			if len(btheme.Image) > 0 {
-				if _, err := i.importFile(btheme.Image,
-					func(filePath string, origin string) (interface{}, error) {
-						if err := resizePicture(filePath, image.Rect(0, 0, 500, 300)); err != nil {
-							return nil, err
-						}
-
-						btheme.Image = strings.TrimPrefix(filePath, fic.FilesDir)
-						btheme.BackgroundColor, _ = getBackgroundColor(filePath)
-						return nil, nil
-					}); err != nil {
-					errs = multierr.Append(errs, NewThemeError(btheme, fmt.Errorf("unable to import heading image: %w", err)))
-				}
-			}
-
-			if len(btheme.PartnerImage) > 0 {
-				if _, err := i.importFile(btheme.PartnerImage,
-					func(filePath string, origin string) (interface{}, error) {
-						btheme.PartnerImage = strings.TrimPrefix(filePath, fic.FilesDir)
-						return nil, nil
-					}); err != nil {
-					errs = multierr.Append(errs, NewThemeError(btheme, fmt.Errorf("unable to import partner image: %w", err)))
-				}
+			err = SyncThemeFiles(i, btheme)
+			if err != nil {
+				errs = multierr.Append(errs, NewThemeError(btheme, fmt.Errorf("unable to import heading image: %w", err)))
 			}

 			var theme *fic.Theme
fileexporter/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
+fileexporter
fileexporter/archive.go (new file, 42 lines)
@@ -0,0 +1,42 @@
+package main
+
+import (
+	"archive/zip"
+	"errors"
+	"io"
+	"os"
+	"path"
+)
+
+type archiveFileCreator interface {
+	Create(name string) (io.Writer, error)
+}
+
+func init() {
+	OutputFormats["archive"] = func(args ...string) (func(string) (io.WriteCloser, error), error) {
+		if len(args) != 1 {
+			return nil, errors.New("archive has 1 required argument: [destination-file]")
+		}
+
+		fd, err := os.Create(args[0])
+		if err != nil {
+			return nil, err
+		}
+
+		var w archiveFileCreator
+		if path.Ext(args[0]) == ".zip" {
+			w = zip.NewWriter(fd)
+		} else {
+			return nil, errors.New("destination file has to have .zip extension")
+		}
+
+		return func(dest string) (io.WriteCloser, error) {
+			fw, err := w.Create(dest)
+			if err != nil {
+				return nil, err
+			}
+
+			return NopCloser(fw), nil
+		}, nil
+	}
+}
fileexporter/copy.go (new file, 22 lines)
@@ -0,0 +1,22 @@
+package main
+
+import (
+	"errors"
+	"io"
+
+	"srs.epita.fr/fic-server/libfic"
+)
+
+func init() {
+	OutputFormats["copy"] = func(args ...string) (func(string) (io.WriteCloser, error), error) {
+		if len(args) > 1 {
+			return nil, errors.New("copy can only take 1 argument: [destination-folder]")
+		}
+
+		if len(args) == 1 {
+			fic.FilesDir = args[0]
+		}
+
+		return nil, nil
+	}
+}
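Editor's aside, not part of the diff: archive.go and copy.go above show the whole extension point — an init() function registers a factory in the OutputFormats map under the name given on the command line. A hypothetical third format following the same pattern, reusing the NopCloser helper defined in fileexporter/main.go below (the "discard" format is illustrative and not part of this changeset):

package main

import "io"

func init() {
	// "discard" drops every exported file, which is handy for a dry run.
	OutputFormats["discard"] = func(args ...string) (func(string) (io.WriteCloser, error), error) {
		return func(dest string) (io.WriteCloser, error) {
			return NopCloser(io.Discard), nil
		}, nil
	}
}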
fileexporter/main.go (new file, 176 lines)
@@ -0,0 +1,176 @@
+package main
+
+import (
+	"bytes"
+	"errors"
+	"flag"
+	"io"
+	"log"
+	"os"
+	"path"
+	"strings"
+
+	"srs.epita.fr/fic-server/admin/sync"
+	"srs.epita.fr/fic-server/libfic"
+)
+
+var OutputFormats = map[string]func(...string) (func(string) (io.WriteCloser, error), error){}
+
+func exportThemeFiles(tdir string) (errs error) {
+	theme, exceptions, err := sync.BuildTheme(sync.GlobalImporter, tdir)
+	errs = errors.Join(errs, err)
+
+	err = sync.SyncThemeFiles(sync.GlobalImporter, theme)
+	if err != nil {
+		errs = errors.Join(errs, err)
+	}
+
+	exercices, err := sync.GetExercices(sync.GlobalImporter, theme)
+	if err != nil {
+		log.Fatalf("Unable to list exercices for theme %q: %s", theme.Name, err)
+	}
+
+	dmap := map[int64]*fic.Exercice{}
+
+	for i, edir := range exercices {
+		log.Printf("In theme %s, doing exercice %d/%d: %s", tdir, i, len(exercices), tdir)
+		err = exportExerciceFiles(theme, edir, &dmap, exceptions)
+		errs = errors.Join(errs, err)
+	}
+
+	return
+}
+
+func exportExerciceFiles(theme *fic.Theme, edir string, dmap *map[int64]*fic.Exercice, exceptions *sync.CheckExceptions) (errs error) {
+	exercice, _, eid, exceptions, _, berrs := sync.BuildExercice(sync.GlobalImporter, theme, path.Join(theme.Path, edir), dmap, nil)
+	errs = errors.Join(errs, berrs)
+
+	if exercice != nil {
+		paramsFiles, err := sync.GetExerciceFilesParams(sync.GlobalImporter, exercice)
+		if err != nil {
+			errs = errors.Join(errs, sync.NewChallengeTxtError(exercice, 0, err))
+			return
+		}
+
+		_, err = sync.SyncExerciceFiles(sync.GlobalImporter, exercice, paramsFiles, func(fname string, digests map[string][]byte, filePath, origin string) (interface{}, error) {
+			return nil, nil
+		})
+		errs = errors.Join(errs, err)
+
+		if dmap != nil {
+			(*dmap)[int64(eid)] = exercice
+		}
+	}
+	return
+}
+
+type nopCloser struct {
+	w io.Writer
+}
+
+func (nc *nopCloser) Close() error {
+	return nil
+}
+
+func (nc *nopCloser) Write(p []byte) (int, error) {
+	return nc.w.Write(p)
+}
+
+func NopCloser(w io.Writer) *nopCloser {
+	return &nopCloser{w}
+}
+
+func writeFileToTar(dest string) (io.WriteCloser, error) {
+	log.Println("import2Tar", dest)
+	return NopCloser(bytes.NewBuffer([]byte{})), nil
+}
+
+func main() {
+	cloudDAVBase := ""
+	cloudUsername := "fic"
+	cloudPassword := ""
+	localImporterDirectory := ""
+
+	// Read paremeters from environment
+	if v, exists := os.LookupEnv("FICCLOUD_URL"); exists {
+		cloudDAVBase = v
+	}
+	if v, exists := os.LookupEnv("FICCLOUD_USER"); exists {
+		cloudUsername = v
+	}
+	if v, exists := os.LookupEnv("FICCLOUD_PASS"); exists {
+		cloudPassword = v
+	}
+
+	// Read parameters from command line
+	flag.StringVar(&localImporterDirectory, "localimport", localImporterDirectory,
+		"Base directory where to find challenges files to import, local part")
+	flag.StringVar(&cloudDAVBase, "clouddav", cloudDAVBase,
+		"Base directory where to find challenges files to import, cloud part")
+	flag.StringVar(&cloudUsername, "clouduser", cloudUsername, "Username used to sync")
+	flag.StringVar(&cloudPassword, "cloudpass", cloudPassword, "Password used to sync")
+	flag.BoolVar(&fic.OptionalDigest, "optionaldigest", fic.OptionalDigest, "Is the digest required when importing files?")
+	flag.BoolVar(&fic.StrongDigest, "strongdigest", fic.StrongDigest, "Are BLAKE2b digests required or is SHA-1 good enough?")
+	flag.Parse()
+
+	// Do not display timestamp
+	log.SetFlags(0)
+
+	// Instantiate importer
+	if localImporterDirectory != "" {
+		sync.GlobalImporter = sync.LocalImporter{Base: localImporterDirectory, Symlink: false}
+	} else if cloudDAVBase != "" {
+		sync.GlobalImporter, _ = sync.NewCloudImporter(cloudDAVBase, cloudUsername, cloudPassword)
+	}
+
+	if sync.GlobalImporter == nil {
+		log.Fatal("No importer configured!")
+	}
+
+	log.Println("Using", sync.GlobalImporter.Kind())
+
+	// Configure destination
+	if flag.NArg() < 1 {
+		var formats []string
+
+		for k := range OutputFormats {
+			formats = append(formats, k)
+		}
+
+		log.Fatal("Please define wanted output format between [" + strings.Join(formats, " ") + "]")
+	} else if outputFormat, ok := OutputFormats[flag.Arg(0)]; !ok {
+		var formats []string
+
+		for k := range OutputFormats {
+			formats = append(formats, k)
+		}
+
+		log.Fatal("Please define wanted output format between [" + strings.Join(formats, " ") + "]")
+	} else {
+		fw, err := outputFormat(flag.Args()[1:]...)
+		if err != nil {
+			log.Fatal(err)
+		} else if fw != nil {
+			sync.SetWriteFileFunc(fw)
+		}
+	}
+
+	themes, err := sync.GetThemes(sync.GlobalImporter)
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	hasError := false
+	for i, tdir := range themes {
+		log.Printf("Doing theme %d/%d: %s", i, len(themes), tdir)
+		err = exportThemeFiles(tdir)
+		if err != nil {
+			hasError = true
+			log.Println(err)
+		}
+	}
+
+	if hasError {
+		os.Exit(1)
+	}
+}
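Editor's aside, not part of the diff: given the flags declared in main() and the formats registered in archive.go and copy.go, a typical invocation of the new tool would look like `fileexporter -localimport ./challenges archive export.zip` (binary name and paths are illustrative). The first positional argument selects the output format; the remaining arguments are handed to its factory — for "archive" the mandatory .zip destination, for "copy" an optional destination folder that overrides fic.FilesDir.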
frontend/fic/package-lock.json (generated, 6 lines changed)
@@ -1308,9 +1308,9 @@
       }
     },
     "node_modules/@sveltejs/kit": {
-      "version": "2.20.2",
-      "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.20.2.tgz",
-      "integrity": "sha512-Dv8TOAZC9vyfcAB9TMsvUEJsRbklRTeNfcYBPaeH6KnABJ99i3CvCB2eNx8fiiliIqe+9GIchBg4RodRH5p1BQ==",
+      "version": "2.20.4",
+      "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.20.4.tgz",
+      "integrity": "sha512-B3Y1mb1Qjt57zXLVch5tfqsK/ebHe6uYTcFSnGFNwRpId3+fplLgQK6Z2zhDVBezSsPuhDq6Pry+9PA88ocN6Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
qa/ui/package-lock.json (generated, 6 lines changed)
@@ -989,9 +989,9 @@
       }
     },
    "node_modules/@sveltejs/kit": {
-      "version": "2.20.2",
-      "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.20.2.tgz",
-      "integrity": "sha512-Dv8TOAZC9vyfcAB9TMsvUEJsRbklRTeNfcYBPaeH6KnABJ99i3CvCB2eNx8fiiliIqe+9GIchBg4RodRH5p1BQ==",
+      "version": "2.20.4",
+      "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.20.4.tgz",
+      "integrity": "sha512-B3Y1mb1Qjt57zXLVch5tfqsK/ebHe6uYTcFSnGFNwRpId3+fplLgQK6Z2zhDVBezSsPuhDq6Pry+9PA88ocN6Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6,7 +6,7 @@ import (
 	"errors"
 	"flag"
 	"fmt"
-	"io/ioutil"
+	"io"
 	"log"
 	"os"
 	"os/exec"
@@ -194,14 +194,8 @@ func main() {
 		regenImporter = true
 	}

-	var err error
-	// Create temporary directory for storing FILES/ content
-	fic.FilesDir, err = ioutil.TempDir("", "fic-repochecker.")
-	if err != nil {
-
-	}
-	defer os.RemoveAll(fic.FilesDir)
+	// Don't write any files
+	sync.SetWriteFileFunc(func(dest string) (io.WriteCloser, error) { return &nullFileWriter{}, nil })

 	if sync.GlobalImporter != nil {
 		log.Println("Using", sync.GlobalImporter.Kind())
repochecker/null.go (new file, 11 lines)
@@ -0,0 +1,11 @@
+package main
+
+type nullFileWriter struct{}
+
+func (fw *nullFileWriter) Write(p []byte) (int, error) {
+	return len(p), nil
+}
+
+func (fw *nullFileWriter) Close() error {
+	return nil
+}