Merge big split files before import
This commit is contained in:
parent
0466e7fd53
commit
9c56f5cb30
@@ -6,6 +6,7 @@ import (
 	"encoding/base32"
 	"encoding/json"
 	"errors"
 	"fmt"
 	"log"
 	"net/http"
 	"os"
@@ -19,8 +20,15 @@ var CloudDAVBase string
 var CloudUsername string
 var CloudPassword string
 
+type uploadedFile struct {
+	URI    string
+	Digest []byte
+	Path   string
+	Parts  []string
+}
+
 func createExerciceFile(theme fic.Theme, exercice fic.Exercice, args map[string]string, body []byte) (interface{}, error) {
-	var uf map[string]string
+	var uf uploadedFile
 	if err := json.Unmarshal(body, &uf); err != nil {
 		return nil, err
 	}
@@ -30,16 +38,39 @@ func createExerciceFile(theme fic.Theme, exercice fic.Exercice, args map[string]
 	var fromURI string
 	var getFile func(string) (error)
 
-	if URI, ok := uf["URI"]; ok {
-		hash = sha512.Sum512([]byte(URI))
-		logStr = "Import file from Cloud: " + URI + " =>"
-		fromURI = URI
-		getFile = func(dest string) error { return getCloudFile(URI, dest); }
-	} else if path, ok := uf["path"]; ok {
-		hash = sha512.Sum512([]byte(path))
-		logStr = "Import file from local FS: " + path + " =>"
-		fromURI = path
-		getFile = func(dest string) error { return os.Symlink(path, dest); }
+	if uf.URI != "" {
+		hash = sha512.Sum512([]byte(uf.URI))
+		logStr = "Import file from Cloud: " + uf.URI + " =>"
+		fromURI = uf.URI
+		getFile = func(dest string) error { return getCloudFile(uf.URI, dest); }
+	} else if uf.Path != "" && len(uf.Parts) > 0 {
+		hash = sha512.Sum512([]byte(uf.Path))
+		logStr = fmt.Sprintf("Import file from local FS: %s =>", uf.Parts)
+		fromURI = uf.Path
+		getFile = func(dest string) error {
+			if fdto, err := os.Create(dest); err != nil {
+				return err
+			} else {
+				writer := bufio.NewWriter(fdto)
+				for _, partname := range uf.Parts {
+					if fdfrm, err := os.Open(partname); err != nil {
+						return err
+					} else {
+						reader := bufio.NewReader(fdfrm)
+						reader.WriteTo(writer)
+						writer.Flush()
+						fdfrm.Close()
+					}
+				}
+				fdto.Close()
+			}
+			return nil
+		}
+	} else if uf.Path != "" {
+		hash = sha512.Sum512([]byte(uf.Path))
+		logStr = "Import file from local FS: " + uf.Path + " =>"
+		fromURI = uf.Path
+		getFile = func(dest string) error { return os.Symlink(uf.Path, dest); }
 	} else {
 		return nil, errors.New("URI or path not filled")
 	}
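
Note on the merge step above: the new getFile closure concatenates every entry of uf.Parts into the destination file, but it drops the error returned by reader.WriteTo and leaves fdto open when a later os.Open fails. For illustration only, a minimal standalone sketch of the same merge using io.Copy and defer; the helper name mergeParts and the file names are hypothetical, not part of this commit:

package main

import (
	"fmt"
	"io"
	"os"
)

// mergeParts concatenates the given part files, in order, into dest.
// Hypothetical helper: the commit inlines this logic in the getFile
// closure instead of a named function.
func mergeParts(dest string, parts []string) error {
	fdto, err := os.Create(dest)
	if err != nil {
		return err
	}
	defer fdto.Close()

	for _, partname := range parts {
		fdfrm, err := os.Open(partname)
		if err != nil {
			return err
		}
		// io.Copy surfaces read and write errors, which the committed
		// closure silently ignores via reader.WriteTo(writer).
		_, err = io.Copy(fdto, fdfrm)
		fdfrm.Close()
		if err != nil {
			return err
		}
	}
	return nil
}

func main() {
	// Illustrative file names.
	if err := mergeParts("dump.pcap", []string{"dump.pcap.00", "dump.pcap.01"}); err != nil {
		fmt.Println("merge failed:", err)
	}
}

Since os.Create truncates an existing destination, re-running an import with this approach overwrites a previously merged file rather than appending to it.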
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-BASEURL="http://localhost:8081/admin"
+BASEURL="http://localhost:8081"
 BASEURI="https://owncloud.srs.epita.fr/remote.php/webdav/FIC 2017"
 BASEFILE="/mnt/fic/"
 CLOUDPASS=fic:'f>t\nV33R|(+?$i*'
@@ -20,19 +20,31 @@ new_exercice() {
 	GAIN="$5"
 	VIDEO="$6"
 
-	curl -f -s -d "{\"title\": \"$TITLE\", \"statement\": \"$STATEMENT\", \"depend\": $DEPEND, \"gain\": $GAIN, \"videoURI\": \"$VIDEO\"}" "${BASEURL}/api/themes/$THEME" |
+	curl -f -s -d "{\"title\": \"$TITLE\", \"statement\": \"$STATEMENT\", \"depend\": $DEPEND, \"gain\": $GAIN, \"videoURI\": \"$VIDEO\"}" "${BASEURL}/api/themes/$THEME/exercices/" |
 		grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+"
 }
 
-new_file() {
+new_file() (
 	THEME="$1"
 	EXERCICE="$2"
 	URI="$3"
+	ARGS="$4"
+
+	FIRST=
+	PARTS=$(echo "$ARGS" | while read arg
+	do
+		[ -n "$arg" ] && {
+			[ -z "${FIRST}" ] || echo -n ","
+			echo "\"$arg\""
+		}
+		FIRST=1
+	done)
 
-#	curl -f -s -d "{\"URI\": \"${BASEFILE}${URI}\"}" "${BASEURL}/api/themes/$THEME/$EXERCICE/files" |
-	curl -f -s -d "{\"path\": \"${BASEFILE}${URI}\"}" "${BASEURL}/api/themes/$THEME/$EXERCICE/files" |
-		grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+"
-}
+	curl -f -s -d @- "${BASEURL}/api/themes/$THEME/exercices/$EXERCICE/files" <<EOF | grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+"
+{"path": "${BASEFILE}${URI}", "parts": [${PARTS}]}
+EOF
+)
 
 new_hint() {
 	THEME="$1"
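
The rewritten new_file() turns the newline-separated part list in $ARGS into a JSON array and posts {"path": ..., "parts": [...]} to the files endpoint, which the Go handler decodes into the uploadedFile struct added above. A small sketch of that decoding with an illustrative body (the paths are made up); json.Unmarshal matches the lowercase "path"/"parts" keys to the exported fields case-insensitively:

package main

import (
	"encoding/json"
	"fmt"
)

// Same shape as the struct added in the Go handler above.
type uploadedFile struct {
	URI    string
	Digest []byte
	Path   string
	Parts  []string
}

func main() {
	// Body in the shape new_file() now sends; paths are illustrative.
	body := []byte(`{"path": "/mnt/fic/theme/1-exo/files/dump.pcap",
		"parts": ["/mnt/fic/theme/1-exo/files/dump.pcap.00",
		          "/mnt/fic/theme/1-exo/files/dump.pcap.01"]}`)

	var uf uploadedFile
	if err := json.Unmarshal(body, &uf); err != nil {
		panic(err)
	}
	fmt.Println(uf.Path, len(uf.Parts)) // /mnt/fic/theme/1-exo/files/dump.pcap 2
}

When the parts array is empty, as it is for the three-argument new_file calls, len(uf.Parts) is zero and the request falls through to the plain symlink branch of createExerciceFile.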
@@ -41,7 +53,7 @@ new_hint() {
 	CONTENT=`echo "$4" | sed 's/"/\\\\"/g' | sed ':a;N;$!ba;s/\n/<br>/g'`
 	COST="$5"
 
-	curl -f -s -d "{\"title\": \"$TITLE\", \"content\": \"$CONTENT\", \"cost\": \"$COST\"}" "${BASEURL}/api/themes/$THEME/$EXERCICE/hints" |
+	curl -f -s -d "{\"title\": \"$TITLE\", \"content\": \"$CONTENT\", \"cost\": $COST}" "${BASEURL}/api/themes/$THEME/exercices/$EXERCICE/hints" |
 		grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+"
 }
 
@@ -51,7 +63,7 @@ new_key() {
 	NAME="$3"
 	KEY=`echo $4 | sed 's/"/\\\\"/g' | sed 's#\\\\#\\\\\\\\#g'`
 
-	curl -f -s -d "{\"name\": \"$NAME\", \"key\": \"$KEY\"}" "${BASEURL}/api/themes/$THEME/$EXERCICE/keys" |
+	curl -f -s -d "{\"name\": \"$NAME\", \"key\": \"$KEY\"}" "${BASEURL}/api/themes/$THEME/exercices/$EXERCICE/keys" |
 		grep -Eo '"id":[0-9]+,' | grep -Eo "[0-9]+"
 }
 
@@ -59,7 +71,7 @@ get_dir_from_cloud() {
 	curl -f -s -X PROPFIND -u "${CLOUDPASS}" "${BASEURI}$1" | xmllint --format - | grep 'd:href' | sed -E 's/^.*>(.*)<.*$/\1/'
 }
 get_dir() {
-	ls "${BASEFILE}$1"
+	ls "${BASEFILE}$1" 2> /dev/null
 }
 #alias get_dir=get_dir_from_cloud
 
@@ -67,7 +79,7 @@ get_file_from_cloud() {
 	curl -f -s -u "${CLOUDPASS}" "${BASEURI}$1" | tr -d '\r'
 }
 get_file() {
-	cat "${BASEFILE}$1" | tr -d '\r'
+	cat "${BASEFILE}$1" 2> /dev/null | tr -d '\r'
 }
 #alias get_file=get_file_from_cloud
 
@@ -91,7 +103,7 @@ do
 
 	LAST=null
 	EXO_NUM=0
-	get_dir "${THM_BASEURI}" | sed 1d | while read f; do basename "$f"; done | while read EXO_URI
+	get_dir "${THM_BASEURI}" | while read f; do basename "$f"; done | while read EXO_URI
 	do
 		case ${EXO_URI} in
 		[0-9]-*)
@@ -109,6 +121,7 @@ do
 		EXO_BASEURI="${EXO_URI}/"
 
 		EXO_VIDEO=$(get_dir "${THM_BASEURI}${EXO_BASEURI}/resolution/" | grep -E "\.(mov|mkv|mp4|avi|flv|ogv|webm)$" | while read f; do basename $f; done | tail -1)
+		[ -n "$EXO_VIDEO" ] && EXO_VIDEO="/resolution${THM_BASEURI}${EXO_BASEURI}resolution/${EXO_VIDEO}"
 
 		if [ "${LAST}" = "null" ]; then
 			echo ">>> Assuming this exercice has no dependency"
@@ -121,7 +134,7 @@ do
 
 		EXO_SCENARIO=$(get_file "${THM_BASEURI}${EXO_BASEURI}/scenario.txt")
 
-		EXO_ID=`new_exercice "${THEME_ID}" "${EXO_NAME}" "${EXO_SCENARIO}" "${LAST}" "${EXO_GAIN}" "/resolution${THM_BASEURI}${EXO_BASEURI}resolution/${EXO_VIDEO}"`
+		EXO_ID=`new_exercice "${THEME_ID}" "${EXO_NAME}" "${EXO_SCENARIO}" "${LAST}" "${EXO_GAIN}" "${EXO_VIDEO}"`
 		if [ -z "$EXO_ID" ]; then
 			echo -e "\e[31;01m!!! An error occured during exercice add.\e[00m"
 			continue
@@ -151,16 +164,37 @@ do
 		# Hints
 		EXO_HINT=$(get_file "${THM_BASEURI}${EXO_BASEURI}/hint.txt")
 		if [ -n "$EXO_HINT" ]; then
 			HINT_ID=`new_hint "${THEME_ID}" "${EXO_ID}" "Astuce #1" "${EXO_HINT}" "1"`
 			if [ -z "$HINT_ID" ]; then
 				echo -e "\e[31;01m!!! An error occured during hint import!\e[00m (title=Astuce #1;content=${EXO_HINT};cost=1)"
 			else
 				echo -e "\e[32m>>> New hint added:\e[00m $HINT_ID - Astuce #1"
 			fi
 		fi
 
-		# Files
-		get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep -v DIGESTS.txt | while read f; do basename "$f"; done | while read FILE_URI
+		# Files: splited
+		get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep -v DIGESTS.txt | grep '[0-9][0-9]$' | sed -E 's/\.?([0-9][0-9])$//' | sort | uniq | while read f; do basename "$f"; done | while read FILE_URI
 		do
+			PARTS=
+			for part in $(get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep "${FILE_URI}" | sort)
+			do
+				PARTS="${PARTS}${BASEFILE}${THM_BASEURI}${EXO_BASEURI}files/${part}
+"
+			done
+			echo -e "\e[35mImport splited file ${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI} from\e[00m `echo ${PARTS} | tr '\n' ' '`"
+
+			FILE_ID=`new_file "${THEME_ID}" "${EXO_ID}" "${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI}" "${PARTS}"`
+			if [ -z "$FILE_ID" ]; then
+				echo -e "\e[31;01m!!! An error occured during file import! Please check path.\e[00m"
+			else
+				echo -e "\e[32m>>> New file added:\e[00m $FILE_ID - $FILE_URI"
+			fi
+		done
+
+		# Files: entire
+		get_dir "${THM_BASEURI}${EXO_BASEURI}files/" | grep -v DIGESTS.txt | grep -v '[0-9][0-9]$' | while read f; do basename "$f"; done | while read FILE_URI
+		do
 			echo "Import file ${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI}"
 			FILE_ID=`new_file "${THEME_ID}" "${EXO_ID}" "${THM_BASEURI}${EXO_BASEURI}files/${FILE_URI}"`
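
The "Files: splited" loop above selects entries whose names end in two digits, strips that numeric suffix to recover the logical file name, and collects the sorted parts for each one. A rough Go equivalent of that grep/sed/sort/uniq pipeline; the groupParts helper and the entry names are hypothetical, for illustration only:

package main

import (
	"fmt"
	"regexp"
	"sort"
)

// partSuffix mirrors the sed expression 's/\.?([0-9][0-9])$//': a
// trailing two-digit chunk index, optionally preceded by a dot.
var partSuffix = regexp.MustCompile(`\.?[0-9][0-9]$`)

// groupParts maps each logical file name to its sorted list of parts.
func groupParts(entries []string) map[string][]string {
	groups := make(map[string][]string)
	for _, e := range entries {
		if !partSuffix.MatchString(e) {
			continue // handled by the "Files: entire" loop instead
		}
		base := partSuffix.ReplaceAllString(e, "")
		groups[base] = append(groups[base], e)
	}
	for _, parts := range groups {
		sort.Strings(parts)
	}
	return groups
}

func main() {
	// Illustrative directory listing.
	entries := []string{"dump.pcap.01", "dump.pcap.00", "notes.txt"}
	for name, parts := range groupParts(entries) {
		fmt.Println(name, parts) // dump.pcap [dump.pcap.00 dump.pcap.01]
	}
}

Note that the shell version's inner grep "${FILE_URI}" matches by substring and treats dots as regex wildcards, so split files whose names share a substring could pick up each other's parts; the map-based grouping above keys on the exact stripped name instead.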