sync/file: hide by default the whole calculated digest

This is to avoid a direct copy/paste into DIGESTS.txt without a real local calculation.
nemunaire 2018-06-20 21:12:00 +02:00
parent 3b7d9a2a75
commit d05c211a7c


@@ -23,6 +23,9 @@ var OptionalDigest bool = false
 // StrongDigest forces the use of BLAKE2b hash in place of SHA1 (or mixed SHA1/BLAKE2b).
 var StrongDigest bool = false
 
+// Set PlainDigest if digest errors should contain the whole calculated hash, to be paste directly into DIGESTS file.
+var PlainDigest bool = false
+
 // EFile represents a challenge file.
 type EFile struct {
 	Id int64 `json:"id"`
@@ -117,6 +120,16 @@ func (e Exercice) GetFileByPath(path string) (EFile, error) {
 	return f, nil
 }
 
+// minifyHash returns only the begining and the end of the given hash.
+// Use this function to ensure people doesn't fill their file with our calculated hash.
+func minifyHash(hash string) string {
+	if PlainDigest {
+		return hash
+	} else {
+		return hash[0:len(hash)/3] + "..." + hash[len(hash)/2:]
+	}
+}
+
 // checkFileHash checks if the file at the given filePath has the given digest.
 // It also returns the file's size.
 func checkFileHash(filePath string, digest []byte) ([]byte, int64, error) {
@@ -142,20 +155,20 @@ func checkFileHash(filePath string, digest []byte) ([]byte, int64, error) {
 	if len(digest) != len(result512) {
 		if len(digest) != len(result160) {
-			return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: sha1:" + hex.EncodeToString(result160) + " & blake2b:" + hex.EncodeToString(result512) + " vs. given: " + hex.EncodeToString(digest))
+			return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
 		} else if StrongDigest {
-			return []byte{}, fi.Size(), errors.New("Invalid digests: SHA-1 checksums are no more accepted. Calculated sha1:" + hex.EncodeToString(result160) + " & blake2b:" + hex.EncodeToString(result512) + " vs. given: " + hex.EncodeToString(digest))
+			return []byte{}, fi.Size(), errors.New("Invalid digests: SHA-1 checksums are no more accepted. Calculated sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
 		}
 
 		for k := range result160 {
 			if result160[k] != digest[k] {
-				return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: sha1:" + hex.EncodeToString(result160) + " & blake2b:" + hex.EncodeToString(result512) + " vs. given: " + hex.EncodeToString(digest))
+				return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: sha1:" + minifyHash(hex.EncodeToString(result160)) + " & blake2b:" + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
 			}
 		}
 	} else {
 		for k := range result512 {
 			if result512[k] != digest[k] {
-				return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: " + hex.EncodeToString(result512) + " vs. given: " + hex.EncodeToString(digest))
+				return []byte{}, fi.Size(), errors.New("Digests doesn't match: calculated: " + minifyHash(hex.EncodeToString(result512)) + " vs. given: " + hex.EncodeToString(digest))
 			}
 		}
 	}
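
For illustration, a minimal, self-contained Go sketch of the truncation behaviour introduced by this commit. The lowercase plainDigest variable, the main wrapper and the sample input are hypothetical scaffolding so the snippet runs on its own; in the repository the flag is the exported PlainDigest variable shown in the diff above.

package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
)

// plainDigest mirrors the new PlainDigest flag: when false (the default),
// error messages only ever report a partial hash.
var plainDigest = false

// minifyHash copies the logic added above: keep the first third of the hex
// string and everything from the midpoint onward, with "..." in between.
func minifyHash(hash string) string {
	if plainDigest {
		return hash
	}
	return hash[0:len(hash)/3] + "..." + hash[len(hash)/2:]
}

func main() {
	sum := sha1.Sum([]byte("challenge.zip")) // arbitrary sample content
	digest := hex.EncodeToString(sum[:])     // 40 hex characters for SHA-1

	fmt.Println("full:     ", digest)
	fmt.Println("minified: ", minifyHash(digest))
}

For a 40-character SHA-1 hex digest this keeps characters [0:13] and [20:40], so most of the hash remains visible for comparison, but the string printed in the error can no longer be pasted verbatim into DIGESTS.txt.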