## Integrity Manager Tests - Canonical Leaf Hashing & Parallel Processing
##
## Comprehensive test suite for the corrected Merkle tree implementation
import std/[unittest, os, strutils, tables, times]

import nip/integrity
|
suite "Canonical Leaf Hashing - Determinism":
  ## Verifies that leaf hashes are path-aware (identical content at different
  ## paths hashes differently) and that leaf ordering is deterministic.

  setup:
    let testDir = "/tmp/test_canonical"
    createDir(testDir)
    createDir(testDir / "lib")
    createDir(testDir / "data")

    # Identical files in different directories, plus one unique file,
    # so we can prove the leaf hash depends on the path, not just content.
    writeFile(testDir / "lib" / "foo.txt", "Identical content")
    writeFile(testDir / "data" / "foo.txt", "Identical content")
    writeFile(testDir / "unique.txt", "Unique content")

  teardown:
    # `testDir` from `setup` is in scope: unittest splices setup/teardown
    # into each test body. Using it avoids duplicating the literal path.
    removeDir(testDir)

  proc freshCache(): IntegrityCache =
    ## Returns an empty cache so each measurement starts cold.
    IntegrityCache(
      fileHashes: initTable[string, string](),
      dirHashes: initTable[string, string](),
      lastModified: initTable[string, int64]()
    )

  test "Identical content in different paths produces different canonical hashes":
    # CRITICAL: the canonical hash must mix in the relative path; otherwise
    # moving a file would be invisible to the Merkle root.
    let libHash = calculateCanonicalHash("lib/foo.txt", "content-hash-123")
    let dataHash = calculateCanonicalHash("data/foo.txt", "content-hash-123")

    check libHash != dataHash
    echo " lib/foo.txt: " & libHash
    echo " data/foo.txt: " & dataHash

  test "Moving a file changes the Merkle root":
    var cache1 = freshCache()

    # Merkle root with the original layout.
    let tree1 = buildMerkleTree(testDir, cache1)
    let root1 = tree1.rootHash

    # Relocate a file without changing any content.
    moveFile(testDir / "unique.txt", testDir / "lib" / "unique.txt")

    var cache2 = freshCache()

    # Merkle root with the new layout must differ.
    let tree2 = buildMerkleTree(testDir, cache2)
    let root2 = tree2.rootHash

    check root1 != root2
    echo " Before move: " & root1
    echo " After move: " & root2

  test "Canonical leaves are sorted by relative path":
    var cache = freshCache()

    let leaves = collectCanonicalLeaves(testDir, cache, parallel=false)

    # Adjacent pairs strictly increasing => sorted and duplicate-free.
    for i in 0..<(leaves.len - 1):
      check leaves[i].relativePath < leaves[i+1].relativePath

    echo " Sorted paths:"
    for leaf in leaves:
      echo " " & leaf.relativePath
suite "Parallel Processing":
  ## Checks that parallel leaf collection is result-identical to sequential
  ## processing, and that the cache short-circuits repeated runs.

  setup:
    let testDir = "/tmp/test_parallel"
    createDir(testDir)

    # Enough files that parallel hashing has real work to distribute.
    for i in 1..50:
      writeFile(testDir / ("file" & $i & ".txt"), "Content " & $i)

  teardown:
    removeDir(testDir)

  proc newEmptyCache(): IntegrityCache =
    ## Returns an empty cache so each measurement starts cold.
    IntegrityCache(
      fileHashes: initTable[string, string](),
      dirHashes: initTable[string, string](),
      lastModified: initTable[string, int64]()
    )

  test "Parallel and sequential produce identical results":
    var cache1 = newEmptyCache()
    var cache2 = newEmptyCache()

    # NOTE: use wall-clock time (epochTime), not cpuTime. cpuTime sums CPU
    # time across all threads, so a parallel run would report *more* time
    # than sequential and make the speedup figure meaningless.
    let start1 = epochTime()
    let leaves1 = collectCanonicalLeaves(testDir, cache1, parallel=false)
    let time1 = epochTime() - start1

    let start2 = epochTime()
    let leaves2 = collectCanonicalLeaves(testDir, cache2, parallel=true)
    let time2 = epochTime() - start2

    # Results must be identical regardless of scheduling order.
    check leaves1.len == leaves2.len
    for i in 0..<leaves1.len:
      check leaves1[i].relativePath == leaves2[i].relativePath
      check leaves1[i].canonicalHash == leaves2[i].canonicalHash

    echo " Sequential: " & time1.formatFloat(ffDecimal, 3) & "s"
    echo " Parallel: " & time2.formatFloat(ffDecimal, 3) & "s"
    echo " Speedup: " & (time1 / time2).formatFloat(ffDecimal, 2) & "x"

  test "Parallel processing with cache hits":
    var cache = newEmptyCache()

    # First run populates the cache.
    let leaves1 = collectCanonicalLeaves(testDir, cache, parallel=true)

    # Second run should be served from the cache. Measure wall-clock time
    # (epochTime) since the 100ms budget below is a latency bound.
    let start = epochTime()
    let leaves2 = collectCanonicalLeaves(testDir, cache, parallel=true)
    let duration = epochTime() - start

    # Cached results must match the freshly computed ones.
    check leaves1.len == leaves2.len
    for i in 0..<leaves1.len:
      check leaves1[i].canonicalHash == leaves2[i].canonicalHash

    # Cache hit should be very fast (timing check — may be flaky on
    # heavily loaded CI machines).
    check duration < 0.1 # Should complete in < 100ms
    echo " Cache hit duration: " & duration.formatFloat(ffDecimal, 3) & "s"
suite "Merkle Tree Construction":
  ## Determinism and edge cases (empty directory, single file) for
  ## buildMerkleTree.

  setup:
    let testDir = "/tmp/test_merkle_canonical"
    createDir(testDir)
    createDir(testDir / "subdir")
    writeFile(testDir / "file1.txt", "Content 1")
    writeFile(testDir / "file2.txt", "Content 2")
    writeFile(testDir / "subdir" / "file3.txt", "Content 3")

  teardown:
    # `testDir` from `setup` is in scope; avoids repeating the literal path.
    removeDir(testDir)

  proc emptyCache(): IntegrityCache =
    ## Returns an empty cache so each build starts cold.
    IntegrityCache(
      fileHashes: initTable[string, string](),
      dirHashes: initTable[string, string](),
      lastModified: initTable[string, int64]()
    )

  test "Merkle tree is deterministic":
    # Two independent builds over the same content must agree exactly.
    var cache1 = emptyCache()
    var cache2 = emptyCache()

    let tree1 = buildMerkleTree(testDir, cache1)
    let tree2 = buildMerkleTree(testDir, cache2)

    check tree1.rootHash == tree2.rootHash
    check tree1.totalFiles == tree2.totalFiles
    check tree1.totalSize == tree2.totalSize

  test "Empty directory has deterministic hash":
    let emptyDir = "/tmp/test_empty"
    createDir(emptyDir)
    defer: removeDir(emptyDir)

    var cache = emptyCache()

    # An empty tree still needs a well-defined, non-empty root hash.
    let tree = buildMerkleTree(emptyDir, cache)
    check tree.totalFiles == 0
    check tree.rootHash.len > 0

  test "Single file tree":
    let singleDir = "/tmp/test_single"
    createDir(singleDir)
    writeFile(singleDir / "only.txt", "Only file")
    defer: removeDir(singleDir)

    var cache = emptyCache()

    # Degenerate tree: the root is derived from a single leaf.
    let tree = buildMerkleTree(singleDir, cache)
    check tree.totalFiles == 1
    check tree.rootHash.len > 0
suite "Content Verification":
  ## End-to-end verifyContent behaviour: success on a matching root hash,
  ## structured errors on a mismatch.

  setup:
    let testDir = "/tmp/test_verify_canonical"
    createDir(testDir)
    writeFile(testDir / "test.txt", "Test content")

  teardown:
    # `testDir` from `setup` is in scope; avoids repeating the literal path.
    removeDir(testDir)

  test "Successful verification":
    var manager = newIntegrityManager("/tmp")

    # Compute the expected root hash from the directory itself.
    let tree = buildMerkleTree(testDir, manager.cache)
    let expectedHash = tree.rootHash

    # Renamed from `result` — that name shadows Nim's implicit result
    # variable and is confusing inside generated test procs.
    let outcome = verifyContent(testDir, expectedHash, manager)

    check outcome.success
    check outcome.actualHash == expectedHash
    check outcome.errors.len == 0
    check outcome.verifiedFiles == 1

  test "Failed verification - hash mismatch":
    var manager = newIntegrityManager("/tmp")

    # A syntactically valid blake2b digest that cannot match real content.
    let wrongHash = "blake2b-0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"

    let outcome = verifyContent(testDir, wrongHash, manager)

    check not outcome.success
    check outcome.actualHash != wrongHash
    check outcome.errors.len > 0
    check outcome.errors[0].errorType == HashMismatch
suite "Build Hash Calculation":
  ## Smoke tests for the calculateBuildHash convenience wrapper.

  test "calculateBuildHash convenience function":
    let testDir = "/tmp/test_build_hash"
    createDir(testDir)
    writeFile(testDir / "main.nim", "echo \"Hello\"")
    writeFile(testDir / "config.nims", "switch(\"opt\", \"size\")")
    defer: removeDir(testDir)

    let buildHash = calculateBuildHash(testDir)

    # Non-empty and carries the expected algorithm prefix.
    check buildHash.len > 0
    check buildHash.startsWith("blake2b-")

    # Repeated calls over unchanged content must be deterministic.
    let buildHash2 = calculateBuildHash(testDir)
    check buildHash == buildHash2
when isMainModule:
  # Banner only — std/unittest registers and runs the suites above at
  # module load, so no explicit runner call is needed here.
  echo "Running canonical leaf hashing and parallel processing tests..."