@@ -1,34 +1,56 @@
-// Copyright 2013 The Go Authors. All rights reserved.
+// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package sha3

-// These tests are a subset of those provided by the Keccak web site(http://keccak.noekeon.org/).
+// Tests include all the ShortMsgKATs provided by the Keccak team at
+// https://github.com/gvanas/KeccakCodePackage
+//
+// They only include the zero-bit case of the bitwise test vectors
+// published by NIST in the draft of FIPS-202.

import (
	"bytes"
+	"compress/flate"
	"encoding/hex"
-	"fmt"
+	"encoding/json"
	"hash"
+	"os"
	"strings"
	"testing"
)

-// testDigests maintains a digest state of each standard type.
-var testDigests = map[string]*digest{
-	"Keccak224": {outputSize: 224 / 8, capacity: 2 * 224 / 8},
-	"Keccak256": {outputSize: 256 / 8, capacity: 2 * 256 / 8},
-	"Keccak384": {outputSize: 384 / 8, capacity: 2 * 384 / 8},
-	"Keccak512": {outputSize: 512 / 8, capacity: 2 * 512 / 8},
+const (
+	testString  = "brekeccakkeccak koax koax"
+	katFilename = "keccakKats.json.deflate"
+)
+
+// Internal-use instances of SHAKE used to test against KATs.
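+// (For reference: the Keccak-f[1600] state is 200 bytes and rate = 200 minus
+// capacity, where capacity is twice the security strength in bytes, giving
+// rate 168 for SHAKE128 and 136 for SHAKE256. dsbyte 0x1f encodes the SHAKE
+// domain-separation bits 1111 followed by the first 1 bit of pad10*1 padding.)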
+func newHashShake128() hash.Hash {
+	return &state{rate: 168, dsbyte: 0x1f, outputLen: 512}
+}
+func newHashShake256() hash.Hash {
+	return &state{rate: 136, dsbyte: 0x1f, outputLen: 512}
}

-// testVector represents a test input and expected outputs from multiple algorithm variants.
-type testVector struct {
-	desc   string
-	input  []byte
-	repeat int // input will be concatenated the input this many times.
-	want   map[string]string
+// testDigests contains functions returning hash.Hash instances
+// with output-length equal to the KAT length for both SHA-3 and
+// SHAKE instances.
+var testDigests = map[string]func() hash.Hash{
+	"SHA3-224": New224,
+	"SHA3-256": New256,
+	"SHA3-384": New384,
+	"SHA3-512": New512,
+	"SHAKE128": newHashShake128,
+	"SHAKE256": newHashShake256,
+}
+
+// testShakes contains functions returning ShakeHash instances for
+// testing the ShakeHash-specific interface.
+var testShakes = map[string]func() ShakeHash{
+	"SHAKE128": NewShake128,
+	"SHAKE256": NewShake256,
}

// decodeHex converts a hex-encoded string into a raw byte string.
@@ -40,102 +62,61 @@ func decodeHex(s string) []byte {
	return b
}

-// shortTestVectors stores a series of short testVectors.
-// Inputs of 8, 248, and 264 bits from http://keccak.noekeon.org/ are included below.
-// The standard defines additional test inputs of all sizes between 0 and 2047 bits.
-// Because the current implementation can only handle an integral number of bytes,
-// most of the standard test inputs can't be used.
-var shortKeccakTestVectors = []testVector{
-	{
-		desc:   "short-8b",
-		input:  decodeHex("CC"),
-		repeat: 1,
-		want: map[string]string{
-			"Keccak224": "A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802",
-			"Keccak256": "EEAD6DBFC7340A56CAEDC044696A168870549A6A7F6F56961E84A54BD9970B8A",
-			"Keccak384": "1B84E62A46E5A201861754AF5DC95C4A1A69CAF4A796AE405680161E29572641F5FA1E8641D7958336EE7B11C58F73E9",
-			"Keccak512": "8630C13CBD066EA74BBE7FE468FEC1DEE10EDC1254FB4C1B7C5FD69B646E44160B8CE01D05A0908CA790DFB080F4B513BC3B6225ECE7A810371441A5AC666EB9",
-		},
-	},
-	{
-		desc:   "short-248b",
-		input:  decodeHex("84FB51B517DF6C5ACCB5D022F8F28DA09B10232D42320FFC32DBECC3835B29"),
-		repeat: 1,
-		want: map[string]string{
-			"Keccak224": "81AF3A7A5BD4C1F948D6AF4B96F93C3B0CF9C0E7A6DA6FCD71EEC7F6",
-			"Keccak256": "D477FB02CAAA95B3280EC8EE882C29D9E8A654B21EF178E0F97571BF9D4D3C1C",
-			"Keccak384": "503DCAA4ADDA5A9420B2E436DD62D9AB2E0254295C2982EF67FCE40F117A2400AB492F7BD5D133C6EC2232268BC27B42",
-			"Keccak512": "9D8098D8D6EDBBAA2BCFC6FB2F89C3EAC67FEC25CDFE75AA7BD570A648E8C8945FF2EC280F6DCF73386109155C5BBC444C707BB42EAB873F5F7476657B1BC1A8",
-		},
-	},
-	{
-		desc:   "short-264b",
-		input:  decodeHex("DE8F1B3FAA4B7040ED4563C3B8E598253178E87E4D0DF75E4FF2F2DEDD5A0BE046"),
-		repeat: 1,
-		want: map[string]string{
-			"Keccak224": "F217812E362EC64D4DC5EACFABC165184BFA456E5C32C2C7900253D0",
-			"Keccak256": "E78C421E6213AFF8DE1F025759A4F2C943DB62BBDE359C8737E19B3776ED2DD2",
-			"Keccak384": "CF38764973F1EC1C34B5433AE75A3AAD1AAEF6AB197850C56C8617BCD6A882F6666883AC17B2DCCDBAA647075D0972B5",
-			"Keccak512": "9A7688E31AAF40C15575FC58C6B39267AAD3722E696E518A9945CF7F7C0FEA84CB3CB2E9F0384A6B5DC671ADE7FB4D2B27011173F3EEEAF17CB451CF26542031",
-		},
-	},
-}
-
-// longTestVectors stores longer testVectors (currently only one).
-// The computed test vector is 64 MiB long and is a truncated version of the
-// ExtremelyLongMsgKAT taken from http://keccak.noekeon.org/.
-var longKeccakTestVectors = []testVector{
-	{
-		desc:   "long-64MiB",
-		input:  []byte("abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmno"),
-		repeat: 1024 * 1024,
-		want: map[string]string{
-			"Keccak224": "50E35E40980FEEFF1EA490957B0E970257F75EA0D410EE0F0B8A7A58",
-			"Keccak256": "5015A4935F0B51E091C6550A94DCD262C08998232CCAA22E7F0756DEAC0DC0D0",
-			"Keccak384": "7907A8D0FAA7BC6A90FE14C6C958C956A0877E751455D8F13ACDB96F144B5896E716C06EC0CB56557A94EF5C3355F6F3",
-			"Keccak512": "3EC327D6759F769DEB74E80CA70C831BC29CAB048A4BF4190E4A1DD5C6507CF2B4B58937FDE81D36014E7DFE1B1DD8B0F27CB7614F9A645FEC114F1DAAEFC056",
-		},
-	},
+// structs used to unmarshal the JSON test vectors.
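+// The KAT file is expected to decode from JSON shaped like (illustrative):
+//
+//	{"Kats": {"SHA3-224": [{"digest": "...", "length": 8, "message": "CC"}, ...]}}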
+type KeccakKats struct {
+	Kats map[string][]struct {
+		Digest  string `json:"digest"`
+		Length  int64  `json:"length"`
+		Message string `json:"message"`
+	}
}

-// TestKeccakVectors checks that correct output is produced for a set of known testVectors.
-func TestKeccakVectors(t *testing.T) {
-	testCases := append([]testVector{}, shortKeccakTestVectors...)
-	if !testing.Short() {
-		testCases = append(testCases, longKeccakTestVectors...)
+// TestKeccakKats tests the SHA-3 and SHAKE implementations against all the
+// ShortMsgKATs from https://github.com/gvanas/KeccakCodePackage
+// (The test vectors are stored in keccakKats.json.deflate due to their length.)
+func TestKeccakKats(t *testing.T) {
+	// Read the KATs.
+	deflated, err := os.Open(katFilename)
+	if err != nil {
+		t.Fatalf("error opening %s: %s", katFilename, err)
+	}
+	file := flate.NewReader(deflated)
+	dec := json.NewDecoder(file)
+	var katSet KeccakKats
+	err = dec.Decode(&katSet)
+	if err != nil {
+		t.Fatalf("error decoding KATs: %s", err)
}
-	for _, tc := range testCases {
-		for alg, want := range tc.want {
-			d := testDigests[alg]
+
+	// Do the KATs.
+	for functionName, kats := range katSet.Kats {
+		d := testDigests[functionName]()
+		t.Logf("%s", functionName)
+		for _, kat := range kats {
d.Reset()
-			for i := 0; i < tc.repeat; i++ {
-				d.Write(tc.input)
+			in, err := hex.DecodeString(kat.Message)
+			if err != nil {
+				t.Fatalf("%s", err)
}
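+			// kat.Length is in bits; the implementation is byte-oriented,
+			// so only whole bytes of the message are hashed.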
+			d.Write(in[:kat.Length/8])
got := strings.ToUpper(hex.EncodeToString(d.Sum(nil)))
+			want := kat.Digest
if got != want {
-				t.Errorf("%s, alg=%s\ngot %q, want %q", tc.desc, alg, got, want)
+				t.Fatalf("function=%s, length=%d\nmessage:\n %s\ngot:\n %s\nwanted:\n %s",
+					functionName, kat.Length, kat.Message, got, want)
}
		}
	}
}

-// dumpState is a debugging function to pretty-print the internal state of the hash.
-func (d *digest) dumpState() {
-	fmt.Printf("SHA3 hash, %d B output, %d B capacity (%d B rate)\n", d.outputSize, d.capacity, d.rate())
-	fmt.Printf("Internal state after absorbing %d B:\n", d.absorbed)
-
-	for x := 0; x < sliceSize; x++ {
-		for y := 0; y < sliceSize; y++ {
-			fmt.Printf("%v, ", d.a[x*sliceSize+y])
-		}
-		fmt.Println("")
-	}
-}
-
-// TestUnalignedWrite tests that writing data in an arbitrary pattern with small input buffers.
+// TestUnalignedWrite tests that writing data in an arbitrary pattern with
+// small input buffers works correctly.
func TestUnalignedWrite(t *testing.T) {
	buf := sequentialBytes(0x10000)
-	for alg, d := range testDigests {
+	for alg, df := range testDigests {
+		d := df()
		d.Reset()
		d.Write(buf)
		want := d.Sum(nil)
@@ -145,7 +126,9 @@ func TestUnalignedWrite(t *testing.T) {
// Because 137 is prime this sequence should exercise all corner cases.
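+		// (The sum of the offsets is 137.)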
		offsets := [17]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 1}
		for _, j := range offsets {
-			j = minInt(j, len(buf)-i)
+			if v := len(buf) - i; v < j {
+				j = v
+			}
			d.Write(buf[i : i+j])
			i += j
		}
@@ -157,8 +140,9 @@ func TestUnalignedWrite(t *testing.T) {
	}
}

+// Test that appending works when reallocation is necessary.
func TestAppend(t *testing.T) {
-	d := NewKeccak224()
+	d := New224()

	for capacity := 2; capacity < 64; capacity += 64 {
		// The first time around the loop, Sum will have to reallocate.
@@ -167,24 +151,57 @@ func TestAppend(t *testing.T) {
		d.Reset()
		d.Write([]byte{0xcc})
		buf = d.Sum(buf)
-		expected := "0000A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802"
+		expected := "0000DF70ADC49B2E76EEE3A6931B93FA41841C3AF2CDF5B32A18B5478C39"
		if got := strings.ToUpper(hex.EncodeToString(buf)); got != expected {
			t.Errorf("got %s, want %s", got, expected)
		}
	}
}

+// Test that appending works when no reallocation is necessary.
func TestAppendNoRealloc(t *testing.T) {
	buf := make([]byte, 1, 200)
-	d := NewKeccak224()
+	d := New224()
	d.Write([]byte{0xcc})
	buf = d.Sum(buf)
-	expected := "00A9CAB59EB40A10B246290F2D6086E32E3689FAF1D26B470C899F2802"
+	expected := "00DF70ADC49B2E76EEE3A6931B93FA41841C3AF2CDF5B32A18B5478C39"
	if got := strings.ToUpper(hex.EncodeToString(buf)); got != expected {
		t.Errorf("got %s, want %s", got, expected)
	}
}

+// TestSqueezing checks that squeezing the full output a single time produces
+// the same output as repeatedly squeezing the instance.
+func TestSqueezing(t *testing.T) {
+	for functionName, newShakeHash := range testShakes {
+		t.Logf("%s", functionName)
+		d0 := newShakeHash()
+		d0.Write([]byte(testString))
+		ref := make([]byte, 32)
+		d0.Read(ref)
+
+		d1 := newShakeHash()
+		d1.Write([]byte(testString))
+		var multiple []byte
+		for range ref {
+			one := make([]byte, 1)
+			d1.Read(one)
+			multiple = append(multiple, one...)
+		}
+		if !bytes.Equal(ref, multiple) {
+			t.Errorf("squeezing %d bytes one at a time failed", len(ref))
+		}
+	}
+}
+
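+// TestReadSimulation is a smoke test: Read after a zero-length Write must
+// produce output without panicking.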
+func TestReadSimulation(t *testing.T) {
+	d := NewShake256()
+	d.Write(nil)
+	dwr := make([]byte, 32)
+	d.Read(dwr)
+}
+
// sequentialBytes produces a buffer of size consecutive bytes 0x00, 0x01, ..., used for testing.
func sequentialBytes(size int) []byte {
	result := make([]byte, size)
@@ -194,77 +211,39 @@
	return result
}

-// benchmarkBlockWrite tests the speed of writing data and never calling the permutation function.
-func benchmarkBlockWrite(b *testing.B, d *digest) {
-	b.StopTimer()
-	d.Reset()
-	// Write all but the last byte of a block, to ensure that the permutation is not called.
-	data := sequentialBytes(d.rate() - 1)
-	b.SetBytes(int64(len(data)))
-	b.StartTimer()
-	for i := 0; i < b.N; i++ {
-		d.absorbed = 0 // Reset absorbed to avoid ever calling the permutation function
-		d.Write(data)
-	}
-	b.StopTimer()
-	d.Reset()
-}
-
-// BenchmarkPermutationFunction measures the speed of the permutation function with no input data.
+// BenchmarkPermutationFunction measures the speed of the permutation function
+// with no input data.
func BenchmarkPermutationFunction(b *testing.B) {
-	b.SetBytes(int64(stateSize))
-	var lanes [numLanes]uint64
+	b.SetBytes(int64(200)) // the size of the Keccak-f[1600] state in bytes
+	var lanes [25]uint64
	for i := 0; i < b.N; i++ {
-		keccakF(&lanes)
+		keccakF1600(&lanes)
	}
}

-// BenchmarkSingleByteWrite tests the latency from writing a single byte
-func BenchmarkSingleByteWrite(b *testing.B) {
-	b.StopTimer()
-	d := testDigests["Keccak512"]
-	d.Reset()
-	data := sequentialBytes(1) //1 byte buffer
-	b.SetBytes(int64(d.rate()) - 1)
-	b.StartTimer()
-	for i := 0; i < b.N; i++ {
-		d.absorbed = 0 // Reset absorbed to avoid ever calling the permutation function
-
-		// Write all but the last byte of a block, one byte at a time.
-		for j := 0; j < d.rate()-1; j++ {
-			d.Write(data)
-		}
-	}
-	b.StopTimer()
-	d.Reset()
-}
-
-// BenchmarkSingleByteX measures the block write speed for each size of the digest.
-func BenchmarkBlockWrite512(b *testing.B) { benchmarkBlockWrite(b, testDigests["Keccak512"]) }
-func BenchmarkBlockWrite384(b *testing.B) { benchmarkBlockWrite(b, testDigests["Keccak384"]) }
-func BenchmarkBlockWrite256(b *testing.B) { benchmarkBlockWrite(b, testDigests["Keccak256"]) }
-func BenchmarkBlockWrite224(b *testing.B) { benchmarkBlockWrite(b, testDigests["Keccak224"]) }
-
-// benchmarkBulkHash tests the speed to hash a 16 KiB buffer.
-func benchmarkBulkHash(b *testing.B, h hash.Hash) {
+// benchmarkBulkHash benchmarks the speed of hashing a buffer of the given size.
+func benchmarkBulkHash(b *testing.B, h hash.Hash, size int) {
	b.StopTimer()
	h.Reset()
-	size := 1 << 14
	data := sequentialBytes(size)
	b.SetBytes(int64(size))
	b.StartTimer()

-	var digest []byte
+	var state []byte
	for i := 0; i < b.N; i++ {
		h.Write(data)
-		digest = h.Sum(digest[:0])
+		state = h.Sum(state[:0])
	}
	b.StopTimer()
	h.Reset()
}

-// benchmarkBulkKeccakX test the speed to hash a 16 KiB buffer by calling benchmarkBulkHash.
-func BenchmarkBulkKeccak512(b *testing.B) { benchmarkBulkHash(b, NewKeccak512()) }
-func BenchmarkBulkKeccak384(b *testing.B) { benchmarkBulkHash(b, NewKeccak384()) }
-func BenchmarkBulkKeccak256(b *testing.B) { benchmarkBulkHash(b, NewKeccak256()) }
-func BenchmarkBulkKeccak224(b *testing.B) { benchmarkBulkHash(b, NewKeccak224()) }
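+// The MTU benchmarks hash a 1350-byte buffer, roughly the payload of one
+// network packet; the 1MiB benchmarks measure bulk throughput.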
+func BenchmarkSha3_512_MTU(b *testing.B) { benchmarkBulkHash(b, New512(), 1350) }
+func BenchmarkSha3_384_MTU(b *testing.B) { benchmarkBulkHash(b, New384(), 1350) }
+func BenchmarkSha3_256_MTU(b *testing.B) { benchmarkBulkHash(b, New256(), 1350) }
+func BenchmarkSha3_224_MTU(b *testing.B) { benchmarkBulkHash(b, New224(), 1350) }
+func BenchmarkShake256_MTU(b *testing.B) { benchmarkBulkHash(b, newHashShake256(), 1350) }
+func BenchmarkShake128_MTU(b *testing.B) { benchmarkBulkHash(b, newHashShake128(), 1350) }
+
+func BenchmarkSha3_512_1MiB(b *testing.B) { benchmarkBulkHash(b, New512(), 1<<20) }
+func BenchmarkShake256_1MiB(b *testing.B) { benchmarkBulkHash(b, newHashShake256(), 1<<20) }