initial addition of essential crypto, encoders, workflows and LLM instructions
This commit is contained in:
17
pkg/crypto/ec/LICENSE
Normal file
17
pkg/crypto/ec/LICENSE
Normal file
@@ -0,0 +1,17 @@
|
||||
ISC License
|
||||
|
||||
Copyright (c) 2013-2017 The btcsuite developers
|
||||
Copyright (c) 2015-2020 The Decred developers
|
||||
Copyright (c) 2017 The Lightning Network Developers
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
39
pkg/crypto/ec/README.md
Normal file
39
pkg/crypto/ec/README.md
Normal file
@@ -0,0 +1,39 @@
|
||||
realy.lol/pkg/ec
|
||||
=====
|
||||
|
||||
This is a full drop-in replacement for
|
||||
[github.com/btcsuite/btcd/btcec](https://github.com/btcsuite/btcd/tree/master/btcec)
|
||||
eliminating the import from the Decred repository, and including the chainhash
|
||||
helper functions, needed for hashing messages for signatures.
|
||||
|
||||
The decred specific tests also have been removed, as well as all tests that use
|
||||
blake256 hashes as these are irrelevant to bitcoin and nostr. Some of them
|
||||
remain present, commented out, in case it is worth regenerating the vectors
|
||||
based on sha256 hashes, but on first blush it seems unlikely to be any benefit.
|
||||
|
||||
This includes the old style compact secp256k1 ECDSA signatures, that recover the
|
||||
public key rather than take a key as a parameter as used in Bitcoin
|
||||
transactions, the new style Schnorr signatures, and the Musig2 implementation.
|
||||
|
||||
BIP 340 Schnorr signatures are implemented including the variable length
|
||||
message signing with the extra test vectors present and passing.
|
||||
|
||||
The remainder of this document is from the original README.md.
|
||||
|
||||
------------------------------------------------------------------------------
|
||||
|
||||
Package `ec` implements elliptic curve cryptography needed for working with
|
||||
Bitcoin. It is designed so that it may be used with the standard
|
||||
crypto/ecdsa packages provided with Go.
|
||||
|
||||
A comprehensive suite of test is provided to ensure proper functionality.
|
||||
|
||||
Package btcec was originally based on work from ThePiachu which is licensed
|
||||
underthe same terms as Go, but it has signficantly diverged since then. The
|
||||
btcsuite developers original is licensed under the liberal ISC license.
|
||||
|
||||
## Installation and Updating
|
||||
|
||||
```bash
|
||||
$ go get mleku.dev/pkg/ec@latest
|
||||
```
|
||||
14
pkg/crypto/ec/base58/LICENSE
Normal file
14
pkg/crypto/ec/base58/LICENSE
Normal file
@@ -0,0 +1,14 @@
|
||||
Copyright © 2004-2011 []byte Internet Systems Consortium, Inc. ("ISC")
|
||||
Copyright © 1995-2003 []byte Internet Software Consortium
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH REGARD
|
||||
TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
|
||||
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
|
||||
ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
|
||||
SOFTWARE.
|
||||
12
pkg/crypto/ec/base58/README.adoc
Normal file
12
pkg/crypto/ec/base58/README.adoc
Normal file
@@ -0,0 +1,12 @@
|
||||
= base58
|
||||
|
||||
image:http://img.shields.io/badge/license-ISC-blue.svg[ISC License,link=http://copyfree.org]
|
||||
|
||||
Package base58 provides an API for encoding and decoding to and from the modified base58 encoding.
|
||||
It also provides an API to do Base58Check encoding, as described https://en.bitcoin.it/wiki/Base58Check_encoding[here].
|
||||
|
||||
A comprehensive suite of tests is provided to ensure proper functionality.
|
||||
|
||||
== License
|
||||
|
||||
Package base58 is licensed under the http://copyfree.org[copyfree] ISC License.
|
||||
49
pkg/crypto/ec/base58/alphabet.go
Normal file
49
pkg/crypto/ec/base58/alphabet.go
Normal file
@@ -0,0 +1,49 @@
|
||||
// Copyright (c) 2015 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// AUTOGENERATED by genalphabet.go; do not edit.
|
||||
|
||||
package base58
|
||||
|
||||
const (
|
||||
// Ciphers is the modified base58 Ciphers used by Bitcoin.
|
||||
Ciphers = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
|
||||
|
||||
alphabetIdx0 = '1'
|
||||
)
|
||||
|
||||
var b58 = [256]byte{
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 0, 1, 2, 3, 4, 5, 6,
|
||||
7, 8, 255, 255, 255, 255, 255, 255,
|
||||
255, 9, 10, 11, 12, 13, 14, 15,
|
||||
16, 255, 17, 18, 19, 20, 21, 255,
|
||||
22, 23, 24, 25, 26, 27, 28, 29,
|
||||
30, 31, 32, 255, 255, 255, 255, 255,
|
||||
255, 33, 34, 35, 36, 37, 38, 39,
|
||||
40, 41, 42, 43, 255, 44, 45, 46,
|
||||
47, 48, 49, 50, 51, 52, 53, 54,
|
||||
55, 56, 57, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
255, 255, 255, 255, 255, 255, 255, 255,
|
||||
}
|
||||
142
pkg/crypto/ec/base58/base58.go
Normal file
142
pkg/crypto/ec/base58/base58.go
Normal file
@@ -0,0 +1,142 @@
|
||||
// Copyright (c) 2013-2015 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package base58
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
)
|
||||
|
||||
//go:generate go run genalphabet.go
|
||||
|
||||
var bigRadix = [...]*big.Int{
|
||||
big.NewInt(0),
|
||||
big.NewInt(58),
|
||||
big.NewInt(58 * 58),
|
||||
big.NewInt(58 * 58 * 58),
|
||||
big.NewInt(58 * 58 * 58 * 58),
|
||||
big.NewInt(58 * 58 * 58 * 58 * 58),
|
||||
big.NewInt(58 * 58 * 58 * 58 * 58 * 58),
|
||||
big.NewInt(58 * 58 * 58 * 58 * 58 * 58 * 58),
|
||||
big.NewInt(58 * 58 * 58 * 58 * 58 * 58 * 58 * 58),
|
||||
big.NewInt(58 * 58 * 58 * 58 * 58 * 58 * 58 * 58 * 58),
|
||||
bigRadix10,
|
||||
}
|
||||
|
||||
var bigRadix10 = big.NewInt(58 * 58 * 58 * 58 * 58 * 58 * 58 * 58 * 58 * 58) // 58^10
|
||||
|
||||
// Decode decodes a modified base58 string to a byte slice.
|
||||
func Decode(b string) []byte {
|
||||
answer := big.NewInt(0)
|
||||
scratch := new(big.Int)
|
||||
|
||||
// Calculating with big.Int is slow for each iteration.
|
||||
// x += b58[b[i]] * j
|
||||
// j *= 58
|
||||
//
|
||||
// Instead we can try to do as much calculations on int64.
|
||||
// We can represent a 10 digit base58 number using an int64.
|
||||
//
|
||||
// Hence we'll try to convert 10, base58 digits at a time.
|
||||
// The rough idea is to calculate `t`, such that:
|
||||
//
|
||||
// t := b58[b[i+9]] * 58^9 ... + b58[b[i+1]] * 58^1 + b58[b[i]] * 58^0
|
||||
// x *= 58^10
|
||||
// x += t
|
||||
//
|
||||
// Of course, in addition, we'll need to handle boundary condition when `b` is not multiple of 58^10.
|
||||
// In that case we'll use the bigRadix[n] lookup for the appropriate power.
|
||||
for t := b; len(t) > 0; {
|
||||
n := len(t)
|
||||
if n > 10 {
|
||||
n = 10
|
||||
}
|
||||
|
||||
total := uint64(0)
|
||||
for _, v := range t[:n] {
|
||||
if v > 255 {
|
||||
return []byte("")
|
||||
}
|
||||
|
||||
tmp := b58[v]
|
||||
if tmp == 255 {
|
||||
return []byte("")
|
||||
}
|
||||
total = total*58 + uint64(tmp)
|
||||
}
|
||||
|
||||
answer.Mul(answer, bigRadix[n])
|
||||
scratch.SetUint64(total)
|
||||
answer.Add(answer, scratch)
|
||||
|
||||
t = t[n:]
|
||||
}
|
||||
|
||||
tmpval := answer.Bytes()
|
||||
|
||||
var numZeros int
|
||||
for numZeros = 0; numZeros < len(b); numZeros++ {
|
||||
if b[numZeros] != alphabetIdx0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
flen := numZeros + len(tmpval)
|
||||
val := make([]byte, flen)
|
||||
copy(val[numZeros:], tmpval)
|
||||
|
||||
return val
|
||||
}
|
||||
|
||||
// Encode encodes a byte slice to a modified base58 string.
|
||||
func Encode(b []byte) string {
|
||||
x := new(big.Int)
|
||||
x.SetBytes(b)
|
||||
|
||||
// maximum length of output is log58(2^(8*len(b))) == len(b) * 8 / log(58)
|
||||
maxlen := int(float64(len(b))*1.365658237309761) + 1
|
||||
answer := make([]byte, 0, maxlen)
|
||||
mod := new(big.Int)
|
||||
for x.Sign() > 0 {
|
||||
// Calculating with big.Int is slow for each iteration.
|
||||
// x, mod = x / 58, x % 58
|
||||
//
|
||||
// Instead we can try to do as much calculations on int64.
|
||||
// x, mod = x / 58^10, x % 58^10
|
||||
//
|
||||
// Which will give us mod, which is 10 digit base58 number.
|
||||
// We'll loop that 10 times to convert to the answer.
|
||||
|
||||
x.DivMod(x, bigRadix10, mod)
|
||||
if x.Sign() == 0 {
|
||||
// When x = 0, we need to ensure we don't add any extra zeros.
|
||||
m := mod.Int64()
|
||||
for m > 0 {
|
||||
answer = append(answer, Ciphers[m%58])
|
||||
m /= 58
|
||||
}
|
||||
} else {
|
||||
m := mod.Int64()
|
||||
for i := 0; i < 10; i++ {
|
||||
answer = append(answer, Ciphers[m%58])
|
||||
m /= 58
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// leading zero bytes
|
||||
for _, i := range b {
|
||||
if i != 0 {
|
||||
break
|
||||
}
|
||||
answer = append(answer, alphabetIdx0)
|
||||
}
|
||||
|
||||
// reverse
|
||||
alen := len(answer)
|
||||
for i := 0; i < alen/2; i++ {
|
||||
answer[i], answer[alen-1-i] = answer[alen-1-i], answer[i]
|
||||
}
|
||||
|
||||
return string(answer)
|
||||
}
|
||||
124
pkg/crypto/ec/base58/base58_test.go
Normal file
124
pkg/crypto/ec/base58/base58_test.go
Normal file
@@ -0,0 +1,124 @@
|
||||
// Copyright (c) 2013-2017 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package base58_test
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/base58"
|
||||
"next.orly.dev/pkg/utils"
|
||||
)
|
||||
|
||||
var stringTests = []struct {
|
||||
in string
|
||||
out string
|
||||
}{
|
||||
{"", ""},
|
||||
{" ", "Z"},
|
||||
{"-", "n"},
|
||||
{"0", "q"},
|
||||
{"1", "r"},
|
||||
{"-1", "4SU"},
|
||||
{"11", "4k8"},
|
||||
{"abc", "ZiCa"},
|
||||
{"1234598760", "3mJr7AoUXx2Wqd"},
|
||||
{"abcdefghijklmnopqrstuvwxyz", "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f"},
|
||||
{
|
||||
"00000000000000000000000000000000000000000000000000000000000000",
|
||||
"3sN2THZeE9Eh9eYrwkvZqNstbHGvrxSAM7gXUXvyFQP8XvQLUqNCS27icwUeDT7ckHm4FUHM2mTVh1vbLmk7y",
|
||||
},
|
||||
}
|
||||
|
||||
var invalidStringTests = []struct {
|
||||
in string
|
||||
out string
|
||||
}{
|
||||
{"0", ""},
|
||||
{"O", ""},
|
||||
{"I", ""},
|
||||
{"l", ""},
|
||||
{"3mJr0", ""},
|
||||
{"O3yxU", ""},
|
||||
{"3sNI", ""},
|
||||
{"4kl8", ""},
|
||||
{"0OIl", ""},
|
||||
{"!@#$%^&*()-_=+~`", ""},
|
||||
{"abcd\xd80", ""},
|
||||
{"abcd\U000020BF", ""},
|
||||
}
|
||||
|
||||
var hexTests = []struct {
|
||||
in string
|
||||
out string
|
||||
}{
|
||||
{"", ""},
|
||||
{"61", "2g"},
|
||||
{"626262", "a3gV"},
|
||||
{"636363", "aPEr"},
|
||||
{
|
||||
"73696d706c792061206c6f6e6720737472696e67",
|
||||
"2cFupjhnEsSn59qHXstmK2ffpLv2",
|
||||
},
|
||||
{
|
||||
"00eb15231dfceb60925886b67d065299925915aeb172c06647",
|
||||
"1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L",
|
||||
},
|
||||
{"516b6fcd0f", "ABnLTmg"},
|
||||
{"bf4f89001e670274dd", "3SEo3LWLoPntC"},
|
||||
{"572e4794", "3EFU7m"},
|
||||
{"ecac89cad93923c02321", "EJDM8drfXA6uyA"},
|
||||
{"10c8511e", "Rt5zm"},
|
||||
{"00000000000000000000", "1111111111"},
|
||||
{
|
||||
"000111d38e5fc9071ffcd20b4a763cc9ae4f252bb4e48fd66a835e252ada93ff480d6dd43dc62a641155a5",
|
||||
"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz",
|
||||
},
|
||||
{
|
||||
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
|
||||
"1cWB5HCBdLjAuqGGReWE3R3CguuwSjw6RHn39s2yuDRTS5NsBgNiFpWgAnEx6VQi8csexkgYw3mdYrMHr8x9i7aEwP8kZ7vccXWqKDvGv3u1GxFKPuAkn8JCPPGDMf3vMMnbzm6Nh9zh1gcNsMvH3ZNLmP5fSG6DGbbi2tuwMWPthr4boWwCxf7ewSgNQeacyozhKDDQQ1qL5fQFUW52QKUZDZ5fw3KXNQJMcNTcaB723LchjeKun7MuGW5qyCBZYzA1KjofN1gYBV3NqyhQJ3Ns746GNuf9N2pQPmHz4xpnSrrfCvy6TVVz5d4PdrjeshsWQwpZsZGzvbdAdN8MKV5QsBDY",
|
||||
},
|
||||
}
|
||||
|
||||
func TestBase58(t *testing.T) {
|
||||
// Encode tests
|
||||
for x, test := range stringTests {
|
||||
tmp := []byte(test.in)
|
||||
if res := base58.Encode(tmp); res != test.out {
|
||||
t.Errorf(
|
||||
"Encode test #%d failed: got: %s want: %s",
|
||||
x, res, test.out,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Decode tests
|
||||
for x, test := range hexTests {
|
||||
b, err := hex.DecodeString(test.in)
|
||||
if err != nil {
|
||||
t.Errorf("hex.DecodeString failed failed #%d: got: %s", x, test.in)
|
||||
continue
|
||||
}
|
||||
if res := base58.Decode(test.out); !utils.FastEqual(res, b) {
|
||||
t.Errorf(
|
||||
"Decode test #%d failed: got: %q want: %q",
|
||||
x, res, test.in,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Decode with invalid input
|
||||
for x, test := range invalidStringTests {
|
||||
if res := base58.Decode(test.in); string(res) != test.out {
|
||||
t.Errorf(
|
||||
"Decode invalidString test #%d failed: got: %q want: %q",
|
||||
x, res, test.out,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
47
pkg/crypto/ec/base58/base58bench_test.go
Normal file
47
pkg/crypto/ec/base58/base58bench_test.go
Normal file
@@ -0,0 +1,47 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package base58_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/base58"
|
||||
)
|
||||
|
||||
var (
|
||||
raw5k = bytes.Repeat([]byte{0xff}, 5000)
|
||||
raw100k = bytes.Repeat([]byte{0xff}, 100*1000)
|
||||
encoded5k = base58.Encode(raw5k)
|
||||
encoded100k = base58.Encode(raw100k)
|
||||
)
|
||||
|
||||
func BenchmarkBase58Encode_5K(b *testing.B) {
|
||||
b.SetBytes(int64(len(raw5k)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
base58.Encode(raw5k)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkBase58Encode_100K(b *testing.B) {
|
||||
b.SetBytes(int64(len(raw100k)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
base58.Encode(raw100k)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkBase58Decode_5K(b *testing.B) {
|
||||
b.SetBytes(int64(len(encoded5k)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
base58.Decode(encoded5k)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkBase58Decode_100K(b *testing.B) {
|
||||
b.SetBytes(int64(len(encoded100k)))
|
||||
for i := 0; i < b.N; i++ {
|
||||
base58.Decode(encoded100k)
|
||||
}
|
||||
}
|
||||
53
pkg/crypto/ec/base58/base58check.go
Normal file
53
pkg/crypto/ec/base58/base58check.go
Normal file
@@ -0,0 +1,53 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package base58
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"next.orly.dev/pkg/crypto/sha256"
|
||||
)
|
||||
|
||||
// ErrChecksum indicates that the checksum of a check-encoded string does not verify against
|
||||
// the checksum.
|
||||
var ErrChecksum = errors.New("checksum error")
|
||||
|
||||
// ErrInvalidFormat indicates that the check-encoded string has an invalid format.
|
||||
var ErrInvalidFormat = errors.New("invalid format: version and/or checksum bytes missing")
|
||||
|
||||
// checksum: first four bytes of sha256^2
|
||||
func checksum(input []byte) (cksum [4]byte) {
|
||||
h := sha256.Sum256(input)
|
||||
h2 := sha256.Sum256(h[:])
|
||||
copy(cksum[:], h2[:4])
|
||||
return
|
||||
}
|
||||
|
||||
// CheckEncode prepends a version byte and appends a four byte checksum.
|
||||
func CheckEncode(input []byte, version byte) string {
|
||||
b := make([]byte, 0, 1+len(input)+4)
|
||||
b = append(b, version)
|
||||
b = append(b, input...)
|
||||
cksum := checksum(b)
|
||||
b = append(b, cksum[:]...)
|
||||
return Encode(b)
|
||||
}
|
||||
|
||||
// CheckDecode decodes a string that was encoded with CheckEncode and verifies the checksum.
|
||||
func CheckDecode(input string) (result []byte, version byte, err error) {
|
||||
decoded := Decode(input)
|
||||
if len(decoded) < 5 {
|
||||
return nil, 0, ErrInvalidFormat
|
||||
}
|
||||
version = decoded[0]
|
||||
var cksum [4]byte
|
||||
copy(cksum[:], decoded[len(decoded)-4:])
|
||||
if checksum(decoded[:len(decoded)-4]) != cksum {
|
||||
return nil, 0, ErrChecksum
|
||||
}
|
||||
payload := decoded[1 : len(decoded)-4]
|
||||
result = append(result, payload...)
|
||||
return
|
||||
}
|
||||
87
pkg/crypto/ec/base58/base58check_test.go
Normal file
87
pkg/crypto/ec/base58/base58check_test.go
Normal file
@@ -0,0 +1,87 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package base58_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/base58"
|
||||
)
|
||||
|
||||
var checkEncodingStringTests = []struct {
|
||||
version byte
|
||||
in string
|
||||
out string
|
||||
}{
|
||||
{20, "", "3MNQE1X"},
|
||||
{20, " ", "B2Kr6dBE"},
|
||||
{20, "-", "B3jv1Aft"},
|
||||
{20, "0", "B482yuaX"},
|
||||
{20, "1", "B4CmeGAC"},
|
||||
{20, "-1", "mM7eUf6kB"},
|
||||
{20, "11", "mP7BMTDVH"},
|
||||
{20, "abc", "4QiVtDjUdeq"},
|
||||
{20, "1234598760", "ZmNb8uQn5zvnUohNCEPP"},
|
||||
{
|
||||
20, "abcdefghijklmnopqrstuvwxyz",
|
||||
"K2RYDcKfupxwXdWhSAxQPCeiULntKm63UXyx5MvEH2",
|
||||
},
|
||||
{
|
||||
20, "00000000000000000000000000000000000000000000000000000000000000",
|
||||
"bi1EWXwJay2udZVxLJozuTb8Meg4W9c6xnmJaRDjg6pri5MBAxb9XwrpQXbtnqEoRV5U2pixnFfwyXC8tRAVC8XxnjK",
|
||||
},
|
||||
}
|
||||
|
||||
func TestBase58Check(t *testing.T) {
|
||||
for x, test := range checkEncodingStringTests {
|
||||
// test encoding
|
||||
if res := base58.CheckEncode(
|
||||
[]byte(test.in),
|
||||
test.version,
|
||||
); res != test.out {
|
||||
t.Errorf(
|
||||
"CheckEncode test #%d failed: got %s, want: %s", x, res,
|
||||
test.out,
|
||||
)
|
||||
}
|
||||
|
||||
// test decoding
|
||||
res, version, err := base58.CheckDecode(test.out)
|
||||
switch {
|
||||
case err != nil:
|
||||
t.Errorf("CheckDecode test #%d failed with err: %v", x, err)
|
||||
|
||||
case version != test.version:
|
||||
t.Errorf(
|
||||
"CheckDecode test #%d failed: got version: %d want: %d", x,
|
||||
version, test.version,
|
||||
)
|
||||
|
||||
case string(res) != test.in:
|
||||
t.Errorf(
|
||||
"CheckDecode test #%d failed: got: %s want: %s", x, res,
|
||||
test.in,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// test the two decoding failure cases
|
||||
// case 1: checksum error
|
||||
_, _, err := base58.CheckDecode("3MNQE1Y")
|
||||
if err != base58.ErrChecksum {
|
||||
t.Error("Checkdecode test failed, expected ErrChecksum")
|
||||
}
|
||||
// case 2: invalid formats (string lengths below 5 mean the version byte and/or the checksum
|
||||
// bytes are missing).
|
||||
testString := ""
|
||||
for len := 0; len < 4; len++ {
|
||||
testString += "x"
|
||||
_, _, err = base58.CheckDecode(testString)
|
||||
if err != base58.ErrInvalidFormat {
|
||||
t.Error("Checkdecode test failed, expected ErrInvalidFormat")
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
17
pkg/crypto/ec/base58/cov_report.sh
Normal file
17
pkg/crypto/ec/base58/cov_report.sh
Normal file
@@ -0,0 +1,17 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script uses gocov to generate a test coverage report.
|
||||
# The gocov tool my be obtained with the following command:
|
||||
# go get github.com/axw/gocov/gocov
|
||||
#
|
||||
# It will be installed to $GOPATH/bin, so ensure that location is in your $PATH.
|
||||
|
||||
# Check for gocov.
|
||||
type gocov >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo >&2 "This script requires the gocov tool."
|
||||
echo >&2 "You may obtain it with the following command:"
|
||||
echo >&2 "go get github.com/axw/gocov/gocov"
|
||||
exit 1
|
||||
fi
|
||||
gocov test | gocov report
|
||||
29
pkg/crypto/ec/base58/doc.go
Normal file
29
pkg/crypto/ec/base58/doc.go
Normal file
@@ -0,0 +1,29 @@
|
||||
// Copyright (c) 2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package base58 provides an API for working with modified base58 and Base58Check
|
||||
encodings.
|
||||
|
||||
# Modified Base58 Encoding
|
||||
|
||||
Standard base58 encoding is similar to standard base64 encoding except, as the
|
||||
name implies, it uses a 58 character Ciphers which results in an alphanumeric
|
||||
string and allows some characters which are problematic for humans to be
|
||||
excluded. Due to this, there can be various base58 alphabets.
|
||||
|
||||
The modified base58 Ciphers used by Bitcoin, and hence this package, omits the
|
||||
0, O, I, and l characters that look the same in many fonts and are therefore
|
||||
hard to humans to distinguish.
|
||||
|
||||
# Base58Check Encoding Scheme
|
||||
|
||||
The Base58Check encoding scheme is primarily used for Bitcoin addresses at the
|
||||
time of this writing, however it can be used to generically encode arbitrary
|
||||
byte arrays into human-readable strings along with a version byte that can be
|
||||
used to differentiate the same payload. For Bitcoin addresses, the extra
|
||||
version is used to differentiate the network of otherwise identical public keys
|
||||
which helps prevent using an address intended for one network on another.
|
||||
*/
|
||||
package base58
|
||||
71
pkg/crypto/ec/base58/example_test.go
Normal file
71
pkg/crypto/ec/base58/example_test.go
Normal file
@@ -0,0 +1,71 @@
|
||||
// Copyright (c) 2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package base58_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/base58"
|
||||
)
|
||||
|
||||
// This example demonstrates how to decode modified base58 encoded data.
|
||||
func ExampleDecode() {
|
||||
// Decode example modified base58 encoded data.
|
||||
encoded := "25JnwSn7XKfNQ"
|
||||
decoded := base58.Decode(encoded)
|
||||
|
||||
// Show the decoded data.
|
||||
fmt.Println("Decoded Data:", string(decoded))
|
||||
|
||||
// Output:
|
||||
// Decoded Data: Test data
|
||||
}
|
||||
|
||||
// This example demonstrates how to encode data using the modified base58
|
||||
// encoding scheme.
|
||||
func ExampleEncode() {
|
||||
// Encode example data with the modified base58 encoding scheme.
|
||||
data := []byte("Test data")
|
||||
encoded := base58.Encode(data)
|
||||
|
||||
// Show the encoded data.
|
||||
fmt.Println("Encoded Data:", encoded)
|
||||
|
||||
// Output:
|
||||
// Encoded Data: 25JnwSn7XKfNQ
|
||||
}
|
||||
|
||||
// This example demonstrates how to decode Base58Check encoded data.
|
||||
func ExampleCheckDecode() {
|
||||
// Decode an example Base58Check encoded data.
|
||||
encoded := "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"
|
||||
decoded, version, err := base58.CheckDecode(encoded)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
|
||||
// Show the decoded data.
|
||||
fmt.Printf("Decoded data: %x\n", decoded)
|
||||
fmt.Println("Version Byte:", version)
|
||||
|
||||
// Output:
|
||||
// Decoded data: 62e907b15cbf27d5425399ebf6f0fb50ebb88f18
|
||||
// Version Byte: 0
|
||||
}
|
||||
|
||||
// This example demonstrates how to encode data using the Base58Check encoding
|
||||
// scheme.
|
||||
func ExampleCheckEncode() {
|
||||
// Encode example data with the Base58Check encoding scheme.
|
||||
data := []byte("Test data")
|
||||
encoded := base58.CheckEncode(data, 0)
|
||||
|
||||
// Show the encoded data.
|
||||
fmt.Println("Encoded Data:", encoded)
|
||||
|
||||
// Output:
|
||||
// Encoded Data: 182iP79GRURMp7oMHDU
|
||||
}
|
||||
77
pkg/crypto/ec/base58/gen/genalphabet.go
Normal file
77
pkg/crypto/ec/base58/gen/genalphabet.go
Normal file
@@ -0,0 +1,77 @@
|
||||
// Copyright (c) 2015 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
var (
|
||||
start = []byte(`// Copyright (c) 2015 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// AUTOGENERATED by genalphabet.go; do not edit.
|
||||
|
||||
package base58
|
||||
|
||||
const (
|
||||
// Ciphers is the modified base58 alphabet used by Bitcoin.
|
||||
Ciphers = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
|
||||
|
||||
alphabetIdx0 = '1'
|
||||
)
|
||||
|
||||
var b58 = [256]byte{`)
|
||||
|
||||
end = []byte(`}`)
|
||||
|
||||
alphabet = []byte("123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz")
|
||||
tab = []byte("\t")
|
||||
invalid = []byte("255")
|
||||
comma = []byte(",")
|
||||
space = []byte(" ")
|
||||
nl = []byte("\n")
|
||||
)
|
||||
|
||||
func write(w io.Writer, b []byte) {
|
||||
_, err := w.Write(b)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
fi, err := os.Create("alphabet.go")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer fi.Close()
|
||||
|
||||
write(fi, start)
|
||||
write(fi, nl)
|
||||
for i := byte(0); i < 32; i++ {
|
||||
write(fi, tab)
|
||||
for j := byte(0); j < 8; j++ {
|
||||
idx := bytes.IndexByte(alphabet, i*8+j)
|
||||
if idx == -1 {
|
||||
write(fi, invalid)
|
||||
} else {
|
||||
write(fi, strconv.AppendInt(nil, int64(idx), 10))
|
||||
}
|
||||
write(fi, comma)
|
||||
if j != 7 {
|
||||
write(fi, space)
|
||||
}
|
||||
}
|
||||
write(fi, nl)
|
||||
}
|
||||
write(fi, end)
|
||||
write(fi, nl)
|
||||
}
|
||||
27
pkg/crypto/ec/bech32/README.adoc
Normal file
27
pkg/crypto/ec/bech32/README.adoc
Normal file
@@ -0,0 +1,27 @@
|
||||
= bech32
|
||||
|
||||
image:http://img.shields.io/badge/license-ISC-blue.svg[ISC License,link=http://copyfree.org]
|
||||
image:https://godoc.org/realy.lol/pkg/ec/bech32?status.png[GoDoc,link=http://godoc.org/realy.lol/pkg/ec/bech32]
|
||||
|
||||
Package bech32 provides a Go implementation of the bech32 format specified in
|
||||
https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki[BIP 173].
|
||||
|
||||
Test vectors from BIP 173 are added to ensure compatibility with the BIP.
|
||||
|
||||
== Installation and Updating
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ go get -u mleku.dev/pkg/ec/bech32
|
||||
----
|
||||
|
||||
== Examples
|
||||
|
||||
* http://godoc.org/realy.lol/pkg/ec/bech32#example-Bech32Decode[Bech32 decode Example]
|
||||
Demonstrates how to decode a bech32 encoded string.
|
||||
* http://godoc.org/realy.lol/pkg/ec/bech32#example-BechEncode[Bech32 encode Example]
|
||||
Demonstrates how to encode data into a bech32 string.
|
||||
|
||||
== License
|
||||
|
||||
Package bech32 is licensed under the http://copyfree.org[copyfree] ISC License.
|
||||
411
pkg/crypto/ec/bech32/bech32.go
Normal file
411
pkg/crypto/ec/bech32/bech32.go
Normal file
@@ -0,0 +1,411 @@
|
||||
// Copyright (c) 2017 The btcsuite developers
|
||||
// Copyright (c) 2019 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package bech32
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Charset is the set of characters used in the data section of bech32 strings.
|
||||
// Note that this is ordered, such that for a given charset[i], i is the binary
|
||||
// value of the character.
|
||||
//
|
||||
// This wasn't exported in the original lol.
|
||||
const Charset = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
|
||||
|
||||
// gen encodes the generator polynomial for the bech32 BCH checksum.
|
||||
var gen = []int{0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3}
|
||||
|
||||
// toBytes converts each character in the string 'chars' to the value of the
|
||||
// index of the corresponding character in 'charset'.
|
||||
func toBytes(chars []byte) ([]byte, error) {
|
||||
decoded := make([]byte, 0, len(chars))
|
||||
for i := 0; i < len(chars); i++ {
|
||||
index := strings.IndexByte(Charset, chars[i])
|
||||
if index < 0 {
|
||||
return nil, ErrNonCharsetChar(chars[i])
|
||||
}
|
||||
decoded = append(decoded, byte(index))
|
||||
}
|
||||
return decoded, nil
|
||||
}
|
||||
|
||||
// bech32Polymod calculates the BCH checksum for a given hrp, values and
|
||||
// checksum data. Checksum is optional, and if nil a 0 checksum is assumed.
|
||||
//
|
||||
// Values and checksum (if provided) MUST be encoded as 5 bits per element (base
|
||||
// 32), otherwise the results are undefined.
|
||||
//
|
||||
// For more details on the polymod calculation, please refer to BIP 173.
|
||||
func bech32Polymod(hrp []byte, values, checksum []byte) int {
|
||||
check := 1
|
||||
// Account for the high bits of the HRP in the checksum.
|
||||
for i := 0; i < len(hrp); i++ {
|
||||
b := check >> 25
|
||||
hiBits := int(hrp[i]) >> 5
|
||||
check = (check&0x1ffffff)<<5 ^ hiBits
|
||||
for i := 0; i < 5; i++ {
|
||||
if (b>>uint(i))&1 == 1 {
|
||||
check ^= gen[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
// Account for the separator (0) between high and low bits of the HRP.
|
||||
// x^0 == x, so we eliminate the redundant xor used in the other rounds.
|
||||
b := check >> 25
|
||||
check = (check & 0x1ffffff) << 5
|
||||
for i := 0; i < 5; i++ {
|
||||
if (b>>uint(i))&1 == 1 {
|
||||
check ^= gen[i]
|
||||
}
|
||||
}
|
||||
// Account for the low bits of the HRP.
|
||||
for i := 0; i < len(hrp); i++ {
|
||||
b := check >> 25
|
||||
loBits := int(hrp[i]) & 31
|
||||
check = (check&0x1ffffff)<<5 ^ loBits
|
||||
for i := 0; i < 5; i++ {
|
||||
if (b>>uint(i))&1 == 1 {
|
||||
check ^= gen[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
// Account for the values.
|
||||
for _, v := range values {
|
||||
b := check >> 25
|
||||
check = (check&0x1ffffff)<<5 ^ int(v)
|
||||
for i := 0; i < 5; i++ {
|
||||
if (b>>uint(i))&1 == 1 {
|
||||
check ^= gen[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
if checksum == nil {
|
||||
// A nil checksum is used during encoding, so assume all bytes are zero.
|
||||
// x^0 == x, so we eliminate the redundant xor used in the other rounds.
|
||||
for v := 0; v < 6; v++ {
|
||||
b := check >> 25
|
||||
check = (check & 0x1ffffff) << 5
|
||||
for i := 0; i < 5; i++ {
|
||||
if (b>>uint(i))&1 == 1 {
|
||||
check ^= gen[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Checksum is provided during decoding, so use it.
|
||||
for _, v := range checksum {
|
||||
b := check >> 25
|
||||
check = (check&0x1ffffff)<<5 ^ int(v)
|
||||
for i := 0; i < 5; i++ {
|
||||
if (b>>uint(i))&1 == 1 {
|
||||
check ^= gen[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return check
|
||||
}
|
||||
|
||||
// writeBech32Checksum calculates the checksum data expected for a string that
|
||||
// will have the given hrp and payload data and writes it to the provided string
|
||||
// builder.
|
||||
//
|
||||
// The payload data MUST be encoded as a base 32 (5 bits per element) byte slice
|
||||
// and the hrp MUST only use the allowed character set (ascii chars between 33
|
||||
// and 126), otherwise the results are undefined.
|
||||
//
|
||||
// For more details on the checksum calculation, please refer to BIP 173.
|
||||
func writeBech32Checksum(
|
||||
hrp []byte, data []byte, bldr *bytes.Buffer,
|
||||
version Version,
|
||||
) {
|
||||
|
||||
bech32Const := int(VersionToConsts[version])
|
||||
polymod := bech32Polymod(hrp, data, nil) ^ bech32Const
|
||||
for i := 0; i < 6; i++ {
|
||||
b := byte((polymod >> uint(5*(5-i))) & 31)
|
||||
// This can't fail, given we explicitly cap the previous b byte by the
|
||||
// first 31 bits.
|
||||
c := Charset[b]
|
||||
bldr.WriteByte(c)
|
||||
}
|
||||
}
|
||||
|
||||
// bech32VerifyChecksum verifies whether the bech32 string specified by the
|
||||
// provided hrp and payload data (encoded as 5 bits per element byte slice) has
|
||||
// the correct checksum suffix. The version of bech32 used (bech32 OG, or
|
||||
// bech32m) is also returned to allow the caller to perform proper address
|
||||
// validation (segwitv0 should use bech32, v1+ should use bech32m).
|
||||
//
|
||||
// Data MUST have more than 6 elements, otherwise this function panics.
|
||||
//
|
||||
// For more details on the checksum verification, please refer to BIP 173.
|
||||
func bech32VerifyChecksum(hrp []byte, data []byte) (Version, bool) {
|
||||
checksum := data[len(data)-6:]
|
||||
values := data[:len(data)-6]
|
||||
polymod := bech32Polymod(hrp, values, checksum)
|
||||
// Before BIP-350, we'd always check this against a static constant of
|
||||
// 1 to know if the checksum was computed properly. As we want to
|
||||
// generically support decoding for bech32m as well as bech32, we'll
|
||||
// look up the returned value and compare it to the set of defined
|
||||
// constants.
|
||||
bech32Version, ok := ConstsToVersion[ChecksumConst(polymod)]
|
||||
if ok {
|
||||
return bech32Version, true
|
||||
}
|
||||
return VersionUnknown, false
|
||||
}
|
||||
|
||||
// DecodeNoLimit is a bech32 checksum version aware arbitrary string length
|
||||
// decoder. This function will return the version of the decoded checksum
|
||||
// constant so higher level validation can be performed to ensure the correct
|
||||
// version of bech32 was used when encoding.
|
||||
func decodeNoLimit(bech []byte) ([]byte, []byte, Version, error) {
|
||||
// The minimum allowed size of a bech32 string is 8 characters, since it
|
||||
// needs a non-empty HRP, a separator, and a 6 character checksum.
|
||||
if len(bech) < 8 {
|
||||
return nil, nil, VersionUnknown, ErrInvalidLength(len(bech))
|
||||
}
|
||||
// Only ASCII characters between 33 and 126 are allowed.
|
||||
var hasLower, hasUpper bool
|
||||
for i := 0; i < len(bech); i++ {
|
||||
if bech[i] < 33 || bech[i] > 126 {
|
||||
return nil, nil, VersionUnknown, ErrInvalidCharacter(bech[i])
|
||||
}
|
||||
// The characters must be either all lowercase or all uppercase. Testing
|
||||
// directly with ascii codes is safe here, given the previous test.
|
||||
hasLower = hasLower || (bech[i] >= 97 && bech[i] <= 122)
|
||||
hasUpper = hasUpper || (bech[i] >= 65 && bech[i] <= 90)
|
||||
if hasLower && hasUpper {
|
||||
return nil, nil, VersionUnknown, ErrMixedCase{}
|
||||
}
|
||||
}
|
||||
// Bech32 standard uses only the lowercase for of strings for checksum
|
||||
// calculation.
|
||||
if hasUpper {
|
||||
bech = bytes.ToLower(bech)
|
||||
}
|
||||
// The string is invalid if the last '1' is non-existent, it is the
|
||||
// first character of the string (no human-readable part) or one of the
|
||||
// last 6 characters of the string (since checksum cannot contain '1').
|
||||
one := bytes.LastIndexByte(bech, '1')
|
||||
if one < 1 || one+7 > len(bech) {
|
||||
return nil, nil, VersionUnknown, ErrInvalidSeparatorIndex(one)
|
||||
}
|
||||
// The human-readable part is everything before the last '1'.
|
||||
hrp := bech[:one]
|
||||
data := bech[one+1:]
|
||||
// Each character corresponds to the byte with value of the index in
|
||||
// 'charset'.
|
||||
decoded, err := toBytes(data)
|
||||
if err != nil {
|
||||
return nil, nil, VersionUnknown, err
|
||||
}
|
||||
// Verify if the checksum (stored inside decoded[:]) is valid, given the
|
||||
// previously decoded hrp.
|
||||
bech32Version, ok := bech32VerifyChecksum(hrp, decoded)
|
||||
if !ok {
|
||||
// Invalid checksum. Calculate what it should have been, so that the
|
||||
// error contains this information.
|
||||
//
|
||||
// Extract the payload bytes and actual checksum in the string.
|
||||
actual := bech[len(bech)-6:]
|
||||
payload := decoded[:len(decoded)-6]
|
||||
// Calculate the expected checksum, given the hrp and payload
|
||||
// data. We'll actually compute _both_ possibly valid checksum
|
||||
// to further aide in debugging.
|
||||
var expectedBldr bytes.Buffer
|
||||
expectedBldr.Grow(6)
|
||||
writeBech32Checksum(hrp, payload, &expectedBldr, Version0)
|
||||
expectedVersion0 := expectedBldr.String()
|
||||
var b strings.Builder
|
||||
b.Grow(6)
|
||||
writeBech32Checksum(hrp, payload, &expectedBldr, VersionM)
|
||||
expectedVersionM := expectedBldr.String()
|
||||
err = ErrInvalidChecksum{
|
||||
Expected: expectedVersion0,
|
||||
ExpectedM: expectedVersionM,
|
||||
Actual: string(actual),
|
||||
}
|
||||
return nil, nil, VersionUnknown, err
|
||||
}
|
||||
// We exclude the last 6 bytes, which is the checksum.
|
||||
return hrp, decoded[:len(decoded)-6], bech32Version, nil
|
||||
}
|
||||
|
||||
// DecodeNoLimit decodes a bech32 encoded string, returning the human-readable
|
||||
// part and the data part excluding the checksum. This function does NOT
|
||||
// validate against the BIP-173 maximum length allowed for bech32 strings and
|
||||
// is meant for use in custom applications (such as lightning network payment
|
||||
// requests), NOT on-chain addresses.
|
||||
//
|
||||
// Note that the returned data is 5-bit (base32) encoded and the human-readable
|
||||
// part will be lowercase.
|
||||
func DecodeNoLimit(bech []byte) ([]byte, []byte, error) {
|
||||
hrp, data, _, err := decodeNoLimit(bech)
|
||||
return hrp, data, err
|
||||
}
|
||||
|
||||
// Decode decodes a bech32 encoded string, returning the human-readable part and
|
||||
// the data part excluding the checksum.
|
||||
//
|
||||
// Note that the returned data is 5-bit (base32) encoded and the human-readable
|
||||
// part will be lowercase.
|
||||
func Decode(bech []byte) ([]byte, []byte, error) {
|
||||
// The maximum allowed length for a bech32 string is 90.
|
||||
if len(bech) > 90 {
|
||||
return nil, nil, ErrInvalidLength(len(bech))
|
||||
}
|
||||
hrp, data, _, err := decodeNoLimit(bech)
|
||||
return hrp, data, err
|
||||
}
|
||||
|
||||
// DecodeGeneric is identical to the existing Decode method, but will also
|
||||
// return bech32 version that matches the decoded checksum. This method should
|
||||
// be used when decoding segwit addresses, as it enables additional
|
||||
// verification to ensure the proper checksum is used.
|
||||
func DecodeGeneric(bech []byte) ([]byte, []byte, Version, error) {
|
||||
// The maximum allowed length for a bech32 string is 90.
|
||||
if len(bech) > 90 {
|
||||
return nil, nil, VersionUnknown, ErrInvalidLength(len(bech))
|
||||
}
|
||||
return decodeNoLimit(bech)
|
||||
}
|
||||
|
||||
// encodeGeneric is the base bech32 encoding function that is aware of the
|
||||
// existence of the checksum versions. This method is private, as the Encode
|
||||
// and EncodeM methods are intended to be used instead.
|
||||
func encodeGeneric(hrp []byte, data []byte, version Version) ([]byte, error) {
|
||||
// The resulting bech32 string is the concatenation of the lowercase
|
||||
// hrp, the separator 1, data and the 6-byte checksum.
|
||||
hrp = bytes.ToLower(hrp)
|
||||
var bldr bytes.Buffer
|
||||
bldr.Grow(len(hrp) + 1 + len(data) + 6)
|
||||
bldr.Write(hrp)
|
||||
bldr.WriteString("1")
|
||||
// Write the data part, using the bech32 charset.
|
||||
for _, b := range data {
|
||||
if int(b) >= len(Charset) {
|
||||
return nil, ErrInvalidDataByte(b)
|
||||
}
|
||||
bldr.WriteByte(Charset[b])
|
||||
}
|
||||
// Calculate and write the checksum of the data.
|
||||
writeBech32Checksum(hrp, data, &bldr, version)
|
||||
return bldr.Bytes(), nil
|
||||
}
|
||||
|
||||
// Encode encodes a byte slice into a bech32 string with the given
|
||||
// human-readable part (HRP). The HRP will be converted to lowercase if needed
|
||||
// since mixed cased encodings are not permitted and lowercase is used for
|
||||
// checksum purposes. Note that the bytes must each encode 5 bits (base32).
|
||||
func Encode(hrp, data []byte) ([]byte, error) {
|
||||
return encodeGeneric(hrp, data, Version0)
|
||||
}
|
||||
|
||||
// EncodeM is the exactly same as the Encode method, but it uses the new
|
||||
// bech32m constant instead of the original one. It should be used whenever one
|
||||
// attempts to encode a segwit address of v1 and beyond.
|
||||
func EncodeM(hrp, data []byte) ([]byte, error) {
|
||||
return encodeGeneric(hrp, data, VersionM)
|
||||
}
|
||||
|
||||
// ConvertBits converts a byte slice where each byte is encoding fromBits bits,
|
||||
// to a byte slice where each byte is encoding toBits bits.
|
||||
func ConvertBits(data []byte, fromBits, toBits uint8, pad bool) (
|
||||
[]byte,
|
||||
error,
|
||||
) {
|
||||
|
||||
if fromBits < 1 || fromBits > 8 || toBits < 1 || toBits > 8 {
|
||||
return nil, ErrInvalidBitGroups{}
|
||||
}
|
||||
// Determine the maximum size the resulting array can have after base
|
||||
// conversion, so that we can size it a single time. This might be off
|
||||
// by a byte depending on whether padding is used or not and if the input
|
||||
// data is a multiple of both fromBits and toBits, but we ignore that and
|
||||
// just size it to the maximum possible.
|
||||
maxSize := len(data)*int(fromBits)/int(toBits) + 1
|
||||
// The final bytes, each byte encoding toBits bits.
|
||||
regrouped := make([]byte, 0, maxSize)
|
||||
// Keep track of the next byte we create and how many bits we have
|
||||
// added to it out of the toBits goal.
|
||||
nextByte := byte(0)
|
||||
filledBits := uint8(0)
|
||||
for _, b := range data {
|
||||
// Discard unused bits.
|
||||
b <<= 8 - fromBits
|
||||
// How many bits remaining to extract from the input data.
|
||||
remFromBits := fromBits
|
||||
for remFromBits > 0 {
|
||||
// How many bits remaining to be added to the next byte.
|
||||
remToBits := toBits - filledBits
|
||||
// The number of bytes to next extract is the minimum of
|
||||
// remFromBits and remToBits.
|
||||
toExtract := remFromBits
|
||||
if remToBits < toExtract {
|
||||
toExtract = remToBits
|
||||
}
|
||||
// Add the next bits to nextByte, shifting the already
|
||||
// added bits to the left.
|
||||
nextByte = (nextByte << toExtract) | (b >> (8 - toExtract))
|
||||
// Discard the bits we just extracted and get ready for
|
||||
// next iteration.
|
||||
b <<= toExtract
|
||||
remFromBits -= toExtract
|
||||
filledBits += toExtract
|
||||
// If the nextByte is completely filled, we add it to
|
||||
// our regrouped bytes and start on the next byte.
|
||||
if filledBits == toBits {
|
||||
regrouped = append(regrouped, nextByte)
|
||||
filledBits = 0
|
||||
nextByte = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
// We pad any unfinished group if specified.
|
||||
if pad && filledBits > 0 {
|
||||
nextByte <<= toBits - filledBits
|
||||
regrouped = append(regrouped, nextByte)
|
||||
filledBits = 0
|
||||
nextByte = 0
|
||||
}
|
||||
// Any incomplete group must be <= 4 bits, and all zeroes.
|
||||
if filledBits > 0 && (filledBits > 4 || nextByte != 0) {
|
||||
return nil, ErrInvalidIncompleteGroup{}
|
||||
}
|
||||
return regrouped, nil
|
||||
}
|
||||
|
||||
// EncodeFromBase256 converts a base256-encoded byte slice into a base32-encoded
|
||||
// byte slice and then encodes it into a bech32 string with the given
|
||||
// human-readable part (HRP). The HRP will be converted to lowercase if needed
|
||||
// since mixed cased encodings are not permitted and lowercase is used for
|
||||
// checksum purposes.
|
||||
func EncodeFromBase256(hrp, data []byte) ([]byte, error) {
|
||||
converted, err := ConvertBits(data, 8, 5, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return Encode(hrp, converted)
|
||||
}
|
||||
|
||||
// DecodeToBase256 decodes a bech32-encoded string into its associated
|
||||
// human-readable part (HRP) and base32-encoded data, converts that data to a
|
||||
// base256-encoded byte slice and returns it along with the lowercase HRP.
|
||||
func DecodeToBase256(bech []byte) ([]byte, []byte, error) {
|
||||
hrp, data, err := Decode(bech)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
converted, err := ConvertBits(data, 5, 8, false)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return hrp, converted, nil
|
||||
}
|
||||
776
pkg/crypto/ec/bech32/bech32_test.go
Normal file
776
pkg/crypto/ec/bech32/bech32_test.go
Normal file
@@ -0,0 +1,776 @@
|
||||
// Copyright (c) 2017-2020 The btcsuite developers
|
||||
// Copyright (c) 2019 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package bech32
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
)
|
||||
|
||||
// TestBech32 tests whether decoding and re-encoding the valid BIP-173 test
|
||||
// vectors works and if decoding invalid test vectors fails for the correct
|
||||
// reason.
|
||||
func TestBech32(t *testing.T) {
|
||||
tests := []struct {
|
||||
str string
|
||||
expectedError error
|
||||
}{
|
||||
{"A12UEL5L", nil},
|
||||
{"a12uel5l", nil},
|
||||
{
|
||||
"an83characterlonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio1tt5tgs",
|
||||
nil,
|
||||
},
|
||||
{"abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw", nil},
|
||||
{
|
||||
"11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
|
||||
nil,
|
||||
},
|
||||
{"split1checkupstagehandshakeupstreamerranterredcaperred2y9e3w", nil},
|
||||
{
|
||||
"split1checkupstagehandshakeupstreamerranterredcaperred2y9e2w",
|
||||
ErrInvalidChecksum{
|
||||
"2y9e3w", "2y9e3wlc445v",
|
||||
"2y9e2w",
|
||||
},
|
||||
}, // invalid checksum
|
||||
{
|
||||
"s lit1checkupstagehandshakeupstreamerranterredcaperredp8hs2p",
|
||||
ErrInvalidCharacter(' '),
|
||||
}, // invalid character (space) in hrp
|
||||
{
|
||||
"spl\x7Ft1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
|
||||
ErrInvalidCharacter(127),
|
||||
}, // invalid character (DEL) in hrp
|
||||
{
|
||||
"split1cheo2y9e2w",
|
||||
ErrNonCharsetChar('o'),
|
||||
}, // invalid character (o) in data part
|
||||
{"split1a2y9w", ErrInvalidSeparatorIndex(5)}, // too short data part
|
||||
{
|
||||
"1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
|
||||
ErrInvalidSeparatorIndex(0),
|
||||
}, // empty hrp
|
||||
{
|
||||
"11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqsqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
|
||||
ErrInvalidLength(91),
|
||||
}, // too long
|
||||
// Additional test vectors used in bitcoin core
|
||||
{" 1nwldj5", ErrInvalidCharacter(' ')},
|
||||
{"\x7f" + "1axkwrx", ErrInvalidCharacter(0x7f)},
|
||||
{"\x801eym55h", ErrInvalidCharacter(0x80)},
|
||||
{
|
||||
"an84characterslonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio1569pvx",
|
||||
ErrInvalidLength(91),
|
||||
},
|
||||
{"pzry9x0s0muk", ErrInvalidSeparatorIndex(-1)},
|
||||
{"1pzry9x0s0muk", ErrInvalidSeparatorIndex(0)},
|
||||
{"x1b4n0q5v", ErrNonCharsetChar(98)},
|
||||
{"li1dgmt3", ErrInvalidSeparatorIndex(2)},
|
||||
{"de1lg7wt\xff", ErrInvalidCharacter(0xff)},
|
||||
{"A1G7SGD8", ErrInvalidChecksum{"2uel5l", "2uel5llqfn3a", "g7sgd8"}},
|
||||
{"10a06t8", ErrInvalidLength(7)},
|
||||
{"1qzzfhee", ErrInvalidSeparatorIndex(0)},
|
||||
{"a12UEL5L", ErrMixedCase{}},
|
||||
{"A12uEL5L", ErrMixedCase{}},
|
||||
}
|
||||
for i, test := range tests {
|
||||
str := []byte(test.str)
|
||||
hrp, decoded, err := Decode([]byte(str))
|
||||
if !errors.Is(err, test.expectedError) {
|
||||
t.Errorf(
|
||||
"%d: expected decoding error %v "+
|
||||
"instead got %v", i, test.expectedError, err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
// End test case here if a decoding error was expected.
|
||||
continue
|
||||
}
|
||||
// Check that it encodes to the same string
|
||||
encoded, err := Encode(hrp, decoded)
|
||||
if err != nil {
|
||||
t.Errorf("encoding failed: %v", err)
|
||||
}
|
||||
if !utils.FastEqual(encoded, bytes.ToLower([]byte(str))) {
|
||||
t.Errorf(
|
||||
"expected data to encode to %v, but got %v",
|
||||
str, encoded,
|
||||
)
|
||||
}
|
||||
// Flip a bit in the string an make sure it is caught.
|
||||
pos := bytes.LastIndexAny(str, "1")
|
||||
flipped := []byte(string(str[:pos+1]) + string(str[pos+1]^1) + string(str[pos+2:]))
|
||||
_, _, err = Decode(flipped)
|
||||
if err == nil {
|
||||
t.Error("expected decoding to fail")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestBech32M tests that the following set of strings, based on the test
|
||||
// vectors in BIP-350 are either valid or invalid using the new bech32m
|
||||
// checksum algo. Some of these strings are similar to the set of above test
|
||||
// vectors, but end up with different checksums.
|
||||
func TestBech32M(t *testing.T) {
|
||||
tests := []struct {
|
||||
str string
|
||||
expectedError error
|
||||
}{
|
||||
{"A1LQFN3A", nil},
|
||||
{"a1lqfn3a", nil},
|
||||
{
|
||||
"an83characterlonghumanreadablepartthatcontainsthetheexcludedcharactersbioandnumber11sg7hg6",
|
||||
nil,
|
||||
},
|
||||
{"abcdef1l7aum6echk45nj3s0wdvt2fg8x9yrzpqzd3ryx", nil},
|
||||
{
|
||||
"11llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllludsr8",
|
||||
nil,
|
||||
},
|
||||
{"split1checkupstagehandshakeupstreamerranterredcaperredlc445v", nil},
|
||||
{"?1v759aa", nil},
|
||||
// Additional test vectors used in bitcoin core
|
||||
{"\x201xj0phk", ErrInvalidCharacter('\x20')},
|
||||
{"\x7f1g6xzxy", ErrInvalidCharacter('\x7f')},
|
||||
{"\x801vctc34", ErrInvalidCharacter('\x80')},
|
||||
{
|
||||
"an84characterslonghumanreadablepartthatcontainsthetheexcludedcharactersbioandnumber11d6pts4",
|
||||
ErrInvalidLength(91),
|
||||
},
|
||||
{"qyrz8wqd2c9m", ErrInvalidSeparatorIndex(-1)},
|
||||
{"1qyrz8wqd2c9m", ErrInvalidSeparatorIndex(0)},
|
||||
{"y1b0jsk6g", ErrNonCharsetChar(98)},
|
||||
{"lt1igcx5c0", ErrNonCharsetChar(105)},
|
||||
{"in1muywd", ErrInvalidSeparatorIndex(2)},
|
||||
{"mm1crxm3i", ErrNonCharsetChar(105)},
|
||||
{"au1s5cgom", ErrNonCharsetChar(111)},
|
||||
{"M1VUXWEZ", ErrInvalidChecksum{"mzl49c", "mzl49cw70eq6", "vuxwez"}},
|
||||
{"16plkw9", ErrInvalidLength(7)},
|
||||
{"1p2gdwpf", ErrInvalidSeparatorIndex(0)},
|
||||
|
||||
{" 1nwldj5", ErrInvalidCharacter(' ')},
|
||||
{"\x7f" + "1axkwrx", ErrInvalidCharacter(0x7f)},
|
||||
{"\x801eym55h", ErrInvalidCharacter(0x80)},
|
||||
}
|
||||
for i, test := range tests {
|
||||
str := []byte(test.str)
|
||||
hrp, decoded, err := Decode(str)
|
||||
if test.expectedError != err {
|
||||
t.Errorf(
|
||||
"%d: (%v) expected decoding error %v "+
|
||||
"instead got %v", i, str, test.expectedError,
|
||||
err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
// End test case here if a decoding error was expected.
|
||||
continue
|
||||
}
|
||||
// Check that it encodes to the same string, using bech32 m.
|
||||
encoded, err := EncodeM(hrp, decoded)
|
||||
if err != nil {
|
||||
t.Errorf("encoding failed: %v", err)
|
||||
}
|
||||
|
||||
if !utils.FastEqual(encoded, bytes.ToLower(str)) {
|
||||
t.Errorf(
|
||||
"expected data to encode to %v, but got %v",
|
||||
str, encoded,
|
||||
)
|
||||
}
|
||||
// Flip a bit in the string an make sure it is caught.
|
||||
pos := bytes.LastIndexAny(str, "1")
|
||||
flipped := []byte(string(str[:pos+1]) + string(str[pos+1]^1) + string(str[pos+2:]))
|
||||
_, _, err = Decode(flipped)
|
||||
if err == nil {
|
||||
t.Error("expected decoding to fail")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestBech32DecodeGeneric tests that given a bech32 string, or a bech32m
|
||||
// string, the proper checksum version is returned so that callers can perform
|
||||
// segwit addr validation.
|
||||
func TestBech32DecodeGeneric(t *testing.T) {
|
||||
tests := []struct {
|
||||
str string
|
||||
version Version
|
||||
}{
|
||||
{"A1LQFN3A", VersionM},
|
||||
{"a1lqfn3a", VersionM},
|
||||
{
|
||||
"an83characterlonghumanreadablepartthatcontainsthetheexcludedcharactersbioandnumber11sg7hg6",
|
||||
VersionM,
|
||||
},
|
||||
{"abcdef1l7aum6echk45nj3s0wdvt2fg8x9yrzpqzd3ryx", VersionM},
|
||||
{
|
||||
"11llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllludsr8",
|
||||
VersionM,
|
||||
},
|
||||
{
|
||||
"split1checkupstagehandshakeupstreamerranterredcaperredlc445v",
|
||||
VersionM,
|
||||
},
|
||||
{"?1v759aa", VersionM},
|
||||
{"A12UEL5L", Version0},
|
||||
{"a12uel5l", Version0},
|
||||
{
|
||||
"an83characterlonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio1tt5tgs",
|
||||
Version0,
|
||||
},
|
||||
{"abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw", Version0},
|
||||
{
|
||||
"11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
|
||||
Version0,
|
||||
},
|
||||
{
|
||||
"split1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
|
||||
Version0,
|
||||
},
|
||||
{"BC1QW508D6QEJXTDG4Y5R3ZARVARY0C5XW7KV8F3T4", Version0},
|
||||
{
|
||||
"tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7",
|
||||
Version0,
|
||||
},
|
||||
{
|
||||
"bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kt5nd6y",
|
||||
VersionM,
|
||||
},
|
||||
{"BC1SW50QGDZ25J", VersionM},
|
||||
{"bc1zw508d6qejxtdg4y5r3zarvaryvaxxpcs", VersionM},
|
||||
{
|
||||
"tb1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesrxh6hy",
|
||||
Version0,
|
||||
},
|
||||
{
|
||||
"tb1pqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesf3hn0c",
|
||||
VersionM,
|
||||
},
|
||||
{
|
||||
"bc1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqzk5jj0",
|
||||
VersionM,
|
||||
},
|
||||
}
|
||||
for i, test := range tests {
|
||||
_, _, version, err := DecodeGeneric([]byte(test.str))
|
||||
if err != nil {
|
||||
t.Errorf(
|
||||
"%d: (%v) unexpected error during "+
|
||||
"decoding: %v", i, test.str, err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if version != test.version {
|
||||
t.Errorf(
|
||||
"(%v): invalid version: expected %v, got %v",
|
||||
test.str, test.version, version,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestMixedCaseEncode ensures mixed case HRPs are converted to lowercase as
|
||||
// expected when encoding and that decoding the produced encoding when converted
|
||||
// to all uppercase produces the lowercase HRP and original data.
|
||||
func TestMixedCaseEncode(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
hrp string
|
||||
data string
|
||||
encoded string
|
||||
}{
|
||||
{
|
||||
name: "all uppercase HRP with no data",
|
||||
hrp: "A",
|
||||
data: "",
|
||||
encoded: "a12uel5l",
|
||||
}, {
|
||||
name: "all uppercase HRP with data",
|
||||
hrp: "UPPERCASE",
|
||||
data: "787878",
|
||||
encoded: "uppercase10pu8sss7kmp",
|
||||
}, {
|
||||
name: "mixed case HRP even offsets uppercase",
|
||||
hrp: "AbCdEf",
|
||||
data: "00443214c74254b635cf84653a56d7c675be77df",
|
||||
encoded: "abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
|
||||
}, {
|
||||
name: "mixed case HRP odd offsets uppercase ",
|
||||
hrp: "aBcDeF",
|
||||
data: "00443214c74254b635cf84653a56d7c675be77df",
|
||||
encoded: "abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
|
||||
}, {
|
||||
name: "all lowercase HRP",
|
||||
hrp: "abcdef",
|
||||
data: "00443214c74254b635cf84653a56d7c675be77df",
|
||||
encoded: "abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
// Convert the text hex to bytes, convert those bytes from base256 to
|
||||
// base32, then ensure the encoded result with the HRP provided in the
|
||||
// test data is as expected.
|
||||
data, err := hex.DecodeString(test.data)
|
||||
if err != nil {
|
||||
t.Errorf("%q: invalid hex %q: %v", test.name, test.data, err)
|
||||
continue
|
||||
}
|
||||
convertedData, err := ConvertBits(data, 8, 5, true)
|
||||
if err != nil {
|
||||
t.Errorf(
|
||||
"%q: unexpected convert bits error: %v", test.name,
|
||||
err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
gotEncoded, err := Encode([]byte(test.hrp), convertedData)
|
||||
if err != nil {
|
||||
t.Errorf("%q: unexpected encode error: %v", test.name, err)
|
||||
continue
|
||||
}
|
||||
if !utils.FastEqual(gotEncoded, []byte(test.encoded)) {
|
||||
t.Errorf(
|
||||
"%q: mismatched encoding -- got %q, want %q", test.name,
|
||||
gotEncoded, test.encoded,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Ensure the decoding the expected lowercase encoding converted to all
|
||||
// uppercase produces the lowercase HRP and original data.
|
||||
gotHRP, gotData, err := Decode(bytes.ToUpper([]byte(test.encoded)))
|
||||
if err != nil {
|
||||
t.Errorf("%q: unexpected decode error: %v", test.name, err)
|
||||
continue
|
||||
}
|
||||
wantHRP := strings.ToLower(test.hrp)
|
||||
if !utils.FastEqual(gotHRP, []byte(wantHRP)) {
|
||||
t.Errorf(
|
||||
"%q: mismatched decoded HRP -- got %q, want %q", test.name,
|
||||
gotHRP, wantHRP,
|
||||
)
|
||||
continue
|
||||
}
|
||||
convertedGotData, err := ConvertBits(gotData, 5, 8, false)
|
||||
if err != nil {
|
||||
t.Errorf(
|
||||
"%q: unexpected convert bits error: %v", test.name,
|
||||
err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !utils.FastEqual(convertedGotData, data) {
|
||||
t.Errorf(
|
||||
"%q: mismatched data -- got %x, want %x", test.name,
|
||||
convertedGotData, data,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestCanDecodeUnlimitedBech32 tests whether decoding a large bech32 string works
|
||||
// when using the DecodeNoLimit version
|
||||
func TestCanDecodeUnlimitedBech32(t *testing.T) {
|
||||
input := "11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqsqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq5kx0yd"
|
||||
// Sanity check that an input of this length errors on regular Decode()
|
||||
_, _, err := Decode([]byte(input))
|
||||
if err == nil {
|
||||
t.Fatalf("Test vector not appropriate")
|
||||
}
|
||||
// Try and decode it.
|
||||
hrp, data, err := DecodeNoLimit([]byte(input))
|
||||
if err != nil {
|
||||
t.Fatalf(
|
||||
"Expected decoding of large string to work. Got error: %v",
|
||||
err,
|
||||
)
|
||||
}
|
||||
// Verify data for correctness.
|
||||
if !utils.FastEqual(hrp, []byte("1")) {
|
||||
t.Fatalf("Unexpected hrp: %v", hrp)
|
||||
}
|
||||
decodedHex := fmt.Sprintf("%x", data)
|
||||
expected := "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000"
|
||||
if decodedHex != expected {
|
||||
t.Fatalf("Unexpected decoded data: %s", decodedHex)
|
||||
}
|
||||
}
|
||||
|
||||
// TestBech32Base256 ensures decoding and encoding various bech32, HRPs, and
|
||||
// data produces the expected results when using EncodeFromBase256 and
|
||||
// DecodeToBase256. It includes tests for proper handling of case
|
||||
// manipulations.
|
||||
func TestBech32Base256(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string // test name
|
||||
encoded string // bech32 string to decode
|
||||
hrp string // expected human-readable part
|
||||
data string // expected hex-encoded data
|
||||
err error // expected error
|
||||
}{
|
||||
{
|
||||
name: "all uppercase, no data",
|
||||
encoded: "A12UEL5L",
|
||||
hrp: "a",
|
||||
data: "",
|
||||
}, {
|
||||
name: "long hrp with separator and excluded chars, no data",
|
||||
encoded: "an83characterlonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio1tt5tgs",
|
||||
hrp: "an83characterlonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio",
|
||||
data: "",
|
||||
}, {
|
||||
name: "6 char hrp with data with leading zero",
|
||||
encoded: "abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
|
||||
hrp: "abcdef",
|
||||
data: "00443214c74254b635cf84653a56d7c675be77df",
|
||||
}, {
|
||||
name: "hrp same as separator and max length encoded string",
|
||||
encoded: "11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
|
||||
hrp: "1",
|
||||
data: "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
}, {
|
||||
name: "5 char hrp with data chosen to produce human-readable data part",
|
||||
encoded: "split1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
|
||||
hrp: "split",
|
||||
data: "c5f38b70305f519bf66d85fb6cf03058f3dde463ecd7918f2dc743918f2d",
|
||||
}, {
|
||||
name: "same as previous but with checksum invalidated",
|
||||
encoded: "split1checkupstagehandshakeupstreamerranterredcaperred2y9e2w",
|
||||
err: ErrInvalidChecksum{"2y9e3w", "2y9e3wlc445v", "2y9e2w"},
|
||||
}, {
|
||||
name: "hrp with invalid character (space)",
|
||||
encoded: "s lit1checkupstagehandshakeupstreamerranterredcaperredp8hs2p",
|
||||
err: ErrInvalidCharacter(' '),
|
||||
}, {
|
||||
name: "hrp with invalid character (DEL)",
|
||||
encoded: "spl\x7ft1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
|
||||
err: ErrInvalidCharacter(127),
|
||||
}, {
|
||||
name: "data part with invalid character (o)",
|
||||
encoded: "split1cheo2y9e2w",
|
||||
err: ErrNonCharsetChar('o'),
|
||||
}, {
|
||||
name: "data part too short",
|
||||
encoded: "split1a2y9w",
|
||||
err: ErrInvalidSeparatorIndex(5),
|
||||
}, {
|
||||
name: "empty hrp",
|
||||
encoded: "1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
|
||||
err: ErrInvalidSeparatorIndex(0),
|
||||
}, {
|
||||
name: "no separator",
|
||||
encoded: "pzry9x0s0muk",
|
||||
err: ErrInvalidSeparatorIndex(-1),
|
||||
}, {
|
||||
name: "too long by one char",
|
||||
encoded: "11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqsqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
|
||||
err: ErrInvalidLength(91),
|
||||
}, {
|
||||
name: "invalid due to mixed case in hrp",
|
||||
encoded: "aBcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
|
||||
err: ErrMixedCase{},
|
||||
}, {
|
||||
name: "invalid due to mixed case in data part",
|
||||
encoded: "abcdef1Qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
|
||||
err: ErrMixedCase{},
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
// Ensure the decode either produces an error or not as expected.
|
||||
str := test.encoded
|
||||
gotHRP, gotData, err := DecodeToBase256([]byte(str))
|
||||
if test.err != err {
|
||||
t.Errorf(
|
||||
"%q: unexpected decode error -- got %v, want %v",
|
||||
test.name, err, test.err,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if err != nil {
|
||||
// End test case here if a decoding error was expected.
|
||||
continue
|
||||
}
|
||||
// Ensure the expected HRP and original data are as expected.
|
||||
if !utils.FastEqual(gotHRP, []byte(test.hrp)) {
|
||||
t.Errorf(
|
||||
"%q: mismatched decoded HRP -- got %q, want %q", test.name,
|
||||
gotHRP, test.hrp,
|
||||
)
|
||||
continue
|
||||
}
|
||||
data, err := hex.DecodeString(test.data)
|
||||
if err != nil {
|
||||
t.Errorf("%q: invalid hex %q: %v", test.name, test.data, err)
|
||||
continue
|
||||
}
|
||||
if !utils.FastEqual(gotData, data) {
|
||||
t.Errorf(
|
||||
"%q: mismatched data -- got %x, want %x", test.name,
|
||||
gotData, data,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Encode the same data with the HRP converted to all uppercase and
|
||||
// ensure the result is the lowercase version of the original encoded
|
||||
// bech32 string.
|
||||
gotEncoded, err := EncodeFromBase256(
|
||||
bytes.ToUpper([]byte(test.hrp)), data,
|
||||
)
|
||||
if err != nil {
|
||||
t.Errorf(
|
||||
"%q: unexpected uppercase HRP encode error: %v", test.name,
|
||||
err,
|
||||
)
|
||||
}
|
||||
wantEncoded := bytes.ToLower([]byte(str))
|
||||
if !utils.FastEqual(gotEncoded, wantEncoded) {
|
||||
t.Errorf(
|
||||
"%q: mismatched encoding -- got %q, want %q", test.name,
|
||||
gotEncoded, wantEncoded,
|
||||
)
|
||||
}
|
||||
// Encode the same data with the HRP converted to all lowercase and
|
||||
// ensure the result is the lowercase version of the original encoded
|
||||
// bech32 string.
|
||||
gotEncoded, err = EncodeFromBase256(
|
||||
bytes.ToLower([]byte(test.hrp)), data,
|
||||
)
|
||||
if err != nil {
|
||||
t.Errorf(
|
||||
"%q: unexpected lowercase HRP encode error: %v", test.name,
|
||||
err,
|
||||
)
|
||||
}
|
||||
if !utils.FastEqual(gotEncoded, wantEncoded) {
|
||||
t.Errorf(
|
||||
"%q: mismatched encoding -- got %q, want %q", test.name,
|
||||
gotEncoded, wantEncoded,
|
||||
)
|
||||
}
|
||||
// Encode the same data with the HRP converted to mixed upper and
|
||||
// lowercase and ensure the result is the lowercase version of the
|
||||
// original encoded bech32 string.
|
||||
var mixedHRPBuilder bytes.Buffer
|
||||
for i, r := range test.hrp {
|
||||
if i%2 == 0 {
|
||||
mixedHRPBuilder.WriteString(strings.ToUpper(string(r)))
|
||||
continue
|
||||
}
|
||||
mixedHRPBuilder.WriteRune(r)
|
||||
}
|
||||
gotEncoded, err = EncodeFromBase256(mixedHRPBuilder.Bytes(), data)
|
||||
if err != nil {
|
||||
t.Errorf(
|
||||
"%q: unexpected lowercase HRP encode error: %v", test.name,
|
||||
err,
|
||||
)
|
||||
}
|
||||
if !utils.FastEqual(gotEncoded, wantEncoded) {
|
||||
t.Errorf(
|
||||
"%q: mismatched encoding -- got %q, want %q", test.name,
|
||||
gotEncoded, wantEncoded,
|
||||
)
|
||||
}
|
||||
// Ensure a bit flip in the string is caught.
|
||||
pos := strings.LastIndexAny(test.encoded, "1")
|
||||
flipped := str[:pos+1] + string(str[pos+1]^1) + str[pos+2:]
|
||||
_, _, err = DecodeToBase256([]byte(flipped))
|
||||
if err == nil {
|
||||
t.Error("expected decoding to fail")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkEncodeDecodeCycle performs a benchmark for a full encode/decode
|
||||
// cycle of a bech32 string. It also reports the allocation count, which we
|
||||
// expect to be 2 for a fully optimized cycle.
|
||||
func BenchmarkEncodeDecodeCycle(b *testing.B) {
|
||||
// Use fixed, 49-byte raw data for testing.
|
||||
inputData, err := hex.DecodeString("cbe6365ddbcda9a9915422c3f091c13f8c7b2f263b8d34067bd12c274408473fa764871c9dd51b1bb34873b3473b633ed1")
|
||||
if err != nil {
|
||||
b.Fatalf("failed to initialize input data: %v", err)
|
||||
}
|
||||
// Convert this into a 79-byte, base 32 byte slice.
|
||||
base32Input, err := ConvertBits(inputData, 8, 5, true)
|
||||
if err != nil {
|
||||
b.Fatalf("failed to convert input to 32 bits-per-element: %v", err)
|
||||
}
|
||||
// Use a fixed hrp for the tests. This should generate an encoded bech32
|
||||
// string of size 90 (the maximum allowed by BIP-173).
|
||||
hrp := "bc"
|
||||
// Begin the benchmark. Given that we test one roundtrip per iteration
|
||||
// (that is, one Encode() and one Decode() operation), we expect at most
|
||||
// 2 allocations per reported test op.
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
str, err := Encode([]byte(hrp), base32Input)
|
||||
if err != nil {
|
||||
b.Fatalf("failed to encode input: %v", err)
|
||||
}
|
||||
_, _, err = Decode(str)
|
||||
if err != nil {
|
||||
b.Fatalf("failed to decode string: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestConvertBits tests whether base conversion works as expected using ConvertBits().
|
||||
func TestConvertBits(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
output string
|
||||
fromBits uint8
|
||||
toBits uint8
|
||||
pad bool
|
||||
}{
|
||||
// Trivial empty conversions.
|
||||
{"", "", 8, 5, false},
|
||||
{"", "", 8, 5, true},
|
||||
{"", "", 5, 8, false},
|
||||
{"", "", 5, 8, true},
|
||||
// Conversions of 0 value with/without padding.
|
||||
{"00", "00", 8, 5, false},
|
||||
{"00", "0000", 8, 5, true},
|
||||
{"0000", "00", 5, 8, false},
|
||||
{"0000", "0000", 5, 8, true},
|
||||
// Testing when conversion ends exactly at the byte edge. This makes
|
||||
// both padded and unpadded versions the same.
|
||||
{"0000000000", "0000000000000000", 8, 5, false},
|
||||
{"0000000000", "0000000000000000", 8, 5, true},
|
||||
{"0000000000000000", "0000000000", 5, 8, false},
|
||||
{"0000000000000000", "0000000000", 5, 8, true},
|
||||
// Conversions of full byte sequences.
|
||||
{"ffffff", "1f1f1f1f1e", 8, 5, true},
|
||||
{"1f1f1f1f1e", "ffffff", 5, 8, false},
|
||||
{"1f1f1f1f1e", "ffffff00", 5, 8, true},
|
||||
// Sample random conversions.
|
||||
{"c9ca", "190705", 8, 5, false},
|
||||
{"c9ca", "19070500", 8, 5, true},
|
||||
{"19070500", "c9ca", 5, 8, false},
|
||||
{"19070500", "c9ca00", 5, 8, true},
|
||||
// Test cases tested on TestConvertBitsFailures with their corresponding
|
||||
// fixes.
|
||||
{"ff", "1f1c", 8, 5, true},
|
||||
{"1f1c10", "ff20", 5, 8, true},
|
||||
// Large conversions.
|
||||
{
|
||||
"cbe6365ddbcda9a9915422c3f091c13f8c7b2f263b8d34067bd12c274408473fa764871c9dd51b1bb34873b3473b633ed1",
|
||||
"190f13030c170e1b1916141a13040a14040b011f01040e01071e0607160b1906070e06130801131b1a0416020e110008081c1f1a0e19040703120e1d0a06181b160d0407070c1a07070d11131d1408",
|
||||
8, 5, true,
|
||||
},
|
||||
{
|
||||
"190f13030c170e1b1916141a13040a14040b011f01040e01071e0607160b1906070e06130801131b1a0416020e110008081c1f1a0e19040703120e1d0a06181b160d0407070c1a07070d11131d1408",
|
||||
"cbe6365ddbcda9a9915422c3f091c13f8c7b2f263b8d34067bd12c274408473fa764871c9dd51b1bb34873b3473b633ed100",
|
||||
5, 8, true,
|
||||
},
|
||||
}
|
||||
for i, tc := range tests {
|
||||
input, err := hex.DecodeString(tc.input)
|
||||
if err != nil {
|
||||
t.Fatalf("invalid test input data: %v", err)
|
||||
}
|
||||
expected, err := hex.DecodeString(tc.output)
|
||||
if err != nil {
|
||||
t.Fatalf("invalid test output data: %v", err)
|
||||
}
|
||||
actual, err := ConvertBits(input, tc.fromBits, tc.toBits, tc.pad)
|
||||
if err != nil {
|
||||
t.Fatalf("test case %d failed: %v", i, err)
|
||||
}
|
||||
if !utils.FastEqual(actual, expected) {
|
||||
t.Fatalf(
|
||||
"test case %d has wrong output; expected=%x actual=%x",
|
||||
i, expected, actual,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
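A minimal sketch, written as a hypothetical example test for this package, unpacking the {"ff", "1f1c", 8, 5, true} vector above: it shows how padding turns one 8-bit byte into two 5-bit groups. Only ConvertBits and fmt are assumed.

package bech32

import "fmt"

// ExampleConvertBits_padding walks through the {"ff", "1f1c"} vector:
// 0xff is the bit string 11111111, which regroups into the 5-bit groups
// 11111 and 111; with pad=true the trailing group is right-padded with
// zeros to 11100, yielding the two values 0x1f and 0x1c.
func ExampleConvertBits_padding() {
	out, err := ConvertBits([]byte{0xff}, 8, 5, true)
	if err != nil {
		fmt.Println("Error:", err)
		return
	}
	fmt.Printf("%x\n", out)
	// Output:
	// 1f1c
}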
|
||||
|
||||
// TestConvertBitsFailures tests for the expected conversion failures of
|
||||
// ConvertBits().
|
||||
func TestConvertBitsFailures(t *testing.T) {
|
||||
tests := []struct {
|
||||
input string
|
||||
fromBits uint8
|
||||
toBits uint8
|
||||
pad bool
|
||||
err error
|
||||
}{
|
||||
// Not enough output bytes when not using padding.
|
||||
{"ff", 8, 5, false, ErrInvalidIncompleteGroup{}},
|
||||
{"1f1c10", 5, 8, false, ErrInvalidIncompleteGroup{}},
|
||||
// Unsupported bit conversions.
|
||||
{"", 0, 5, false, ErrInvalidBitGroups{}},
|
||||
{"", 10, 5, false, ErrInvalidBitGroups{}},
|
||||
{"", 5, 0, false, ErrInvalidBitGroups{}},
|
||||
{"", 5, 10, false, ErrInvalidBitGroups{}},
|
||||
}
|
||||
for i, tc := range tests {
|
||||
input, err := hex.DecodeString(tc.input)
|
||||
if err != nil {
|
||||
t.Fatalf("invalid test input data: %v", err)
|
||||
}
|
||||
_, err = ConvertBits(input, tc.fromBits, tc.toBits, tc.pad)
|
||||
if err != tc.err {
|
||||
t.Fatalf(
|
||||
"test case %d failure: expected '%v' got '%v'", i,
|
||||
tc.err, err,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkConvertBitsDown benchmarks the speed and memory allocation behavior
|
||||
// of ConvertBits when converting from a higher base into a lower base (e.g. 8
|
||||
// => 5).
|
||||
//
|
||||
// Only a single allocation is expected, which is used for the output array.
|
||||
func BenchmarkConvertBitsDown(b *testing.B) {
|
||||
// Use fixed, 49-byte raw data for testing.
|
||||
inputData, err := hex.DecodeString("cbe6365ddbcda9a9915422c3f091c13f8c7b2f263b8d34067bd12c274408473fa764871c9dd51b1bb34873b3473b633ed1")
|
||||
if err != nil {
|
||||
b.Fatalf("failed to initialize input data: %v", err)
|
||||
}
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := ConvertBits(inputData, 8, 5, true)
|
||||
if err != nil {
|
||||
b.Fatalf("error converting bits: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkConvertBitsUp benchmarks the speed and memory allocation behavior
|
||||
// of ConvertBits when converting from a lower base into a higher base (e.g. 5
|
||||
// => 8).
|
||||
//
|
||||
// Only a single allocation is expected, which is used for the output array.
|
||||
func BenchmarkConvertBitsUp(b *testing.B) {
|
||||
// Use fixed, 79-byte raw data for testing.
|
||||
inputData, err := hex.DecodeString("190f13030c170e1b1916141a13040a14040b011f01040e01071e0607160b1906070e06130801131b1a0416020e110008081c1f1a0e19040703120e1d0a06181b160d0407070c1a07070d11131d1408")
|
||||
if err != nil {
|
||||
b.Fatalf("failed to initialize input data: %v", err)
|
||||
}
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := ConvertBits(inputData, 5, 8, true)
|
||||
if err != nil {
|
||||
b.Fatalf("error converting bits: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
13
pkg/crypto/ec/bech32/doc.go
Normal file
@@ -0,0 +1,13 @@
|
||||
// Copyright (c) 2017 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package bech32 provides a Go implementation of the bech32 format specified in
|
||||
// BIP 173.
|
||||
//
|
||||
// Bech32 strings consist of a human-readable part (hrp), followed by the
|
||||
// separator 1, then a checksummed data part encoded using the 32 characters
|
||||
// "qpzry9x8gf2tvdw0s3jn54khce6mua7l".
|
||||
//
|
||||
// More info: https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki
|
||||
package bech32
|
||||
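A minimal round-trip sketch of the format described above; the hrp "test", the payload, and the helper name are arbitrary choices for illustration.

package bech32

import (
	"bytes"
	"fmt"
)

// roundTrip encodes an arbitrary payload under an arbitrary hrp and checks
// that Decode recovers both. Encode expects 5-bit groups, so the payload is
// regrouped with ConvertBits first and regrouped back after decoding.
func roundTrip() error {
	payload := []byte("nostr") // 40 bits, so it regroups without padding
	grouped, err := ConvertBits(payload, 8, 5, true)
	if err != nil {
		return err
	}
	encoded, err := Encode([]byte("test"), grouped)
	if err != nil {
		return err
	}
	hrp, data, err := Decode(encoded)
	if err != nil {
		return err
	}
	back, err := ConvertBits(data, 5, 8, false)
	if err != nil {
		return err
	}
	if string(hrp) != "test" || !bytes.Equal(back, payload) {
		return fmt.Errorf("round trip mismatch: hrp=%q data=%x", hrp, back)
	}
	return nil
}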
89
pkg/crypto/ec/bech32/error.go
Normal file
@@ -0,0 +1,89 @@
|
||||
// Copyright (c) 2019 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package bech32
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// ErrMixedCase is returned when the bech32 string has both lower and uppercase
|
||||
// characters.
|
||||
type ErrMixedCase struct{}
|
||||
|
||||
func (err ErrMixedCase) Error() string {
|
||||
return "string not all lowercase or all uppercase"
|
||||
}
|
||||
|
||||
// ErrInvalidBitGroups is returned when conversion is attempted between byte
|
||||
// slices using bit-per-element of unsupported value.
|
||||
type ErrInvalidBitGroups struct{}
|
||||
|
||||
func (err ErrInvalidBitGroups) Error() string {
|
||||
return "only bit groups between 1 and 8 allowed"
|
||||
}
|
||||
|
||||
// ErrInvalidIncompleteGroup is returned when the byte slice used as input has
|
||||
// data of wrong length.
|
||||
type ErrInvalidIncompleteGroup struct{}
|
||||
|
||||
func (err ErrInvalidIncompleteGroup) Error() string {
|
||||
return "invalid incomplete group"
|
||||
}
|
||||
|
||||
// ErrInvalidLength is returned when the bech32 string has an invalid length
|
||||
// given the BIP-173 defined restrictions.
|
||||
type ErrInvalidLength int
|
||||
|
||||
func (err ErrInvalidLength) Error() string {
|
||||
return fmt.Sprintf("invalid bech32 string length %d", int(err))
|
||||
}
|
||||
|
||||
// ErrInvalidCharacter is returned when the bech32 string has a character
|
||||
// outside the range of the supported charset.
|
||||
type ErrInvalidCharacter rune
|
||||
|
||||
func (err ErrInvalidCharacter) Error() string {
|
||||
return fmt.Sprintf("invalid character in string: '%c'", rune(err))
|
||||
}
|
||||
|
||||
// ErrInvalidSeparatorIndex is returned when the separator character '1' is
|
||||
// in an invalid position in the bech32 string.
|
||||
type ErrInvalidSeparatorIndex int
|
||||
|
||||
func (err ErrInvalidSeparatorIndex) Error() string {
|
||||
return fmt.Sprintf("invalid separator index %d", int(err))
|
||||
}
|
||||
|
||||
// ErrNonCharsetChar is returned when a character outside of the specific
|
||||
// bech32 charset is used in the string.
|
||||
type ErrNonCharsetChar rune
|
||||
|
||||
func (err ErrNonCharsetChar) Error() string {
|
||||
return fmt.Sprintf("invalid character not part of charset: %v", int(err))
|
||||
}
|
||||
|
||||
// ErrInvalidChecksum is returned when the extracted checksum of the string
|
||||
// is different than what was expected. Both the original version, as well as
|
||||
// the new bech32m checksum may be specified.
|
||||
type ErrInvalidChecksum struct {
|
||||
Expected string
|
||||
ExpectedM string
|
||||
Actual string
|
||||
}
|
||||
|
||||
func (err ErrInvalidChecksum) Error() string {
|
||||
return fmt.Sprintf(
|
||||
"invalid checksum (expected (bech32=%v, "+
|
||||
"bech32m=%v), got %v)", err.Expected, err.ExpectedM, err.Actual,
|
||||
)
|
||||
}
|
||||
|
||||
// ErrInvalidDataByte is returned when a byte outside the range required for
|
||||
// conversion into a string was found.
|
||||
type ErrInvalidDataByte byte
|
||||
|
||||
func (err ErrInvalidDataByte) Error() string {
|
||||
return fmt.Sprintf("invalid data byte: %v", byte(err))
|
||||
}
|
||||
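Because these errors are returned as bare values of the types above (the decode tests compare them with plain equality), a caller can branch on the concrete type. A minimal sketch with a hypothetical helper name:

package bech32

import "fmt"

// describeDecodeError reports a human-readable reason for a failed decode by
// switching on the concrete error types defined in this file.
func describeDecodeError(input []byte) string {
	_, _, err := Decode(input)
	if err == nil {
		return "ok"
	}
	switch e := err.(type) {
	case ErrInvalidChecksum:
		return fmt.Sprintf("checksum mismatch, got %s", e.Actual)
	case ErrNonCharsetChar:
		return fmt.Sprintf("character %q is not in the bech32 charset", rune(e))
	case ErrInvalidSeparatorIndex:
		return fmt.Sprintf("separator '1' at invalid index %d", int(e))
	case ErrInvalidLength:
		return fmt.Sprintf("invalid length %d", int(e))
	case ErrMixedCase:
		return "mixed upper and lower case"
	default:
		return err.Error()
	}
}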
43
pkg/crypto/ec/bech32/example_test.go
Normal file
@@ -0,0 +1,43 @@
|
||||
// Copyright (c) 2017 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package bech32
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// This example demonstrates how to decode a bech32 encoded string.
|
||||
func ExampleDecode() {
|
||||
encoded := "bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx"
|
||||
hrp, decoded, err := Decode([]byte(encoded))
|
||||
if err != nil {
|
||||
fmt.Println("Error:", err)
|
||||
}
|
||||
// Show the decoded data.
|
||||
fmt.Printf("Decoded human-readable part: %s\n", hrp)
|
||||
fmt.Println("Decoded Data:", hex.EncodeToString(decoded))
|
||||
// Output:
|
||||
// Decoded human-readable part: bc
|
||||
// Decoded Data: 010e140f070d1a001912060b0d081504140311021d030c1d03040f1814060e1e160e140f070d1a001912060b0d081504140311021d030c1d03040f1814060e1e16
|
||||
}
|
||||
|
||||
// This example demonstrates how to encode data into a bech32 string.
|
||||
func ExampleEncode() {
|
||||
data := []byte("Test data")
|
||||
// Convert test data to base32:
|
||||
conv, err := ConvertBits(data, 8, 5, true)
|
||||
if err != nil {
|
||||
fmt.Println("Error:", err)
|
||||
}
|
||||
encoded, err := Encode([]byte("customHrp!11111q"), conv)
|
||||
if err != nil {
|
||||
fmt.Println("Error:", err)
|
||||
}
|
||||
// Show the encoded data.
|
||||
fmt.Printf("Encoded Data: %s", encoded)
|
||||
// Output:
|
||||
// Encoded Data: customhrp!11111q123jhxapqv3shgcgkxpuhe
|
||||
}
|
||||
40
pkg/crypto/ec/bech32/version.go
Normal file
@@ -0,0 +1,40 @@
|
||||
package bech32
|
||||
|
||||
// ChecksumConst is a type that represents the currently defined bech32
|
||||
// checksum constants.
|
||||
type ChecksumConst int
|
||||
|
||||
const (
|
||||
// Version0Const is the original constant used in the checksum
|
||||
// verification for bech32.
|
||||
Version0Const ChecksumConst = 1
|
||||
// VersionMConst is the new constant used for bech32m checksum
|
||||
// verification.
|
||||
VersionMConst ChecksumConst = 0x2bc830a3
|
||||
)
|
||||
|
||||
// Version defines the current set of bech32 versions.
|
||||
type Version uint8
|
||||
|
||||
const (
|
||||
// Version0 defines the original bech32 version.
|
||||
Version0 Version = iota
|
||||
// VersionM is the new bech32 version defined in BIP-350, also known as
|
||||
// bech32m.
|
||||
VersionM
|
||||
// VersionUnknown denotes an unknown bech32 version.
|
||||
VersionUnknown
|
||||
)
|
||||
|
||||
// VersionToConsts maps bech32 versions to the checksum constant to be used
|
||||
// when encoding, and asserting a particular version when decoding.
|
||||
var VersionToConsts = map[Version]ChecksumConst{
|
||||
Version0: Version0Const,
|
||||
VersionM: VersionMConst,
|
||||
}
|
||||
|
||||
// ConstsToVersion maps a bech32 constant to the version it's associated with.
|
||||
var ConstsToVersion = map[ChecksumConst]Version{
|
||||
Version0Const: Version0,
|
||||
VersionMConst: VersionM,
|
||||
}
|
||||
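A minimal sketch of the validation the generic decoder enables, assuming the DecodeGeneric signature exercised by TestBech32DecodeGeneric earlier; the helper name and witnessVer parameter are illustrative only.

package bech32

// checksumMatchesWitnessVersion decodes generically and checks that the
// checksum variant matches what segwit requires: version 0 programs must use
// bech32, later versions must use bech32m.
func checksumMatchesWitnessVersion(addr []byte, witnessVer byte) (bool, error) {
	_, _, version, err := DecodeGeneric(addr)
	if err != nil {
		return false, err
	}
	if witnessVer == 0 {
		return version == Version0, nil
	}
	return version == VersionM, nil
}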
188
pkg/crypto/ec/bench_test.go
Normal file
@@ -0,0 +1,188 @@
|
||||
// Copyright 2013-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// setHex decodes the passed big-endian hex string into the internal field value
|
||||
// representation. Only the first 32-bytes are used.
|
||||
//
|
||||
// This is NOT constant time.
|
||||
//
|
||||
// The field value is returned to support chaining. This enables syntax like:
|
||||
// f := new(FieldVal).SetHex("0abc").Add(1) so that f = 0x0abc + 1
|
||||
func setHex(hexString string) *FieldVal {
|
||||
if len(hexString)%2 != 0 {
|
||||
hexString = "0" + hexString
|
||||
}
|
||||
bytes, _ := hex.Dec(hexString)
|
||||
var f FieldVal
|
||||
f.SetByteSlice(bytes)
|
||||
return &f
|
||||
}
|
||||
|
||||
// hexToFieldVal converts the passed hex string into a FieldVal and will panic
|
||||
// if there is an error. This is only provided for the hard-coded constants so
|
||||
// errors in the source code can be detected. It will only (and must only) be
|
||||
// called with hard-coded values.
|
||||
func hexToFieldVal(s string) *FieldVal {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var f FieldVal
|
||||
if overflow := f.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod P: " + s)
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
// fromHex converts the passed hex string into a big integer pointer and will
|
||||
// panic if there is an error. This is only provided for the hard-coded
|
||||
// constants so errors in the source code can be detected. It will only (and
|
||||
// must only) be called for initialization purposes.
|
||||
func fromHex(s string) *big.Int {
|
||||
if s == "" {
|
||||
return big.NewInt(0)
|
||||
}
|
||||
r, ok := new(big.Int).SetString(s, 16)
|
||||
if !ok {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// jacobianPointFromHex decodes the passed big-endian hex strings into a
|
||||
// Jacobian point with its internal fields set to the resulting values. Only
|
||||
// the first 32-bytes are used.
|
||||
func jacobianPointFromHex(x, y, z string) JacobianPoint {
|
||||
var p JacobianPoint
|
||||
p.X = *setHex(x)
|
||||
p.Y = *setHex(y)
|
||||
p.Z = *setHex(z)
|
||||
return p
|
||||
}
|
||||
|
||||
// BenchmarkAddJacobian benchmarks the secp256k1 curve AddNonConst function with
|
||||
// Z values of 1 so that the associated optimizations are used.
|
||||
func BenchmarkAddJacobian(b *testing.B) {
|
||||
p1 := jacobianPointFromHex(
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
)
|
||||
p2 := jacobianPointFromHex(
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var result JacobianPoint
|
||||
for i := 0; i < b.N; i++ {
|
||||
secp256k1.AddNonConst(&p1, &p2, &result)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkAddJacobianNotZOne benchmarks the secp256k1 curve AddNonConst
|
||||
// function with Z values other than one so the optimizations associated with
|
||||
// Z=1 aren't used.
|
||||
func BenchmarkAddJacobianNotZOne(b *testing.B) {
|
||||
x1 := setHex("d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718")
|
||||
y1 := setHex("5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190")
|
||||
z1 := setHex("2")
|
||||
x2 := setHex("91abba6a34b7481d922a4bd6a04899d5a686f6cf6da4e66a0cb427fb25c04bd4")
|
||||
y2 := setHex("03fede65e30b4e7576a2abefc963ddbf9fdccbf791b77c29beadefe49951f7d1")
|
||||
z2 := setHex("3")
|
||||
p1 := MakeJacobianPoint(x1, y1, z1)
|
||||
p2 := MakeJacobianPoint(x2, y2, z2)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var result JacobianPoint
|
||||
for i := 0; i < b.N; i++ {
|
||||
AddNonConst(&p1, &p2, &result)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarBaseMult benchmarks the secp256k1 curve ScalarBaseMult
|
||||
// function.
|
||||
func BenchmarkScalarBaseMult(b *testing.B) {
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
curve := S256()
|
||||
for i := 0; i < b.N; i++ {
|
||||
curve.ScalarBaseMult(k.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarBaseMultLarge benchmarks the secp256k1 curve ScalarBaseMult
|
||||
// function with abnormally large k values.
|
||||
func BenchmarkScalarBaseMultLarge(b *testing.B) {
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c005751111111011111110")
|
||||
curve := S256()
|
||||
for i := 0; i < b.N; i++ {
|
||||
curve.ScalarBaseMult(k.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarMult benchmarks the secp256k1 curve ScalarMult function.
|
||||
func BenchmarkScalarMult(b *testing.B) {
|
||||
x := fromHex("34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6")
|
||||
y := fromHex("0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232")
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
curve := S256()
|
||||
for i := 0; i < b.N; i++ {
|
||||
curve.ScalarMult(x, y, k.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
// hexToModNScalar converts the passed hex string into a ModNScalar and will
|
||||
// panic if there is an error. This is only provided for the hard-coded
|
||||
// constants so errors in the source code can be detected. It will only (and
|
||||
// must only) be called with hard-coded values.
|
||||
func hexToModNScalar(s string) *ModNScalar {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var scalar ModNScalar
|
||||
if overflow := scalar.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod N scalar: " + s)
|
||||
}
|
||||
return &scalar
|
||||
}
|
||||
|
||||
// BenchmarkFieldNormalize benchmarks how long it takes the internal field
|
||||
// to perform normalization (which includes modular reduction).
|
||||
func BenchmarkFieldNormalize(b *testing.B) {
|
||||
// The normalize function is constant time so default value is fine.
|
||||
var f FieldVal
|
||||
for i := 0; i < b.N; i++ {
|
||||
f.Normalize()
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkParseCompressedPubKey benchmarks how long it takes to decompress and
|
||||
// validate a compressed public key from a byte array.
|
||||
func BenchmarkParseCompressedPubKey(b *testing.B) {
|
||||
rawPk, _ := hex.Dec("0234f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6")
|
||||
|
||||
var (
|
||||
pk *PublicKey
|
||||
err error
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
pk, err = ParsePubKey(rawPk)
|
||||
}
|
||||
_ = pk
|
||||
_ = err
|
||||
}
|
||||
53
pkg/crypto/ec/btcec.go
Normal file
@@ -0,0 +1,53 @@
|
||||
// Copyright 2010 The Go Authors. All rights reserved.
|
||||
// Copyright 2011 ThePiachu. All rights reserved.
|
||||
// Copyright 2013-2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
// References:
|
||||
// [SECG]: Recommended Elliptic Curve Domain Parameters
|
||||
// http://www.secg.org/sec2-v2.pdf
|
||||
//
|
||||
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||
|
||||
// This package operates, internally, on Jacobian coordinates. For a given
|
||||
// (x, y) position on the curve, the Jacobian coordinates are (x1, y1, z1)
|
||||
// where x = x1/z1² and y = y1/z1³. The greatest speedups come when the whole
|
||||
// calculation can be performed within the transform (as in ScalarMult and
|
||||
// ScalarBaseMult). But even for Add and Double, it's faster to apply and
|
||||
// reverse the transform than to operate in affine coordinates.
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// KoblitzCurve provides an implementation for secp256k1 that fits the ECC
|
||||
// Curve interface from crypto/elliptic.
|
||||
type KoblitzCurve = secp256k1.KoblitzCurve
|
||||
|
||||
// S256 returns a Curve which implements secp256k1.
|
||||
func S256() *KoblitzCurve {
|
||||
return secp256k1.S256()
|
||||
}
|
||||
|
||||
// CurveParams contains the parameters for the secp256k1 curve.
|
||||
type CurveParams = secp256k1.CurveParams
|
||||
|
||||
// Params returns the secp256k1 curve parameters for convenience.
|
||||
func Params() *CurveParams {
|
||||
return secp256k1.Params()
|
||||
}
|
||||
|
||||
// Generator returns the public key at the Generator Point.
|
||||
func Generator() *PublicKey {
|
||||
var (
|
||||
result JacobianPoint
|
||||
k secp256k1.ModNScalar
|
||||
)
|
||||
k.SetInt(1)
|
||||
ScalarBaseMultNonConst(&k, &result)
|
||||
result.ToAffine()
|
||||
return NewPublicKey(&result.X, &result.Y)
|
||||
}
|
||||
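A minimal sketch of the workflow described in the package comment above, mirroring Generator with an arbitrary scalar k = 2: all arithmetic stays in Jacobian coordinates and the affine conversion happens once at the end.

package btcec

import (
	"next.orly.dev/pkg/crypto/ec/secp256k1"
)

// doubleGenerator computes 2*G in Jacobian coordinates and only converts to
// affine (x = X/Z², y = Y/Z³) once the multiplication is done.
func doubleGenerator() *PublicKey {
	var (
		result JacobianPoint
		k      secp256k1.ModNScalar
	)
	k.SetInt(2)
	ScalarBaseMultNonConst(&k, &result)
	result.ToAffine()
	return NewPublicKey(&result.X, &result.Y)
}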
918
pkg/crypto/ec/btcec_test.go
Normal file
@@ -0,0 +1,918 @@
|
||||
// Copyright 2011 The Go Authors. All rights reserved.
|
||||
// Copyright 2011 ThePiachu. All rights reserved.
|
||||
// Copyright 2013-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// isJacobianOnS256Curve returns whether the point (x,y,z) is on the
|
||||
// secp256k1 curve.
|
||||
func isJacobianOnS256Curve(point *JacobianPoint) bool {
|
||||
// Elliptic curve equation for secp256k1 is: y^2 = x^3 + 7
|
||||
// In Jacobian coordinates, Y = y/z^3 and X = x/z^2
|
||||
// Thus:
|
||||
// (y/z^3)^2 = (x/z^2)^3 + 7
|
||||
// y^2/z^6 = x^3/z^6 + 7
|
||||
// y^2 = x^3 + 7*z^6
|
||||
var y2, z2, x3, result FieldVal
|
||||
y2.SquareVal(&point.Y).Normalize()
|
||||
z2.SquareVal(&point.Z)
|
||||
x3.SquareVal(&point.X).Mul(&point.X)
|
||||
result.SquareVal(&z2).Mul(&z2).MulInt(7).Add(&x3).Normalize()
|
||||
return y2.Equals(&result)
|
||||
}
|
||||
|
||||
// TestAddJacobian tests addition of points projected in Jacobian coordinates.
|
||||
func TestAddJacobian(t *testing.T) {
|
||||
tests := []struct {
|
||||
x1, y1, z1 string // Coordinates (in hex) of first point to add
|
||||
x2, y2, z2 string // Coordinates (in hex) of second point to add
|
||||
x3, y3, z3 string // Coordinates (in hex) of expected point
|
||||
}{
|
||||
// Addition with a point at infinity (left hand side).
|
||||
// ∞ + P = P
|
||||
{
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"1",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"1",
|
||||
},
|
||||
// Addition with a point at infinity (right hand side).
|
||||
// P + ∞ = P
|
||||
{
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"1",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"1",
|
||||
},
|
||||
// Addition with z1=z2=1 different x values.
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"1",
|
||||
"0cfbc7da1e569b334460788faae0286e68b3af7379d5504efc25e4dba16e46a6",
|
||||
"e205f79361bbe0346b037b4010985dbf4f9e1e955e7d0d14aca876bfa79aad87",
|
||||
"44a5646b446e3877a648d6d381370d9ef55a83b666ebce9df1b1d7d65b817b2f",
|
||||
},
|
||||
// Addition with z1=z2=1 same x opposite y.
|
||||
// P(x, y, z) + P(x, -y, z) = infinity
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"f48e156428cf0276dc092da5856e182288d7569f97934a56fe44be60f0d359fd",
|
||||
"1",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Addition with z1=z2=1 same point.
|
||||
// P(x, y, z) + P(x, y, z) = 2P
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
"ec9f153b13ee7bd915882859635ea9730bf0dc7611b2c7b0e37ee64f87c50c27",
|
||||
"b082b53702c466dcf6e984a35671756c506c67c2fcb8adb408c44dd0755c8f2a",
|
||||
"16e3d537ae61fb1247eda4b4f523cfbaee5152c0d0d96b520376833c1e594464",
|
||||
},
|
||||
|
||||
// Addition with z1=z2 (!=1) different x values.
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"5d2fe112c21891d440f65a98473cb626111f8a234d2cd82f22172e369f002147",
|
||||
"98e3386a0a622a35c4561ffb32308d8e1c6758e10ebb1b4ebd3d04b4eb0ecbe8",
|
||||
"2",
|
||||
"cfbc7da1e569b334460788faae0286e68b3af7379d5504efc25e4dba16e46a60",
|
||||
"817de4d86ef80d1ac0ded00426176fd3e787a5579f43452b2a1db021e6ac3778",
|
||||
"129591ad11b8e1de99235b4e04dc367bd56a0ed99baf3a77c6c75f5a6e05f08d",
|
||||
},
|
||||
// Addition with z1=z2 (!=1) same x opposite y.
|
||||
// P(x, y, z) + P(x, -y, z) = infinity
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"a470ab21467813b6e0496d2c2b70c11446bab4fcbc9a52b7f225f30e869aea9f",
|
||||
"2",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Addition with z1=z2 (!=1) same point.
|
||||
// P(x, y, z) + P(x, y, z) = 2P
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"9f153b13ee7bd915882859635ea9730bf0dc7611b2c7b0e37ee65073c50fabac",
|
||||
"2b53702c466dcf6e984a35671756c506c67c2fcb8adb408c44dd125dc91cb988",
|
||||
"6e3d537ae61fb1247eda4b4f523cfbaee5152c0d0d96b520376833c2e5944a11",
|
||||
},
|
||||
|
||||
// Addition with z1!=z2 and z2=1 different x values.
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"1",
|
||||
"3ef1f68795a6ccd1181e23eab80a1b9a2cebdcde755413bf097936eb5b91b4f3",
|
||||
"0bef26c377c068d606f6802130bb7e9f3c3d2abcfa1a295950ed81133561cb04",
|
||||
"252b235a2371c3bd3246b69c09b86cf7aad41db3375e74ef8d8ebeb4dc0be11a",
|
||||
},
|
||||
// Addition with z1!=z2 and z2=1 same x opposite y.
|
||||
// P(x, y, z) + P(x, -y, z) = infinity
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"f48e156428cf0276dc092da5856e182288d7569f97934a56fe44be60f0d359fd",
|
||||
"1",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Addition with z1!=z2 and z2=1 same point.
|
||||
// P(x, y, z) + P(x, y, z) = 2P
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
"9f153b13ee7bd915882859635ea9730bf0dc7611b2c7b0e37ee65073c50fabac",
|
||||
"2b53702c466dcf6e984a35671756c506c67c2fcb8adb408c44dd125dc91cb988",
|
||||
"6e3d537ae61fb1247eda4b4f523cfbaee5152c0d0d96b520376833c2e5944a11",
|
||||
},
|
||||
|
||||
// Addition with z1!=z2 and z2!=1 different x values.
|
||||
// P(x, y, z) + P(x, y, z) = 2P
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"91abba6a34b7481d922a4bd6a04899d5a686f6cf6da4e66a0cb427fb25c04bd4",
|
||||
"03fede65e30b4e7576a2abefc963ddbf9fdccbf791b77c29beadefe49951f7d1",
|
||||
"3",
|
||||
"3f07081927fd3f6dadd4476614c89a09eba7f57c1c6c3b01fa2d64eac1eef31e",
|
||||
"949166e04ebc7fd95a9d77e5dfd88d1492ecffd189792e3944eb2b765e09e031",
|
||||
"eb8cba81bcffa4f44d75427506737e1f045f21e6d6f65543ee0e1d163540c931",
|
||||
}, // Addition with z1!=z2 and z2!=1 same x opposite y.
|
||||
// P(x, y, z) + P(x, -y, z) = infinity
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"dcc3768780c74a0325e2851edad0dc8a566fa61a9e7fc4a34d13dcb509f99bc7",
|
||||
"cafc41904dd5428934f7d075129c8ba46eb622d4fc88d72cd1401452664add18",
|
||||
"3",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Addition with z1!=z2 and z2!=1 same point.
|
||||
// P(x, y, z) + P(x, y, z) = 2P
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"dcc3768780c74a0325e2851edad0dc8a566fa61a9e7fc4a34d13dcb509f99bc7",
|
||||
"3503be6fb22abd76cb082f8aed63745b9149dd2b037728d32ebfebac99b51f17",
|
||||
"3",
|
||||
"9f153b13ee7bd915882859635ea9730bf0dc7611b2c7b0e37ee65073c50fabac",
|
||||
"2b53702c466dcf6e984a35671756c506c67c2fcb8adb408c44dd125dc91cb988",
|
||||
"6e3d537ae61fb1247eda4b4f523cfbaee5152c0d0d96b520376833c2e5944a11",
|
||||
},
|
||||
}
|
||||
|
||||
t.Logf("Running %d tests", len(tests))
|
||||
for i, test := range tests {
|
||||
// Convert hex to Jacobian points.
|
||||
p1 := jacobianPointFromHex(test.x1, test.y1, test.z1)
|
||||
p2 := jacobianPointFromHex(test.x2, test.y2, test.z2)
|
||||
want := jacobianPointFromHex(test.x3, test.y3, test.z3)
|
||||
// Ensure the test data is using points that are actually on
|
||||
// the curve (or the point at infinity).
|
||||
if !p1.Z.IsZero() && !isJacobianOnS256Curve(&p1) {
|
||||
t.Errorf(
|
||||
"#%d first point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !p2.Z.IsZero() && !isJacobianOnS256Curve(&p2) {
|
||||
t.Errorf(
|
||||
"#%d second point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !want.Z.IsZero() && !isJacobianOnS256Curve(&want) {
|
||||
t.Errorf(
|
||||
"#%d expected point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Add the two points.
|
||||
var r JacobianPoint
|
||||
AddNonConst(&p1, &p2, &r)
|
||||
|
||||
// Ensure result matches expected.
|
||||
if !r.X.Equals(&want.X) || !r.Y.Equals(&want.Y) || !r.Z.Equals(&want.Z) {
|
||||
t.Errorf(
|
||||
"#%d wrong result\ngot: (%v, %v, %v)\n"+
|
||||
"want: (%v, %v, %v)", i, r.X, r.Y, r.Z, want.X, want.Y,
|
||||
want.Z,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestAddAffine tests addition of points in affine coordinates.
|
||||
func TestAddAffine(t *testing.T) {
|
||||
tests := []struct {
|
||||
x1, y1 string // Coordinates (in hex) of first point to add
|
||||
x2, y2 string // Coordinates (in hex) of second point to add
|
||||
x3, y3 string // Coordinates (in hex) of expected point
|
||||
}{
|
||||
// Addition with a point at infinity (left hand side).
|
||||
// ∞ + P = P
|
||||
{
|
||||
"0",
|
||||
"0",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
},
|
||||
// Addition with a point at infinity (right hand side).
|
||||
// P + ∞ = P
|
||||
{
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"0",
|
||||
"0",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
},
|
||||
|
||||
// Addition with different x values.
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
"131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
"fd5b88c21d3143518d522cd2796f3d726793c88b3e05636bc829448e053fed69",
|
||||
"21cf4f6a5be5ff6380234c50424a970b1f7e718f5eb58f68198c108d642a137f",
|
||||
},
|
||||
// Addition with same x opposite y.
|
||||
// P(x, y) + P(x, -y) = infinity
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"f48e156428cf0276dc092da5856e182288d7569f97934a56fe44be60f0d359fd",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Addition with same point.
|
||||
// P(x, y) + P(x, y) = 2P
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"59477d88ae64a104dbb8d31ec4ce2d91b2fe50fa628fb6a064e22582196b365b",
|
||||
"938dc8c0f13d1e75c987cb1a220501bd614b0d3dd9eb5c639847e1240216e3b6",
|
||||
},
|
||||
}
|
||||
t.Logf("Running %d tests", len(tests))
|
||||
for i, test := range tests {
|
||||
// Convert hex to field values.
|
||||
x1, y1 := fromHex(test.x1), fromHex(test.y1)
|
||||
x2, y2 := fromHex(test.x2), fromHex(test.y2)
|
||||
x3, y3 := fromHex(test.x3), fromHex(test.y3)
|
||||
// Ensure the test data is using points that are actually on
|
||||
// the curve (or the point at infinity).
|
||||
if !(x1.Sign() == 0 && y1.Sign() == 0) && !S256().IsOnCurve(x1, y1) {
|
||||
t.Errorf(
|
||||
"#%d first point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !(x2.Sign() == 0 && y2.Sign() == 0) && !S256().IsOnCurve(x2, y2) {
|
||||
t.Errorf(
|
||||
"#%d second point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !(x3.Sign() == 0 && y3.Sign() == 0) && !S256().IsOnCurve(x3, y3) {
|
||||
t.Errorf(
|
||||
"#%d expected point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Add the two points.
|
||||
rx, ry := S256().Add(x1, y1, x2, y2)
|
||||
|
||||
// Ensure result matches expected.
|
||||
if rx.Cmp(x3) != 0 || ry.Cmp(y3) != 0 {
|
||||
t.Errorf(
|
||||
"#%d wrong result\ngot: (%x, %x)\n"+
|
||||
"want: (%x, %x)", i, rx, ry, x3, y3,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// isStrictlyEqual returns whether or not the two Jacobian points are strictly
|
||||
// equal for use in the tests. Recall that several Jacobian points can be
|
||||
// equal in affine coordinates, while not having the same coordinates in
|
||||
// projective space, so the two points not being equal doesn't necessarily mean
|
||||
// they aren't actually the same affine point.
|
||||
func isStrictlyEqual(p, other *JacobianPoint) bool {
|
||||
return p.X.Equals(&other.X) && p.Y.Equals(&other.Y) && p.Z.Equals(&other.Z)
|
||||
}
|
||||
|
||||
// TestDoubleJacobian tests doubling of points projected in Jacobian
|
||||
// coordinates.
|
||||
func TestDoubleJacobian(t *testing.T) {
|
||||
tests := []struct {
|
||||
x1, y1, z1 string // Coordinates (in hex) of point to double
|
||||
x3, y3, z3 string // Coordinates (in hex) of expected point
|
||||
}{
|
||||
// Doubling a point at infinity is still infinity.
|
||||
{
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Doubling with z1=1.
|
||||
{
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
"ec9f153b13ee7bd915882859635ea9730bf0dc7611b2c7b0e37ee64f87c50c27",
|
||||
"b082b53702c466dcf6e984a35671756c506c67c2fcb8adb408c44dd0755c8f2a",
|
||||
"16e3d537ae61fb1247eda4b4f523cfbaee5152c0d0d96b520376833c1e594464",
|
||||
},
|
||||
// Doubling with z1!=1.
|
||||
{
|
||||
"d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718",
|
||||
"5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190",
|
||||
"2",
|
||||
"9f153b13ee7bd915882859635ea9730bf0dc7611b2c7b0e37ee65073c50fabac",
|
||||
"2b53702c466dcf6e984a35671756c506c67c2fcb8adb408c44dd125dc91cb988",
|
||||
"6e3d537ae61fb1247eda4b4f523cfbaee5152c0d0d96b520376833c2e5944a11",
|
||||
},
|
||||
// From btcd issue #709.
|
||||
{
|
||||
"201e3f75715136d2f93c4f4598f91826f94ca01f4233a5bd35de9708859ca50d",
|
||||
"bdf18566445e7562c6ada68aef02d498d7301503de5b18c6aef6e2b1722412e1",
|
||||
"0000000000000000000000000000000000000000000000000000000000000001",
|
||||
"4a5e0559863ebb4e9ed85f5c4fa76003d05d9a7626616e614a1f738621e3c220",
|
||||
"00000000000000000000000000000000000000000000000000000001b1388778",
|
||||
"7be30acc88bceac58d5b4d15de05a931ae602a07bcb6318d5dedc563e4482993",
|
||||
},
|
||||
}
|
||||
t.Logf("Running %d tests", len(tests))
|
||||
for i, test := range tests {
|
||||
// Convert hex to field values.
|
||||
p1 := jacobianPointFromHex(test.x1, test.y1, test.z1)
|
||||
want := jacobianPointFromHex(test.x3, test.y3, test.z3)
|
||||
// Ensure the test data is using points that are actually on
|
||||
// the curve (or the point at infinity).
|
||||
if !p1.Z.IsZero() && !isJacobianOnS256Curve(&p1) {
|
||||
t.Errorf(
|
||||
"#%d first point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !want.Z.IsZero() && !isJacobianOnS256Curve(&want) {
|
||||
t.Errorf(
|
||||
"#%d expected point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Double the point.
|
||||
var result JacobianPoint
|
||||
DoubleNonConst(&p1, &result)
|
||||
// Ensure result matches expected.
|
||||
if !isStrictlyEqual(&result, &want) {
|
||||
t.Errorf(
|
||||
"#%d wrong result\ngot: (%v, %v, %v)\n"+
|
||||
"want: (%v, %v, %v)", i, result.X, result.Y, result.Z,
|
||||
want.X, want.Y, want.Z,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestDoubleAffine tests doubling of points in affine coordinates.
|
||||
func TestDoubleAffine(t *testing.T) {
|
||||
tests := []struct {
|
||||
x1, y1 string // Coordinates (in hex) of point to double
|
||||
x3, y3 string // Coordinates (in hex) of expected point
|
||||
}{
|
||||
// Doubling a point at infinity is still infinity.
|
||||
// 2*∞ = ∞ (point at infinity)
|
||||
{
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
"0",
|
||||
},
|
||||
// Random points.
|
||||
{
|
||||
"e41387ffd8baaeeb43c2faa44e141b19790e8ac1f7ff43d480dc132230536f86",
|
||||
"1b88191d430f559896149c86cbcb703193105e3cf3213c0c3556399836a2b899",
|
||||
"88da47a089d333371bd798c548ef7caae76e737c1980b452d367b3cfe3082c19",
|
||||
"3b6f659b09a362821dfcfefdbfbc2e59b935ba081b6c249eb147b3c2100b1bc1",
|
||||
},
|
||||
{
|
||||
"b3589b5d984f03ef7c80aeae444f919374799edf18d375cab10489a3009cff0c",
|
||||
"c26cf343875b3630e15bccc61202815b5d8f1fd11308934a584a5babe69db36a",
|
||||
"e193860172998751e527bb12563855602a227fc1f612523394da53b746bb2fb1",
|
||||
"2bfcf13d2f5ab8bb5c611fab5ebbed3dc2f057062b39a335224c22f090c04789",
|
||||
},
|
||||
{
|
||||
"2b31a40fbebe3440d43ac28dba23eee71c62762c3fe3dbd88b4ab82dc6a82340",
|
||||
"9ba7deb02f5c010e217607fd49d58db78ec273371ea828b49891ce2fd74959a1",
|
||||
"2c8d5ef0d343b1a1a48aa336078eadda8481cb048d9305dc4fdf7ee5f65973a2",
|
||||
"bb4914ac729e26d3cd8f8dc8f702f3f4bb7e0e9c5ae43335f6e94c2de6c3dc95",
|
||||
},
|
||||
{
|
||||
"61c64b760b51981fab54716d5078ab7dffc93730b1d1823477e27c51f6904c7a",
|
||||
"ef6eb16ea1a36af69d7f66524c75a3a5e84c13be8fbc2e811e0563c5405e49bd",
|
||||
"5f0dcdd2595f5ad83318a0f9da481039e36f135005420393e72dfca985b482f4",
|
||||
"a01c849b0837065c1cb481b0932c441f49d1cab1b4b9f355c35173d93f110ae0",
|
||||
},
|
||||
}
|
||||
|
||||
t.Logf("Running %d tests", len(tests))
|
||||
for i, test := range tests {
|
||||
// Convert hex to field values.
|
||||
x1, y1 := fromHex(test.x1), fromHex(test.y1)
|
||||
x3, y3 := fromHex(test.x3), fromHex(test.y3)
|
||||
// Ensure the test data is using points that are actually on
|
||||
// the curve (or the point at infinity).
|
||||
if !(x1.Sign() == 0 && y1.Sign() == 0) && !S256().IsOnCurve(x1, y1) {
|
||||
t.Errorf(
|
||||
"#%d first point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if !(x3.Sign() == 0 && y3.Sign() == 0) && !S256().IsOnCurve(x3, y3) {
|
||||
t.Errorf(
|
||||
"#%d expected point is not on the curve -- "+
|
||||
"invalid test data", i,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Double the point.
|
||||
rx, ry := S256().Double(x1, y1)
|
||||
|
||||
// Ensure result matches expected.
|
||||
if rx.Cmp(x3) != 0 || ry.Cmp(y3) != 0 {
|
||||
t.Errorf(
|
||||
"#%d wrong result\ngot: (%x, %x)\n"+
|
||||
"want: (%x, %x)", i, rx, ry, x3, y3,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestOnCurve(t *testing.T) {
|
||||
s256 := S256()
|
||||
if !s256.IsOnCurve(s256.Params().Gx, s256.Params().Gy) {
|
||||
t.Errorf("FAIL S256")
|
||||
}
|
||||
}
|
||||
|
||||
type baseMultTest struct {
|
||||
k string
|
||||
x, y string
|
||||
}
|
||||
|
||||
// TODO: add more test vectors
|
||||
var s256BaseMultTests = []baseMultTest{
|
||||
{
|
||||
"AA5E28D6A97A2479A65527F7290311A3624D4CC0FA1578598EE3C2613BF99522",
|
||||
"34F9460F0E4F08393D192B3C5133A6BA099AA0AD9FD54EBCCFACDFA239FF49C6",
|
||||
"B71EA9BD730FD8923F6D25A7A91E7DD7728A960686CB5A901BB419E0F2CA232",
|
||||
},
|
||||
{
|
||||
"7E2B897B8CEBC6361663AD410835639826D590F393D90A9538881735256DFAE3",
|
||||
"D74BF844B0862475103D96A611CF2D898447E288D34B360BC885CB8CE7C00575",
|
||||
"131C670D414C4546B88AC3FF664611B1C38CEB1C21D76369D7A7A0969D61D97D",
|
||||
},
|
||||
{
|
||||
"6461E6DF0FE7DFD05329F41BF771B86578143D4DD1F7866FB4CA7E97C5FA945D",
|
||||
"E8AECC370AEDD953483719A116711963CE201AC3EB21D3F3257BB48668C6A72F",
|
||||
"C25CAF2F0EBA1DDB2F0F3F47866299EF907867B7D27E95B3873BF98397B24EE1",
|
||||
},
|
||||
{
|
||||
"376A3A2CDCD12581EFFF13EE4AD44C4044B8A0524C42422A7E1E181E4DEECCEC",
|
||||
"14890E61FCD4B0BD92E5B36C81372CA6FED471EF3AA60A3E415EE4FE987DABA1",
|
||||
"297B858D9F752AB42D3BCA67EE0EB6DCD1C2B7B0DBE23397E66ADC272263F982",
|
||||
},
|
||||
{
|
||||
"1B22644A7BE026548810C378D0B2994EEFA6D2B9881803CB02CEFF865287D1B9",
|
||||
"F73C65EAD01C5126F28F442D087689BFA08E12763E0CEC1D35B01751FD735ED3",
|
||||
"F449A8376906482A84ED01479BD18882B919C140D638307F0C0934BA12590BDE",
|
||||
},
|
||||
}
|
||||
|
||||
// TODO: test different curves as well?
|
||||
func TestBaseMult(t *testing.T) {
|
||||
s256 := S256()
|
||||
for i, e := range s256BaseMultTests {
|
||||
k, ok := new(big.Int).SetString(e.k, 16)
|
||||
if !ok {
|
||||
t.Errorf("%d: bad value for k: %s", i, e.k)
|
||||
}
|
||||
x, y := s256.ScalarBaseMult(k.Bytes())
|
||||
if fmt.Sprintf("%X", x) != e.x || fmt.Sprintf("%X", y) != e.y {
|
||||
t.Errorf(
|
||||
"%d: bad output for k=%s: got (%X, %X), want (%s, %s)", i,
|
||||
e.k, x, y, e.x, e.y,
|
||||
)
|
||||
}
|
||||
if testing.Short() && i > 5 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestBaseMultVerify(t *testing.T) {
|
||||
s256 := S256()
|
||||
for bytes := 1; bytes < 40; bytes++ {
|
||||
for i := 0; i < 30; i++ {
|
||||
data := make([]byte, bytes)
|
||||
_, err := rand.Read(data)
|
||||
if err != nil {
|
||||
t.Errorf("failed to read random data for %d", i)
|
||||
continue
|
||||
}
|
||||
x, y := s256.ScalarBaseMult(data)
|
||||
xWant, yWant := s256.ScalarMult(s256.Gx, s256.Gy, data)
|
||||
if x.Cmp(xWant) != 0 || y.Cmp(yWant) != 0 {
|
||||
t.Errorf(
|
||||
"%d: bad output for %X: got (%X, %X), want (%X, %X)",
|
||||
i, data, x, y, xWant, yWant,
|
||||
)
|
||||
}
|
||||
if testing.Short() && i > 2 {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestScalarMult(t *testing.T) {
|
||||
tests := []struct {
|
||||
x string
|
||||
y string
|
||||
k string
|
||||
rx string
|
||||
ry string
|
||||
}{
|
||||
// base mult, essentially.
|
||||
{
|
||||
"79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798",
|
||||
"483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8",
|
||||
"18e14a7b6a307f426a94f8114701e7c8e774e7f9a47e2c2035db29a206321725",
|
||||
"50863ad64a87ae8a2fe83c1af1a8403cb53f53e486d8511dad8a04887e5b2352",
|
||||
"2cd470243453a299fa9e77237716103abc11a1df38855ed6f2ee187e9c582ba6",
|
||||
},
|
||||
// From btcd issue #709.
|
||||
{
|
||||
"000000000000000000000000000000000000000000000000000000000000002c",
|
||||
"420e7a99bba18a9d3952597510fd2b6728cfeafc21a4e73951091d4d8ddbe94e",
|
||||
"a2e8ba2e8ba2e8ba2e8ba2e8ba2e8ba219b51835b55cc30ebfe2f6599bc56f58",
|
||||
"a2112dcdfbcd10ae1133a358de7b82db68e0a3eb4b492cc8268d1e7118c98788",
|
||||
"27fc7463b7bb3c5f98ecf2c84a6272bb1681ed553d92c69f2dfe25a9f9fd3836",
|
||||
},
|
||||
}
|
||||
s256 := S256()
|
||||
for i, test := range tests {
|
||||
x, _ := new(big.Int).SetString(test.x, 16)
|
||||
y, _ := new(big.Int).SetString(test.y, 16)
|
||||
k, _ := new(big.Int).SetString(test.k, 16)
|
||||
xWant, _ := new(big.Int).SetString(test.rx, 16)
|
||||
yWant, _ := new(big.Int).SetString(test.ry, 16)
|
||||
xGot, yGot := s256.ScalarMult(x, y, k.Bytes())
|
||||
if xGot.Cmp(xWant) != 0 || yGot.Cmp(yWant) != 0 {
|
||||
t.Fatalf(
|
||||
"%d: bad output: got (%X, %X), want (%X, %X)", i, xGot,
|
||||
yGot, xWant, yWant,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestScalarMultRand(t *testing.T) {
|
||||
// Strategy for this test:
|
||||
// Get a random exponent from the generator point at first
|
||||
// This creates a new point which is used in the next iteration
|
||||
// Use another random exponent on the new point.
|
||||
// We use BaseMult to verify by multiplying the previous exponent
|
||||
// and the new random exponent together (mod N).
|
||||
s256 := S256()
|
||||
x, y := s256.Gx, s256.Gy
|
||||
exponent := big.NewInt(1)
|
||||
for i := 0; i < 1024; i++ {
|
||||
data := make([]byte, 32)
|
||||
_, err := rand.Read(data)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read random data at %d", i)
|
||||
break
|
||||
}
|
||||
x, y = s256.ScalarMult(x, y, data)
|
||||
exponent.Mul(exponent, new(big.Int).SetBytes(data))
|
||||
xWant, yWant := s256.ScalarBaseMult(exponent.Bytes())
|
||||
if x.Cmp(xWant) != 0 || y.Cmp(yWant) != 0 {
|
||||
t.Fatalf(
|
||||
"%d: bad output for %X: got (%X, %X), want (%X, %X)", i,
|
||||
data, x, y, xWant, yWant,
|
||||
)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
// Next 6 constants are from Hal Finney's bitcointalk.org post:
|
||||
// https://bitcointalk.org/index.php?topic=3238.msg45565#msg45565
|
||||
// May he rest in peace.
|
||||
//
|
||||
// They have also been independently derived from the code in the
|
||||
// EndomorphismVectors function in genstatics.go.
|
||||
endomorphismLambda = fromHex("5363ad4cc05c30e0a5261c028812645a122e22ea20816678df02967c1b23bd72")
|
||||
endomorphismBeta = hexToFieldVal("7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee")
|
||||
endomorphismA1 = fromHex("3086d221a7d46bcde86c90e49284eb15")
|
||||
endomorphismB1 = fromHex("-e4437ed6010e88286f547fa90abfe4c3")
|
||||
endomorphismA2 = fromHex("114ca50f7a8e2f3f657c1108d9d44cfd8")
|
||||
endomorphismB2 = fromHex("3086d221a7d46bcde86c90e49284eb15")
|
||||
)
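// Illustrative note (not from the upstream source): these constants realize
// the secp256k1 endomorphism. For any point P = (x, y) on the curve,
// lambda*P = (beta*x mod p, y), which is what lets splitK below break a
// scalar k into two half-length scalars k1, k2 with k = k1 + k2*lambda (mod N).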
|
||||
|
||||
// splitK returns a balanced length-two representation of k and their signs.
|
||||
// This is algorithm 3.74 from [GECC].
|
||||
//
|
||||
// One thing of note about this algorithm is that no matter what c1 and c2 are,
|
||||
// the final equation of k = k1 + k2 * lambda (mod n) will hold. This is
|
||||
// provable mathematically due to how a1/b1/a2/b2 are computed.
|
||||
//
|
||||
// c1 and c2 are chosen to minimize the max(k1,k2).
|
||||
func splitK(k []byte) ([]byte, []byte, int, int) {
|
||||
// All math here is done with big.Int, which is slow.
|
||||
// At some point, it might be useful to write something similar to
|
||||
// FieldVal but for N instead of P as the prime field if this ends up
|
||||
// being a bottleneck.
|
||||
bigIntK := new(big.Int)
|
||||
c1, c2 := new(big.Int), new(big.Int)
|
||||
tmp1, tmp2 := new(big.Int), new(big.Int)
|
||||
k1, k2 := new(big.Int), new(big.Int)
|
||||
bigIntK.SetBytes(k)
|
||||
// c1 = round(b2 * k / n) from step 4.
|
||||
// Rounding isn't really necessary and costs too much, hence skipped
|
||||
c1.Mul(endomorphismB2, bigIntK)
|
||||
c1.Div(c1, Params().N)
|
||||
// c2 = round(b1 * k / n) from step 4 (sign reversed to optimize one step)
|
||||
// Rounding isn't really necessary and costs too much, hence skipped
|
||||
c2.Mul(endomorphismB1, bigIntK)
|
||||
c2.Div(c2, Params().N)
|
||||
// k1 = k - c1 * a1 - c2 * a2 from step 5 (note c2's sign is reversed)
|
||||
tmp1.Mul(c1, endomorphismA1)
|
||||
tmp2.Mul(c2, endomorphismA2)
|
||||
k1.Sub(bigIntK, tmp1)
|
||||
k1.Add(k1, tmp2)
|
||||
// k2 = - c1 * b1 - c2 * b2 from step 5 (note c2's sign is reversed)
|
||||
tmp1.Mul(c1, endomorphismB1)
|
||||
tmp2.Mul(c2, endomorphismB2)
|
||||
k2.Sub(tmp2, tmp1)
|
||||
// Note Bytes() throws out the sign of k1 and k2. This matters
|
||||
// since k1 and/or k2 can be negative. Hence, we pass that
|
||||
// back separately.
|
||||
return k1.Bytes(), k2.Bytes(), k1.Sign(), k2.Sign()
|
||||
}
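// Sketch of how the split is recombined (this mirrors the check performed in
// TestSplitK below): with s1 and s2 the returned signs,
//
//	k = s1*k1 + s2*k2*lambda (mod N)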
|
||||
|
||||
func TestSplitK(t *testing.T) {
|
||||
tests := []struct {
|
||||
k string
|
||||
k1, k2 string
|
||||
s1, s2 int
|
||||
}{
|
||||
{
|
||||
"6df2b5d30854069ccdec40ae022f5c948936324a4e9ebed8eb82cfd5a6b6d766",
|
||||
"00000000000000000000000000000000b776e53fb55f6b006a270d42d64ec2b1",
|
||||
"00000000000000000000000000000000d6cc32c857f1174b604eefc544f0c7f7",
|
||||
-1, -1,
|
||||
},
|
||||
{
|
||||
"6ca00a8f10632170accc1b3baf2a118fa5725f41473f8959f34b8f860c47d88d",
|
||||
"0000000000000000000000000000000007b21976c1795723c1bfbfa511e95b84",
|
||||
"00000000000000000000000000000000d8d2d5f9d20fc64fd2cf9bda09a5bf90",
|
||||
1, -1,
|
||||
},
|
||||
{
|
||||
"b2eda8ab31b259032d39cbc2a234af17fcee89c863a8917b2740b67568166289",
|
||||
"00000000000000000000000000000000507d930fecda7414fc4a523b95ef3c8c",
|
||||
"00000000000000000000000000000000f65ffb179df189675338c6185cb839be",
|
||||
-1, -1,
|
||||
},
|
||||
{
|
||||
"f6f00e44f179936f2befc7442721b0633f6bafdf7161c167ffc6f7751980e3a0",
|
||||
"0000000000000000000000000000000008d0264f10bcdcd97da3faa38f85308d",
|
||||
"0000000000000000000000000000000065fed1506eb6605a899a54e155665f79",
|
||||
-1, -1,
|
||||
},
|
||||
{
|
||||
"8679085ab081dc92cdd23091ce3ee998f6b320e419c3475fae6b5b7d3081996e",
|
||||
"0000000000000000000000000000000089fbf24fbaa5c3c137b4f1cedc51d975",
|
||||
"00000000000000000000000000000000d38aa615bd6754d6f4d51ccdaf529fea",
|
||||
-1, -1,
|
||||
},
|
||||
{
|
||||
"6b1247bb7931dfcae5b5603c8b5ae22ce94d670138c51872225beae6bba8cdb3",
|
||||
"000000000000000000000000000000008acc2a521b21b17cfb002c83be62f55d",
|
||||
"0000000000000000000000000000000035f0eff4d7430950ecb2d94193dedc79",
|
||||
-1, -1,
|
||||
},
|
||||
{
|
||||
"a2e8ba2e8ba2e8ba2e8ba2e8ba2e8ba219b51835b55cc30ebfe2f6599bc56f58",
|
||||
"0000000000000000000000000000000045c53aa1bb56fcd68c011e2dad6758e4",
|
||||
"00000000000000000000000000000000a2e79d200f27f2360fba57619936159b",
|
||||
-1, -1,
|
||||
},
|
||||
}
|
||||
s256 := S256()
|
||||
for i, test := range tests {
|
||||
k, ok := new(big.Int).SetString(test.k, 16)
|
||||
if !ok {
|
||||
t.Errorf("%d: bad value for k: %s", i, test.k)
|
||||
}
|
||||
k1, k2, k1Sign, k2Sign := splitK(k.Bytes())
|
||||
k1str := fmt.Sprintf("%064x", k1)
|
||||
if test.k1 != k1str {
|
||||
t.Errorf("%d: bad k1: got %v, want %v", i, k1str, test.k1)
|
||||
}
|
||||
k2str := fmt.Sprintf("%064x", k2)
|
||||
if test.k2 != k2str {
|
||||
t.Errorf("%d: bad k2: got %v, want %v", i, k2str, test.k2)
|
||||
}
|
||||
if test.s1 != k1Sign {
|
||||
t.Errorf("%d: bad k1 sign: got %d, want %d", i, k1Sign, test.s1)
|
||||
}
|
||||
if test.s2 != k2Sign {
|
||||
t.Errorf("%d: bad k2 sign: got %d, want %d", i, k2Sign, test.s2)
|
||||
}
|
||||
k1Int := new(big.Int).SetBytes(k1)
|
||||
k1SignInt := new(big.Int).SetInt64(int64(k1Sign))
|
||||
k1Int.Mul(k1Int, k1SignInt)
|
||||
k2Int := new(big.Int).SetBytes(k2)
|
||||
k2SignInt := new(big.Int).SetInt64(int64(k2Sign))
|
||||
k2Int.Mul(k2Int, k2SignInt)
|
||||
gotK := new(big.Int).Mul(k2Int, endomorphismLambda)
|
||||
gotK.Add(k1Int, gotK)
|
||||
gotK.Mod(gotK, s256.N)
|
||||
if k.Cmp(gotK) != 0 {
|
||||
t.Errorf("%d: bad k: got %X, want %X", i, gotK.Bytes(), k.Bytes())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSplitKRand(t *testing.T) {
|
||||
s256 := S256()
|
||||
for i := 0; i < 1024; i++ {
|
||||
bytesK := make([]byte, 32)
|
||||
_, err := rand.Read(bytesK)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read random data at %d", i)
|
||||
break
|
||||
}
|
||||
k := new(big.Int).SetBytes(bytesK)
|
||||
k1, k2, k1Sign, k2Sign := splitK(bytesK)
|
||||
k1Int := new(big.Int).SetBytes(k1)
|
||||
k1SignInt := new(big.Int).SetInt64(int64(k1Sign))
|
||||
k1Int.Mul(k1Int, k1SignInt)
|
||||
k2Int := new(big.Int).SetBytes(k2)
|
||||
k2SignInt := new(big.Int).SetInt64(int64(k2Sign))
|
||||
k2Int.Mul(k2Int, k2SignInt)
|
||||
gotK := new(big.Int).Mul(k2Int, endomorphismLambda)
|
||||
gotK.Add(k1Int, gotK)
|
||||
gotK.Mod(gotK, s256.N)
|
||||
if k.Cmp(gotK) != 0 {
|
||||
t.Errorf("%d: bad k: got %X, want %X", i, gotK.Bytes(), k.Bytes())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Test this curve's usage with the ecdsa package.
|
||||
|
||||
func testKeyGeneration(t *testing.T, c *KoblitzCurve, tag string) {
|
||||
priv, err := NewSecretKey()
|
||||
if err != nil {
|
||||
t.Errorf("%s: error: %s", tag, err)
|
||||
return
|
||||
}
|
||||
pub := priv.PubKey()
|
||||
if !c.IsOnCurve(pub.X(), pub.Y()) {
|
||||
t.Errorf("%s: public key invalid: %s", tag, err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestKeyGeneration(t *testing.T) {
|
||||
testKeyGeneration(t, S256(), "S256")
|
||||
}
|
||||
|
||||
// checkNAFEncoding returns an error if the provided positive and negative
|
||||
// portions of an overall NAF encoding do not adhere to the requirements or they
|
||||
// do not sum back to the provided original value.
|
||||
func checkNAFEncoding(pos, neg []byte, origValue *big.Int) error {
|
||||
// NAF must not have a leading zero byte and the number of negative
|
||||
// bytes must not exceed the positive portion.
|
||||
if len(pos) > 0 && pos[0] == 0 {
|
||||
return fmt.Errorf("positive has leading zero -- got %x", pos)
|
||||
}
|
||||
if len(neg) > len(pos) {
|
||||
return fmt.Errorf(
|
||||
"negative has len %d > pos len %d", len(neg),
|
||||
len(pos),
|
||||
)
|
||||
}
|
||||
// Ensure the result doesn't have any adjacent non-zero digits.
|
||||
gotPos := new(big.Int).SetBytes(pos)
|
||||
gotNeg := new(big.Int).SetBytes(neg)
|
||||
posOrNeg := new(big.Int).Or(gotPos, gotNeg)
|
||||
prevBit := posOrNeg.Bit(0)
|
||||
for bit := 1; bit < posOrNeg.BitLen(); bit++ {
|
||||
thisBit := posOrNeg.Bit(bit)
|
||||
if prevBit == 1 && thisBit == 1 {
|
||||
return fmt.Errorf(
|
||||
"adjacent non-zero digits found at bit pos %d",
|
||||
bit-1,
|
||||
)
|
||||
}
|
||||
prevBit = thisBit
|
||||
}
|
||||
// Ensure the resulting positive and negative portions of the overall
|
||||
// NAF representation sum back to the original value.
|
||||
gotValue := new(big.Int).Sub(gotPos, gotNeg)
|
||||
if origValue.Cmp(gotValue) != 0 {
|
||||
return fmt.Errorf(
|
||||
"pos-neg is not original value: got %x, want %x",
|
||||
gotValue, origValue,
|
||||
)
|
||||
}
|
||||
return nil
|
||||
}
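// Worked example (added for illustration): the NAF form of 7 is 8 - 1, i.e. a
// positive portion of 0x08 and a negative portion of 0x01. Those non-zero
// digits are not adjacent (bits 3 and 0), and 8 - 1 sums back to 7, so
// checkNAFEncoding([]byte{0x08}, []byte{0x01}, big.NewInt(7)) returns nil.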
|
||||
153
pkg/crypto/ec/chaincfg/deployment_time_frame.go
Normal file
@@ -0,0 +1,153 @@
|
||||
package chaincfg
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/wire"
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrNoBlockClock is returned when an operation fails due to lack of
|
||||
// synchronization with the current up-to-date block clock.
|
||||
ErrNoBlockClock = fmt.Errorf("no block clock synchronized")
|
||||
)
|
||||
|
||||
// ConsensusDeploymentStarter determines if a given consensus deployment has
|
||||
// started. A deployment has started once, according to the current "time",
|
||||
// it has become eligible for activation because its prerequisite condition has
|
||||
// passed.
|
||||
type ConsensusDeploymentStarter interface {
|
||||
// HasStarted returns true if the consensus deployment has started.
|
||||
HasStarted(*wire.BlockHeader) (bool, error)
|
||||
}
|
||||
|
||||
// ConsensusDeploymentEnder determines if a given consensus deployment has
|
||||
// ended. A deployment has ended once, according to the current "time", the
|
||||
// deployment is no longer eligible for activation.
|
||||
type ConsensusDeploymentEnder interface {
|
||||
// HasEnded returns true if the consensus deployment has ended.
|
||||
HasEnded(*wire.BlockHeader) (bool, error)
|
||||
}
|
||||
|
||||
// BlockClock is an abstraction over the past median time computation. The past
|
||||
// median time computation is used in several consensus checks such as CSV, and
|
||||
// also BIP 9 version bits. This interface allows callers to abstract away the
|
||||
// computation of the past median time from the perspective of a given block
|
||||
// header.
|
||||
type BlockClock interface {
|
||||
// PastMedianTime returns the past median time from the PoV of the
|
||||
// passed block header. The past median time is the median time of the
|
||||
// 11 blocks prior to the passed block header.
|
||||
PastMedianTime(*wire.BlockHeader) (time.Time, error)
|
||||
}
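// Example (illustrative sketch, not part of the original file): a trivial
// BlockClock that always reports a fixed time, e.g. for tests. The type name
// is hypothetical.
//
//	type fixedClock struct{ t time.Time }
//
//	func (c fixedClock) PastMedianTime(*wire.BlockHeader) (time.Time, error) {
//		return c.t, nil
//	}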
|
||||
|
||||
// ClockConsensusDeploymentEnder is a more specialized version of the
|
||||
// ConsensusDeploymentEnder that uses a BlockClock in order to determine if a
|
||||
// deployment has ended or not.
|
||||
//
|
||||
// NOTE: Any calls to HasEnded will _fail_ with ErrNoBlockClock if they
|
||||
// happen before SynchronizeClock is executed.
|
||||
type ClockConsensusDeploymentEnder interface {
|
||||
ConsensusDeploymentEnder
|
||||
// SynchronizeClock synchronizes the target ConsensusDeploymentEnder
|
||||
// with the current up-to date BlockClock.
|
||||
SynchronizeClock(clock BlockClock)
|
||||
}
|
||||
|
||||
// MedianTimeDeploymentStarter is a ClockConsensusDeploymentStarter that uses
|
||||
// the median time past of a target block node to determine if a deployment has
|
||||
// started.
|
||||
type MedianTimeDeploymentStarter struct {
|
||||
blockClock BlockClock
|
||||
startTime time.Time
|
||||
}
|
||||
|
||||
// NewMedianTimeDeploymentStarter returns a new instance of a
|
||||
// MedianTimeDeploymentStarter for a given start time. Using a time.Time
|
||||
// instance where IsZero() is true, indicates that a deployment should be
|
||||
// considered to always have been started.
|
||||
func NewMedianTimeDeploymentStarter(startTime time.Time) *MedianTimeDeploymentStarter {
|
||||
return &MedianTimeDeploymentStarter{
|
||||
startTime: startTime,
|
||||
}
|
||||
}
|
||||
|
||||
// HasStarted returns true if the consensus deployment has started.
|
||||
func (m *MedianTimeDeploymentStarter) HasStarted(blkHeader *wire.BlockHeader) (
|
||||
bool,
|
||||
error,
|
||||
) {
|
||||
switch {
|
||||
// If we haven't yet been synchronized with a block clock, then we
|
||||
// can't tell the time, so we'll fail.
|
||||
case m.blockClock == nil:
|
||||
return false, ErrNoBlockClock
|
||||
// If the time is "zero", then the deployment has always started.
|
||||
case m.startTime.IsZero():
|
||||
return true, nil
|
||||
}
|
||||
medianTime, err := m.blockClock.PastMedianTime(blkHeader)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
// We check both after and equal here as after will fail for equivalent
|
||||
// times, and we want to be inclusive.
|
||||
return medianTime.After(m.startTime) || medianTime.Equal(m.startTime), nil
|
||||
}
|
||||
|
||||
// MedianTimeDeploymentEnder is a ClockConsensusDeploymentEnder that uses the
|
||||
// median time past of a target block to determine if a deployment has ended.
|
||||
type MedianTimeDeploymentEnder struct {
|
||||
blockClock BlockClock
|
||||
endTime time.Time
|
||||
}
|
||||
|
||||
// NewMedianTimeDeploymentEnder returns a new instance of the
|
||||
// MedianTimeDeploymentEnder anchored around the passed endTime. Using a
|
||||
// time.Time instance where IsZero() is true, indicates that a deployment
|
||||
// should be considered to never end.
|
||||
func NewMedianTimeDeploymentEnder(endTime time.Time) *MedianTimeDeploymentEnder {
|
||||
return &MedianTimeDeploymentEnder{
|
||||
endTime: endTime,
|
||||
}
|
||||
}
|
||||
|
||||
// HasEnded returns true if the deployment has ended.
|
||||
func (m *MedianTimeDeploymentEnder) HasEnded(blkHeader *wire.BlockHeader) (
|
||||
bool,
|
||||
error,
|
||||
) {
|
||||
switch {
|
||||
// If we haven't yet been synchronized with a block clock, then we can't tell
|
||||
// the time, so we'll fail.
|
||||
case m.blockClock == nil:
|
||||
return false, ErrNoBlockClock
|
||||
// If the time is "zero", then the deployment never ends.
|
||||
case m.endTime.IsZero():
|
||||
return false, nil
|
||||
}
|
||||
medianTime, err := m.blockClock.PastMedianTime(blkHeader)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
// We check both after and equal here as after will fail for equivalent
|
||||
// times, and we want to be inclusive.
|
||||
return medianTime.After(m.endTime) || medianTime.Equal(m.endTime), nil
|
||||
}
|
||||
|
||||
// EndTime returns the raw end time of the deployment.
|
||||
func (m *MedianTimeDeploymentEnder) EndTime() time.Time {
|
||||
return m.endTime
|
||||
}
|
||||
|
||||
// SynchronizeClock synchronizes the target ConsensusDeploymentEnder with the
|
||||
// current up-to date BlockClock.
|
||||
func (m *MedianTimeDeploymentEnder) SynchronizeClock(clock BlockClock) {
|
||||
m.blockClock = clock
|
||||
}
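// Usage sketch (illustrative; "chainClock" and "header" are assumed to come
// from the caller and are not defined in this file):
//
//	ender := NewMedianTimeDeploymentEnder(time.Unix(1510704000, 0))
//	ender.SynchronizeClock(chainClock) // any BlockClock implementation
//	ended, err := ender.HasEnded(header)
//
// Without the SynchronizeClock call, HasEnded fails with ErrNoBlockClock.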
|
||||
|
||||
// A compile-time assertion to ensure MedianTimeDeploymentEnder implements the
|
||||
// ClockConsensusDeploymentEnder interface.
|
||||
var _ ClockConsensusDeploymentEnder = (*MedianTimeDeploymentEnder)(nil)
|
||||
110
pkg/crypto/ec/chaincfg/genesis.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package chaincfg
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/chainhash"
|
||||
"next.orly.dev/pkg/crypto/ec/wire"
|
||||
)
|
||||
|
||||
var (
|
||||
// genesisCoinbaseTx is the coinbase transaction for the genesis blocks for
|
||||
// the main network, regression test network, and test network (version 3).
|
||||
genesisCoinbaseTx = wire.MsgTx{
|
||||
Version: 1,
|
||||
TxIn: []*wire.TxIn{
|
||||
{
|
||||
PreviousOutPoint: wire.OutPoint{
|
||||
Hash: chainhash.Hash{},
|
||||
Index: 0xffffffff,
|
||||
},
|
||||
SignatureScript: []byte{
|
||||
0x04, 0xff, 0xff, 0x00, 0x1d, 0x01, 0x04,
|
||||
0x45, /* |.......E| */
|
||||
0x54, 0x68, 0x65, 0x20, 0x54, 0x69, 0x6d,
|
||||
0x65, /* |The Time| */
|
||||
0x73, 0x20, 0x30, 0x33, 0x2f, 0x4a, 0x61,
|
||||
0x6e, /* |s 03/Jan| */
|
||||
0x2f, 0x32, 0x30, 0x30, 0x39, 0x20, 0x43,
|
||||
0x68, /* |/2009 Ch| */
|
||||
0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x6f,
|
||||
0x72, /* |ancellor| */
|
||||
0x20, 0x6f, 0x6e, 0x20, 0x62, 0x72, 0x69,
|
||||
0x6e, /* | on brin| */
|
||||
0x6b, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x65,
|
||||
0x63, /* |k of sec|*/
|
||||
0x6f, 0x6e, 0x64, 0x20, 0x62, 0x61, 0x69,
|
||||
0x6c, /* |ond bail| */
|
||||
0x6f, 0x75, 0x74, 0x20, 0x66, 0x6f, 0x72,
|
||||
0x20, /* |out for |*/
|
||||
0x62, 0x61, 0x6e, 0x6b, 0x73, /* |banks| */
|
||||
},
|
||||
Sequence: 0xffffffff,
|
||||
},
|
||||
},
|
||||
TxOut: []*wire.TxOut{
|
||||
{
|
||||
Value: 0x12a05f200,
|
||||
PkScript: []byte{
|
||||
0x41, 0x04, 0x67, 0x8a, 0xfd, 0xb0, 0xfe,
|
||||
0x55, /* |A.g....U| */
|
||||
0x48, 0x27, 0x19, 0x67, 0xf1, 0xa6, 0x71,
|
||||
0x30, /* |H'.g..q0| */
|
||||
0xb7, 0x10, 0x5c, 0xd6, 0xa8, 0x28, 0xe0,
|
||||
0x39, /* |..\..(.9| */
|
||||
0x09, 0xa6, 0x79, 0x62, 0xe0, 0xea, 0x1f,
|
||||
0x61, /* |..yb...a| */
|
||||
0xde, 0xb6, 0x49, 0xf6, 0xbc, 0x3f, 0x4c,
|
||||
0xef, /* |..I..?L.| */
|
||||
0x38, 0xc4, 0xf3, 0x55, 0x04, 0xe5, 0x1e,
|
||||
0xc1, /* |8..U....| */
|
||||
0x12, 0xde, 0x5c, 0x38, 0x4d, 0xf7, 0xba,
|
||||
0x0b, /* |..\8M...| */
|
||||
0x8d, 0x57, 0x8a, 0x4c, 0x70, 0x2b, 0x6b,
|
||||
0xf1, /* |.W.Lp+k.| */
|
||||
0x1d, 0x5f, 0xac, /* |._.| */
|
||||
},
|
||||
},
|
||||
},
|
||||
LockTime: 0,
|
||||
}
|
||||
// genesisHash is the hash of the first block in the block chain for the main
|
||||
// network (genesis block).
|
||||
genesisHash = chainhash.Hash(
|
||||
[chainhash.HashSize]byte{
|
||||
// Make go vet happy.
|
||||
0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72,
|
||||
0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f,
|
||||
0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c,
|
||||
0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
},
|
||||
)
|
||||
// genesisMerkleRoot is the hash of the first transaction in the genesis block
|
||||
// for the main network.
|
||||
genesisMerkleRoot = chainhash.Hash(
|
||||
[chainhash.HashSize]byte{
|
||||
// Make go vet happy.
|
||||
0x3b, 0xa3, 0xed, 0xfd, 0x7a, 0x7b, 0x12, 0xb2,
|
||||
0x7a, 0xc7, 0x2c, 0x3e, 0x67, 0x76, 0x8f, 0x61,
|
||||
0x7f, 0xc8, 0x1b, 0xc3, 0x88, 0x8a, 0x51, 0x32,
|
||||
0x3a, 0x9f, 0xb8, 0xaa, 0x4b, 0x1e, 0x5e, 0x4a,
|
||||
},
|
||||
)
|
||||
// genesisBlock defines the genesis block of the block chain which serves as the
|
||||
// public transaction ledger for the main network.
|
||||
genesisBlock = wire.MsgBlock{
|
||||
Header: wire.BlockHeader{
|
||||
Version: 1,
|
||||
PrevBlock: chainhash.Hash{}, // 0000000000000000000000000000000000000000000000000000000000000000
|
||||
MerkleRoot: genesisMerkleRoot, // 4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b
|
||||
Timestamp: time.Unix(
|
||||
0x495fab29,
|
||||
0,
|
||||
), // 2009-01-03 18:15:05 +0000 UTC
|
||||
Bits: 0x1d00ffff, // 486604799 [00000000ffff0000000000000000000000000000000000000000000000000000]
|
||||
Nonce: 0x7c2bac1d, // 2083236893
|
||||
},
|
||||
Transactions: []*wire.MsgTx{&genesisCoinbaseTx},
|
||||
}
|
||||
)
|
||||
493
pkg/crypto/ec/chaincfg/params.go
Normal file
@@ -0,0 +1,493 @@
|
||||
// Package chaincfg provides basic parameters for bitcoin chain and testnets.
|
||||
package chaincfg
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
"time"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/chainhash"
|
||||
"next.orly.dev/pkg/crypto/ec/wire"
|
||||
)
|
||||
|
||||
var (
|
||||
// bigOne is 1 represented as a big.Int. It is defined here to avoid
|
||||
// the overhead of creating it multiple times.
|
||||
bigOne = big.NewInt(1)
|
||||
|
||||
// mainPowLimit is the highest proof of work value a Bitcoin block can
|
||||
// have for the main network. It is the value 2^224 - 1.
|
||||
mainPowLimit = new(big.Int).Sub(new(big.Int).Lsh(bigOne, 224), bigOne)
|
||||
)
|
||||
|
||||
// Constants that define the deployment offset in the deployments field of the
|
||||
// parameters for each deployment. This is useful to be able to get the details
|
||||
// of a specific deployment by name.
|
||||
const (
|
||||
// DeploymentTestDummy defines the rule change deployment ID for testing
|
||||
// purposes.
|
||||
DeploymentTestDummy = iota
|
||||
|
||||
// DeploymentTestDummyMinActivation defines the rule change deployment
|
||||
// ID for testing purposes. This differs from the DeploymentTestDummy
|
||||
// in that it specifies the newer params the taproot fork used for
|
||||
// activation: a custom threshold and a min activation height.
|
||||
DeploymentTestDummyMinActivation
|
||||
|
||||
// DeploymentCSV defines the rule change deployment ID for the CSV
|
||||
// soft-fork package. The CSV package includes the deployment of BIPS
|
||||
// 68, 112, and 113.
|
||||
DeploymentCSV
|
||||
|
||||
// DeploymentSegwit defines the rule change deployment ID for the
|
||||
// Segregated Witness (segwit) soft-fork package. The segwit package
|
||||
// includes the deployment of BIPS 141, 142, 144, 145, 147 and 173.
|
||||
DeploymentSegwit
|
||||
|
||||
// DeploymentTaproot defines the rule change deployment ID for the
|
||||
// Taproot (+Schnorr) soft-fork package. The taproot package includes
|
||||
// the deployment of BIPS 340, 341 and 342.
|
||||
DeploymentTaproot
|
||||
|
||||
// NOTE: DefinedDeployments must always come last since it is used to
|
||||
// determine how many defined deployments there currently are.
|
||||
|
||||
// DefinedDeployments is the number of currently defined deployments.
|
||||
DefinedDeployments
|
||||
)
|
||||
|
||||
// ConsensusDeployment defines details related to a specific consensus rule
|
||||
// change that is voted in. This is part of BIP0009.
|
||||
type ConsensusDeployment struct {
|
||||
// BitNumber defines the specific bit number within the block version
|
||||
// this particular soft-fork deployment refers to.
|
||||
BitNumber uint8
|
||||
|
||||
// MinActivationHeight is an optional field that when set (default
|
||||
// value being zero), modifies the traditional BIP 9 state machine by
|
||||
// only transitioning from LockedIn to Active once the block height is
|
||||
// greater than (or equal to) the specified height.
|
||||
MinActivationHeight uint32
|
||||
|
||||
// CustomActivationThreshold if set (non-zero), will _override_ the
|
||||
// existing RuleChangeActivationThreshold value set at the
|
||||
// network/chain level. This value divided by the active
|
||||
// MinerConfirmationWindow denotes the threshold required for
|
||||
// activation. A value of 1815 blocks denotes a 90% threshold.
|
||||
CustomActivationThreshold uint32
|
||||
|
||||
// DeploymentStarter is used to determine if the given
|
||||
// ConsensusDeployment has started or not.
|
||||
DeploymentStarter ConsensusDeploymentStarter
|
||||
|
||||
// DeploymentEnder is used to determine if the given
|
||||
// ConsensusDeployment has ended or not.
|
||||
DeploymentEnder ConsensusDeploymentEnder
|
||||
}
|
||||
|
||||
// Checkpoint identifies a known good point in the block chain. Using
|
||||
// checkpoints allows a few optimizations for old blocks during initial download
|
||||
// and also prevents forks from old blocks.
|
||||
//
|
||||
// Each checkpoint is selected based upon several factors. See the
|
||||
// documentation for blockchain.IsCheckpointCandidate for details on the
|
||||
// selection criteria.
|
||||
type Checkpoint struct {
|
||||
Height int32
|
||||
Hash *chainhash.Hash
|
||||
}
|
||||
|
||||
// DNSSeed identifies a DNS seed.
|
||||
type DNSSeed struct {
|
||||
// Host defines the hostname of the seed.
|
||||
Host string
|
||||
|
||||
// HasFiltering defines whether the seed supports filtering
|
||||
// by service flags (wire.ServiceFlag).
|
||||
HasFiltering bool
|
||||
}
|
||||
|
||||
// Params defines a Bitcoin network by its parameters. These parameters may be
|
||||
// used by Bitcoin applications to differentiate networks as well as addresses
|
||||
// and keys for one network from those intended for use on another network.
|
||||
type Params struct {
|
||||
// Name defines a human-readable identifier for the network.
|
||||
Name string
|
||||
|
||||
// Net defines the magic bytes used to identify the network.
|
||||
Net wire.BitcoinNet
|
||||
|
||||
// DefaultPort defines the default peer-to-peer port for the network.
|
||||
DefaultPort string
|
||||
|
||||
// DNSSeeds defines a list of DNS seeds for the network that are used
|
||||
// as one method to discover peers.
|
||||
DNSSeeds []DNSSeed
|
||||
|
||||
// GenesisBlock defines the first block of the chain.
|
||||
GenesisBlock *wire.MsgBlock
|
||||
|
||||
// GenesisHash is the starting block hash.
|
||||
GenesisHash *chainhash.Hash
|
||||
|
||||
// PowLimit defines the highest allowed proof of work value for a block
|
||||
// as a uint256.
|
||||
PowLimit *big.Int
|
||||
|
||||
// PowLimitBits defines the highest allowed proof of work value for a
|
||||
// block in compact form.
|
||||
PowLimitBits uint32
|
||||
|
||||
// PoWNoRetargeting defines whether the network has difficulty
|
||||
// retargeting enabled or not. This should only be set to true for
|
||||
// regtest like networks.
|
||||
PoWNoRetargeting bool
|
||||
|
||||
// These fields define the block heights at which the specified softfork
|
||||
// BIP became active.
|
||||
BIP0034Height int32
|
||||
BIP0065Height int32
|
||||
BIP0066Height int32
|
||||
|
||||
// CoinbaseMaturity is the number of blocks required before newly mined
|
||||
// coins (coinbase transactions) can be spent.
|
||||
CoinbaseMaturity uint16
|
||||
|
||||
// SubsidyReductionInterval is the interval of blocks before the subsidy
|
||||
// is reduced.
|
||||
SubsidyReductionInterval int32
|
||||
|
||||
// TargetTimespan is the desired amount of time that should elapse
|
||||
// before the block difficulty requirement is examined to determine how
|
||||
// it should be changed in order to maintain the desired block
|
||||
// generation rate.
|
||||
TargetTimespan time.Duration
|
||||
|
||||
// TargetTimePerBlock is the desired amount of time to generate each
|
||||
// block.
|
||||
TargetTimePerBlock time.Duration
|
||||
|
||||
// RetargetAdjustmentFactor is the adjustment factor used to limit
|
||||
// the minimum and maximum amount of adjustment that can occur between
|
||||
// difficulty retargets.
|
||||
RetargetAdjustmentFactor int64
|
||||
|
||||
// ReduceMinDifficulty defines whether the network should reduce the
|
||||
// minimum required difficulty after a long enough period of time has
|
||||
// passed without finding a block. This is really only useful for test
|
||||
// networks and should not be set on a main network.
|
||||
ReduceMinDifficulty bool
|
||||
|
||||
// MinDiffReductionTime is the amount of time after which the minimum
|
||||
// required difficulty should be reduced when a block hasn't been found.
|
||||
//
|
||||
// NOTE: This only applies if ReduceMinDifficulty is true.
|
||||
MinDiffReductionTime time.Duration
|
||||
|
||||
// GenerateSupported specifies whether or not CPU mining is allowed.
|
||||
GenerateSupported bool
|
||||
|
||||
// Checkpoints ordered from oldest to newest.
|
||||
Checkpoints []Checkpoint
|
||||
|
||||
// These fields are related to voting on consensus rule changes as
|
||||
// defined by BIP0009.
|
||||
//
|
||||
// RuleChangeActivationThreshold is the number of blocks in a threshold
|
||||
// state retarget window for which a positive vote for a rule change
|
||||
// must be cast in order to lock in a rule change. It should typically
|
||||
// be 95% for the main network and 75% for test networks.
|
||||
//
|
||||
// MinerConfirmationWindow is the number of blocks in each threshold
|
||||
// state retarget window.
|
||||
//
|
||||
// Deployments define the specific consensus rule changes to be voted
|
||||
// on.
|
||||
RuleChangeActivationThreshold uint32
|
||||
MinerConfirmationWindow uint32
|
||||
Deployments [DefinedDeployments]ConsensusDeployment
|
||||
|
||||
// Mempool parameters
|
||||
RelayNonStdTxs bool
|
||||
|
||||
// Human-readable part for Bech32 encoded segwit addresses, as defined
|
||||
// in BIP 173.
|
||||
Bech32HRPSegwit []byte
|
||||
|
||||
// Address encoding magics
|
||||
PubKeyHashAddrID byte // First byte of a P2PKH address
|
||||
ScriptHashAddrID byte // First byte of a P2SH address
|
||||
PrivateKeyID byte // First byte of a WIF private key
|
||||
WitnessPubKeyHashAddrID byte // First byte of a P2WPKH address
|
||||
WitnessScriptHashAddrID byte // First byte of a P2WSH address
|
||||
|
||||
// BIP32 hierarchical deterministic extended key magics
|
||||
HDPrivateKeyID [4]byte
|
||||
HDPublicKeyID [4]byte
|
||||
|
||||
// BIP44 coin type used in the hierarchical deterministic path for
|
||||
// address generation.
|
||||
HDCoinType uint32
|
||||
}
|
||||
|
||||
// MainNetParams defines the network parameters for the main Bitcoin network.
|
||||
var MainNetParams = Params{
|
||||
Name: "mainnet",
|
||||
Net: wire.MainNet,
|
||||
DefaultPort: "8333",
|
||||
DNSSeeds: []DNSSeed{
|
||||
{"seed.bitcoin.sipa.be", true},
|
||||
{"dnsseed.bluematt.me", true},
|
||||
{"dnsseed.bitcoin.dashjr.org", false},
|
||||
{"seed.bitcoinstats.com", true},
|
||||
{"seed.bitnodes.io", false},
|
||||
{"seed.bitcoin.jonasschnelli.ch", true},
|
||||
},
|
||||
|
||||
// Chain parameters
|
||||
GenesisBlock: &genesisBlock,
|
||||
GenesisHash: &genesisHash,
|
||||
PowLimit: mainPowLimit,
|
||||
PowLimitBits: 0x1d00ffff,
|
||||
BIP0034Height: 227931, // 000000000000024b89b42a942fe0d9fea3bb44ab7bd1b19115dd6a759c0808b8
|
||||
BIP0065Height: 388381, // 000000000000000004c2b624ed5d7756c508d90fd0da2c7c679febfa6c4735f0
|
||||
BIP0066Height: 363725, // 00000000000000000379eaa19dce8c9b722d46ae6a57c2f1a988119488b50931
|
||||
CoinbaseMaturity: 100,
|
||||
SubsidyReductionInterval: 210000,
|
||||
TargetTimespan: time.Hour * 24 * 14, // 14 days
|
||||
TargetTimePerBlock: time.Minute * 10, // 10 minutes
|
||||
RetargetAdjustmentFactor: 4, // 25% less, 400% more
|
||||
ReduceMinDifficulty: false,
|
||||
MinDiffReductionTime: 0,
|
||||
GenerateSupported: false,
|
||||
|
||||
// Checkpoints ordered from oldest to newest.
|
||||
Checkpoints: []Checkpoint{
|
||||
{
|
||||
11111,
|
||||
newHashFromStr("0000000069e244f73d78e8fd29ba2fd2ed618bd6fa2ee92559f542fdb26e7c1d"),
|
||||
},
|
||||
{
|
||||
33333,
|
||||
newHashFromStr("000000002dd5588a74784eaa7ab0507a18ad16a236e7b1ce69f00d7ddfb5d0a6"),
|
||||
},
|
||||
{
|
||||
74000,
|
||||
newHashFromStr("0000000000573993a3c9e41ce34471c079dcf5f52a0e824a81e7f953b8661a20"),
|
||||
},
|
||||
{
|
||||
105000,
|
||||
newHashFromStr("00000000000291ce28027faea320c8d2b054b2e0fe44a773f3eefb151d6bdc97"),
|
||||
},
|
||||
{
|
||||
134444,
|
||||
newHashFromStr("00000000000005b12ffd4cd315cd34ffd4a594f430ac814c91184a0d42d2b0fe"),
|
||||
},
|
||||
{
|
||||
168000,
|
||||
newHashFromStr("000000000000099e61ea72015e79632f216fe6cb33d7899acb35b75c8303b763"),
|
||||
},
|
||||
{
|
||||
193000,
|
||||
newHashFromStr("000000000000059f452a5f7340de6682a977387c17010ff6e6c3bd83ca8b1317"),
|
||||
},
|
||||
{
|
||||
210000,
|
||||
newHashFromStr("000000000000048b95347e83192f69cf0366076336c639f9b7228e9ba171342e"),
|
||||
},
|
||||
{
|
||||
216116,
|
||||
newHashFromStr("00000000000001b4f4b433e81ee46494af945cf96014816a4e2370f11b23df4e"),
|
||||
},
|
||||
{
|
||||
225430,
|
||||
newHashFromStr("00000000000001c108384350f74090433e7fcf79a606b8e797f065b130575932"),
|
||||
},
|
||||
{
|
||||
250000,
|
||||
newHashFromStr("000000000000003887df1f29024b06fc2200b55f8af8f35453d7be294df2d214"),
|
||||
},
|
||||
{
|
||||
267300,
|
||||
newHashFromStr("000000000000000a83fbd660e918f218bf37edd92b748ad940483c7c116179ac"),
|
||||
},
|
||||
{
|
||||
279000,
|
||||
newHashFromStr("0000000000000001ae8c72a0b0c301f67e3afca10e819efa9041e458e9bd7e40"),
|
||||
},
|
||||
{
|
||||
300255,
|
||||
newHashFromStr("0000000000000000162804527c6e9b9f0563a280525f9d08c12041def0a0f3b2"),
|
||||
},
|
||||
{
|
||||
319400,
|
||||
newHashFromStr("000000000000000021c6052e9becade189495d1c539aa37c58917305fd15f13b"),
|
||||
},
|
||||
{
|
||||
343185,
|
||||
newHashFromStr("0000000000000000072b8bf361d01a6ba7d445dd024203fafc78768ed4368554"),
|
||||
},
|
||||
{
|
||||
352940,
|
||||
newHashFromStr("000000000000000010755df42dba556bb72be6a32f3ce0b6941ce4430152c9ff"),
|
||||
},
|
||||
{
|
||||
382320,
|
||||
newHashFromStr("00000000000000000a8dc6ed5b133d0eb2fd6af56203e4159789b092defd8ab2"),
|
||||
},
|
||||
{
|
||||
400000,
|
||||
newHashFromStr("000000000000000004ec466ce4732fe6f1ed1cddc2ed4b328fff5224276e3f6f"),
|
||||
},
|
||||
{
|
||||
430000,
|
||||
newHashFromStr("000000000000000001868b2bb3a285f3cc6b33ea234eb70facf4dcdf22186b87"),
|
||||
},
|
||||
{
|
||||
460000,
|
||||
newHashFromStr("000000000000000000ef751bbce8e744ad303c47ece06c8d863e4d417efc258c"),
|
||||
},
|
||||
{
|
||||
490000,
|
||||
newHashFromStr("000000000000000000de069137b17b8d5a3dfbd5b145b2dcfb203f15d0c4de90"),
|
||||
},
|
||||
{
|
||||
520000,
|
||||
newHashFromStr("0000000000000000000d26984c0229c9f6962dc74db0a6d525f2f1640396f69c"),
|
||||
},
|
||||
{
|
||||
550000,
|
||||
newHashFromStr("000000000000000000223b7a2298fb1c6c75fb0efc28a4c56853ff4112ec6bc9"),
|
||||
},
|
||||
{
|
||||
560000,
|
||||
newHashFromStr("0000000000000000002c7b276daf6efb2b6aa68e2ce3be67ef925b3264ae7122"),
|
||||
},
|
||||
{
|
||||
563378,
|
||||
newHashFromStr("0000000000000000000f1c54590ee18d15ec70e68c8cd4cfbadb1b4f11697eee"),
|
||||
},
|
||||
{
|
||||
597379,
|
||||
newHashFromStr("00000000000000000005f8920febd3925f8272a6a71237563d78c2edfdd09ddf"),
|
||||
},
|
||||
{
|
||||
623950,
|
||||
newHashFromStr("0000000000000000000f2adce67e49b0b6bdeb9de8b7c3d7e93b21e7fc1e819d"),
|
||||
},
|
||||
{
|
||||
654683,
|
||||
newHashFromStr("0000000000000000000b9d2ec5a352ecba0592946514a92f14319dc2b367fc72"),
|
||||
},
|
||||
{
|
||||
691719,
|
||||
newHashFromStr("00000000000000000008a89e854d57e5667df88f1cdef6fde2fbca1de5b639ad"),
|
||||
},
|
||||
{
|
||||
724466,
|
||||
newHashFromStr("000000000000000000052d314a259755ca65944e68df6b12a067ea8f1f5a7091"),
|
||||
},
|
||||
{
|
||||
751565,
|
||||
newHashFromStr("00000000000000000009c97098b5295f7e5f183ac811fb5d1534040adb93cabd"),
|
||||
},
|
||||
},
|
||||
|
||||
// Consensus rule change deployments.
|
||||
//
|
||||
// The miner confirmation window is defined as:
|
||||
// target proof of work timespan / target proof of work spacing
|
||||
RuleChangeActivationThreshold: 1916, // 95% of MinerConfirmationWindow
|
||||
MinerConfirmationWindow: 2016, //
|
||||
Deployments: [DefinedDeployments]ConsensusDeployment{
|
||||
DeploymentTestDummy: {
|
||||
BitNumber: 28,
|
||||
DeploymentStarter: NewMedianTimeDeploymentStarter(
|
||||
time.Unix(1199145601, 0), // January 1, 2008 UTC
|
||||
),
|
||||
DeploymentEnder: NewMedianTimeDeploymentEnder(
|
||||
time.Unix(1230767999, 0), // December 31, 2008 UTC
|
||||
),
|
||||
},
|
||||
DeploymentTestDummyMinActivation: {
|
||||
BitNumber: 22,
|
||||
CustomActivationThreshold: 1815, // Only needs 90% hash rate.
|
||||
MinActivationHeight: 10_0000, // Can only activate after height 10k.
|
||||
DeploymentStarter: NewMedianTimeDeploymentStarter(
|
||||
time.Time{}, // Always available for vote
|
||||
),
|
||||
DeploymentEnder: NewMedianTimeDeploymentEnder(
|
||||
time.Time{}, // Never expires
|
||||
),
|
||||
},
|
||||
DeploymentCSV: {
|
||||
BitNumber: 0,
|
||||
DeploymentStarter: NewMedianTimeDeploymentStarter(
|
||||
time.Unix(1462060800, 0), // May 1st, 2016
|
||||
),
|
||||
DeploymentEnder: NewMedianTimeDeploymentEnder(
|
||||
time.Unix(1493596800, 0), // May 1st, 2017
|
||||
),
|
||||
},
|
||||
DeploymentSegwit: {
|
||||
BitNumber: 1,
|
||||
DeploymentStarter: NewMedianTimeDeploymentStarter(
|
||||
time.Unix(1479168000, 0), // November 15, 2016 UTC
|
||||
),
|
||||
DeploymentEnder: NewMedianTimeDeploymentEnder(
|
||||
time.Unix(1510704000, 0), // November 15, 2017 UTC.
|
||||
),
|
||||
},
|
||||
DeploymentTaproot: {
|
||||
BitNumber: 2,
|
||||
DeploymentStarter: NewMedianTimeDeploymentStarter(
|
||||
time.Unix(1619222400, 0), // April 24th, 2021 UTC.
|
||||
),
|
||||
DeploymentEnder: NewMedianTimeDeploymentEnder(
|
||||
time.Unix(1628640000, 0), // August 11th, 2021 UTC.
|
||||
),
|
||||
CustomActivationThreshold: 1815, // 90%
|
||||
MinActivationHeight: 709_632,
|
||||
},
|
||||
},
|
||||
|
||||
// Mempool parameters
|
||||
RelayNonStdTxs: false,
|
||||
|
||||
// Human-readable part for Bech32 encoded segwit addresses, as defined in
|
||||
// BIP 173.
|
||||
Bech32HRPSegwit: []byte("bc"), // always bc for main net
|
||||
|
||||
// Address encoding magics
|
||||
PubKeyHashAddrID: 0x00, // starts with 1
|
||||
ScriptHashAddrID: 0x05, // starts with 3
|
||||
PrivateKeyID: 0x80, // starts with 5 (uncompressed) or K (compressed)
|
||||
WitnessPubKeyHashAddrID: 0x06, // starts with p2
|
||||
WitnessScriptHashAddrID: 0x0A, // starts with 7Xh
|
||||
|
||||
// BIP32 hierarchical deterministic extended key magics
|
||||
HDPrivateKeyID: [4]byte{0x04, 0x88, 0xad, 0xe4}, // starts with xprv
|
||||
HDPublicKeyID: [4]byte{0x04, 0x88, 0xb2, 0x1e}, // starts with xpub
|
||||
|
||||
// BIP44 coin type used in the hierarchical deterministic path for
|
||||
// address generation.
|
||||
HDCoinType: 0,
|
||||
}
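// Usage sketch (illustrative, not part of the original file): reading the
// taproot deployment's activation parameters from the main network params.
//
//	dep := MainNetParams.Deployments[DeploymentTaproot]
//	_ = dep.MinActivationHeight // 709632
//	_ = dep.CustomActivationThreshold // 1815 of 2016 blocks, i.e. 90%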
|
||||
|
||||
// newHashFromStr converts the passed big-endian hex string into a
|
||||
// chainhash.Hash. It only differs from the one available in chainhash in that
|
||||
// it panics on an error since it will only (and must only) be called with
|
||||
// hard-coded, and therefore known good, hashes.
|
||||
func newHashFromStr(hexStr string) *chainhash.Hash {
|
||||
hash, err := chainhash.NewHashFromStr(hexStr)
|
||||
if err != nil {
|
||||
// Ordinarily I don't like panics in library code since it
|
||||
// can take applications down without them having a chance to
|
||||
// recover which is extremely annoying, however an exception is
|
||||
// being made in this case because the only way this can panic
|
||||
// is if there is an error in the hard-coded hashes. Thus it
|
||||
// will only ever potentially panic on init and therefore is
|
||||
// 100% predictable.
|
||||
panic(err)
|
||||
}
|
||||
return hash
|
||||
}
|
||||
19
pkg/crypto/ec/chainhash/README.md
Normal file
@@ -0,0 +1,19 @@
|
||||
chainhash
|
||||
=========
|
||||
|
||||
|
||||
|
||||
chainhash provides a generic hash type and associated functions that allows the
|
||||
specific hash algorithm to be abstracted.
|
||||
|
||||
## Installation and Updating
|
||||
|
||||
```bash
|
||||
$ go get -u mleku.online/git/ec/chainhash
|
||||
```
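
## Usage

A minimal sketch of round-tripping a block hash with this package. The import
path below matches the layout of this repository's packages; adjust it if your
module path differs.

```go
package main

import (
	"fmt"

	"next.orly.dev/pkg/crypto/ec/chainhash"
)

func main() {
	// NewHashFromStr expects the byte-reversed hex form used for block hashes.
	h, err := chainhash.NewHashFromStr(
		"000000000003ba27aa200b1cecaad478d2b00432346c3f1f3986da1afd33e506",
	)
	if err != nil {
		panic(err)
	}
	// String reverses the bytes back, so this prints the same hex string.
	fmt.Println(h)
}
```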
|
||||
|
||||
## License
|
||||
|
||||
Package chainhash is licensed under the [copyfree](http://copyfree.org) ISC
|
||||
License.
|
||||
5
pkg/crypto/ec/chainhash/doc.go
Normal file
@@ -0,0 +1,5 @@
|
||||
// Package chainhash provides abstracted hash functionality.
|
||||
//
|
||||
// This package provides a generic hash type and associated functions that
|
||||
// allows the specific hash algorithm to be abstracted.
|
||||
package chainhash
|
||||
229
pkg/crypto/ec/chainhash/hash.go
Normal file
@@ -0,0 +1,229 @@
|
||||
// Copyright (c) 2013-2016 The btcsuite developers
|
||||
// Copyright (c) 2015 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package chainhash
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/crypto/sha256"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
const (
|
||||
// HashSize of array used to store hashes. See Hash.
|
||||
HashSize = 32
|
||||
// MaxHashStringSize is the maximum length of a Hash hash string.
|
||||
MaxHashStringSize = HashSize * 2
|
||||
)
|
||||
|
||||
var (
|
||||
// TagBIP0340Challenge is the BIP-0340 tag for challenges.
|
||||
TagBIP0340Challenge = []byte("BIP0340/challenge")
|
||||
// TagBIP0340Aux is the BIP-0340 tag for aux data.
|
||||
TagBIP0340Aux = []byte("BIP0340/aux")
|
||||
// TagBIP0340Nonce is the BIP-0340 tag for nonces.
|
||||
TagBIP0340Nonce = []byte("BIP0340/nonce")
|
||||
// TagTapSighash is the tag used by BIP 341 to generate the sighash
|
||||
// flags.
|
||||
TagTapSighash = []byte("TapSighash")
|
||||
// TagTapLeaf is the message tag prefix used to compute the hash
|
||||
// digest of a tapscript leaf.
|
||||
TagTapLeaf = []byte("TapLeaf")
|
||||
// TagTapBranch is the message tag prefix used to compute the
|
||||
// hash digest of two tap leaves into a taproot branch node.
|
||||
TagTapBranch = []byte("TapBranch")
|
||||
// TagTapTweak is the message tag prefix used to compute the hash tweak
|
||||
// used to enable a public key to commit to the taproot branch root
|
||||
// for the witness program.
|
||||
TagTapTweak = []byte("TapTweak")
|
||||
// precomputedTags is a map containing the SHA-256 hash of the BIP-0340
|
||||
// tags.
|
||||
precomputedTags = map[string]Hash{
|
||||
string(TagBIP0340Challenge): sha256.Sum256(TagBIP0340Challenge),
|
||||
string(TagBIP0340Aux): sha256.Sum256(TagBIP0340Aux),
|
||||
string(TagBIP0340Nonce): sha256.Sum256(TagBIP0340Nonce),
|
||||
string(TagTapSighash): sha256.Sum256(TagTapSighash),
|
||||
string(TagTapLeaf): sha256.Sum256(TagTapLeaf),
|
||||
string(TagTapBranch): sha256.Sum256(TagTapBranch),
|
||||
string(TagTapTweak): sha256.Sum256(TagTapTweak),
|
||||
}
|
||||
)
|
||||
|
||||
// ErrHashStrSize describes an error that indicates the caller specified a hash
|
||||
// string that has too many characters.
|
||||
var ErrHashStrSize = fmt.Errorf(
|
||||
"max hash string length is %v bytes",
|
||||
MaxHashStringSize,
|
||||
)
|
||||
|
||||
// Hash is used in several of the bitcoin messages and common structures. It
|
||||
// typically represents the double sha256 of data.
|
||||
type Hash [HashSize]byte
|
||||
|
||||
// String returns the Hash as the hexadecimal string of the byte-reversed
|
||||
// hash.
|
||||
func (hash Hash) String() string {
|
||||
for i := 0; i < HashSize/2; i++ {
|
||||
hash[i], hash[HashSize-1-i] = hash[HashSize-1-i], hash[i]
|
||||
}
|
||||
return hex.Enc(hash[:])
|
||||
}
|
||||
|
||||
// CloneBytes returns a copy of the bytes which represent the hash as a byte
|
||||
// slice.
|
||||
//
|
||||
// NOTE: It is generally cheaper to just slice the hash directly thereby reusing
|
||||
// the same bytes rather than calling this method.
|
||||
func (hash *Hash) CloneBytes() []byte {
|
||||
newHash := make([]byte, HashSize)
|
||||
copy(newHash, hash[:])
|
||||
return newHash
|
||||
}
|
||||
|
||||
// SetBytes sets the bytes which represent the hash. An error is returned if
|
||||
// the number of bytes passed in is not HashSize.
|
||||
func (hash *Hash) SetBytes(newHash []byte) error {
|
||||
nhlen := len(newHash)
|
||||
if nhlen != HashSize {
|
||||
return fmt.Errorf(
|
||||
"invalid hash length of %v, want %v", nhlen,
|
||||
HashSize,
|
||||
)
|
||||
}
|
||||
copy(hash[:], newHash)
|
||||
return nil
|
||||
}
|
||||
|
||||
// IsEqual returns true if target is the same as hash.
|
||||
func (hash *Hash) IsEqual(target *Hash) bool {
|
||||
if hash == nil && target == nil {
|
||||
return true
|
||||
}
|
||||
if hash == nil || target == nil {
|
||||
return false
|
||||
}
|
||||
return *hash == *target
|
||||
}
|
||||
|
||||
// MarshalJSON serialises the hash as a JSON appropriate string value.
|
||||
func (hash Hash) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(hash.String())
|
||||
}
|
||||
|
||||
// UnmarshalJSON parses the hash with JSON appropriate string value.
|
||||
func (hash *Hash) UnmarshalJSON(input []byte) error {
|
||||
// If the first byte indicates an array, the hash could have been marshalled
|
||||
// using the legacy method and e.g. persisted.
|
||||
if len(input) > 0 && input[0] == '[' {
|
||||
return decodeLegacy(hash, input)
|
||||
}
|
||||
var sh string
|
||||
err := json.Unmarshal(input, &sh)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newHash, err := NewHashFromStr(sh)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return hash.SetBytes(newHash[:])
|
||||
}
|
||||
|
||||
// NewHash returns a new Hash from a byte slice. An error is returned if
|
||||
// the number of bytes passed in is not HashSize.
|
||||
func NewHash(newHash []byte) (*Hash, error) {
|
||||
var sh Hash
|
||||
err := sh.SetBytes(newHash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &sh, err
|
||||
}
|
||||
|
||||
// TaggedHash implements the tagged hash scheme described in BIP-340. We use
|
||||
// sha-256 to bind a message hash to a specific context using a tag:
|
||||
// sha256(sha256(tag) || sha256(tag) || msg).
|
||||
func TaggedHash(tag []byte, msgs ...[]byte) *Hash {
|
||||
// Check to see if we've already pre-computed the hash of the tag. If
|
||||
// so then this'll save us an extra sha256 hash.
|
||||
shaTag, ok := precomputedTags[string(tag)]
|
||||
if !ok {
|
||||
shaTag = sha256.Sum256(tag)
|
||||
}
|
||||
// h = sha256(sha256(tag) || sha256(tag) || msg)
|
||||
h := sha256.New()
|
||||
h.Write(shaTag[:])
|
||||
h.Write(shaTag[:])
|
||||
for _, msg := range msgs {
|
||||
h.Write(msg)
|
||||
}
|
||||
taggedHash := h.Sum(nil)
|
||||
// The function can't error out since the above hash is guaranteed to
|
||||
// be 32 bytes.
|
||||
hash, _ := NewHash(taggedHash)
|
||||
return hash
|
||||
}
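// For example (illustrative), the BIP-340 challenge hash over the
// concatenation of r, P and the message m can be computed as
//
//	e := TaggedHash(TagBIP0340Challenge, rBytes, pBytes, m)
//
// which equals sha256(sha256("BIP0340/challenge") || sha256("BIP0340/challenge")
// || rBytes || pBytes || m). The names rBytes, pBytes and m are placeholders.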
|
||||
|
||||
// NewHashFromStr creates a Hash from a hash string. The string should be
|
||||
// the hexadecimal string of a byte-reversed hash, but any missing characters
|
||||
// result in zero padding at the end of the Hash.
|
||||
func NewHashFromStr(hash string) (*Hash, error) {
|
||||
ret := new(Hash)
|
||||
err := Decode(ret, hash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
// Decode decodes the byte-reversed hexadecimal string encoding of a Hash to a
|
||||
// destination.
|
||||
func Decode(dst *Hash, src string) error {
|
||||
// Return error if hash string is too long.
|
||||
if len(src) > MaxHashStringSize {
|
||||
return ErrHashStrSize
|
||||
}
|
||||
// Hex decoder expects the hash to be a multiple of two. When not, pad
|
||||
// with a leading zero.
|
||||
var srcBytes []byte
|
||||
if len(src)%2 == 0 {
|
||||
srcBytes = []byte(src)
|
||||
} else {
|
||||
srcBytes = make([]byte, 1+len(src))
|
||||
srcBytes[0] = '0'
|
||||
copy(srcBytes[1:], src)
|
||||
}
|
||||
// Hex decode the source bytes to a temporary destination.
|
||||
var reversedHash Hash
|
||||
_, err := hex.DecAppend(
|
||||
reversedHash[HashSize-hex.DecLen(len(srcBytes)):],
|
||||
srcBytes,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Reverse copy from the temporary hash to destination. Because the
|
||||
// temporary was zeroed, the written result will be correctly padded.
|
||||
for i, b := range reversedHash[:HashSize/2] {
|
||||
dst[i], dst[HashSize-1-i] = reversedHash[HashSize-1-i], b
|
||||
}
|
||||
return nil
|
||||
}
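// For example (illustrative), Decode(dst, "12") sets dst[0] to 0x12 and leaves
// the remaining bytes zero: the input is read as a byte-reversed hash, and any
// missing characters pad the end of the Hash with zeros.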
|
||||
|
||||
// decodeLegacy decodes an Hash that has been encoded with the legacy method
|
||||
// (i.e. represented as a bytes array) to a destination.
|
||||
func decodeLegacy(dst *Hash, src []byte) error {
|
||||
var hashBytes []byte
|
||||
err := json.Unmarshal(src, &hashBytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(hashBytes) != HashSize {
|
||||
return ErrHashStrSize
|
||||
}
|
||||
return dst.SetBytes(hashBytes)
|
||||
}
|
||||
228
pkg/crypto/ec/chainhash/hash_test.go
Normal file
@@ -0,0 +1,228 @@
|
||||
// Copyright (c) 2013-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package chainhash
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
)
|
||||
|
||||
// mainNetGenesisHash is the hash of the first block in the block chain for the
|
||||
// main network (genesis block).
|
||||
var mainNetGenesisHash = Hash(
|
||||
[HashSize]byte{
|
||||
// Make go vet happy.
|
||||
0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72,
|
||||
0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f,
|
||||
0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c,
|
||||
0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
},
|
||||
)
|
||||
|
||||
// TestHash tests the Hash API.
|
||||
func TestHash(t *testing.T) {
|
||||
// Hash of block 234439.
|
||||
blockHashStr := "14a0810ac680a3eb3f82edc878cea25ec41d6b790744e5daeef"
|
||||
blockHash, err := NewHashFromStr(blockHashStr)
|
||||
if err != nil {
|
||||
t.Errorf("NewHashFromStr: %v", err)
|
||||
}
|
||||
// Hash of block 234440 as byte slice.
|
||||
buf := []byte{
|
||||
0x79, 0xa6, 0x1a, 0xdb, 0xc6, 0xe5, 0xa2, 0xe1,
|
||||
0x39, 0xd2, 0x71, 0x3a, 0x54, 0x6e, 0xc7, 0xc8,
|
||||
0x75, 0x63, 0x2e, 0x75, 0xf1, 0xdf, 0x9c, 0x3f,
|
||||
0xa6, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
}
|
||||
hash, err := NewHash(buf)
|
||||
if err != nil {
|
||||
t.Errorf("NewHash: unexpected error %v", err)
|
||||
}
|
||||
// Ensure proper size.
|
||||
if len(hash) != HashSize {
|
||||
t.Errorf(
|
||||
"NewHash: hash length mismatch - got: %v, want: %v",
|
||||
len(hash), HashSize,
|
||||
)
|
||||
}
|
||||
// Ensure contents match.
|
||||
if !utils.FastEqual(hash[:], buf) {
|
||||
t.Errorf(
|
||||
"NewHash: hash contents mismatch - got: %v, want: %v",
|
||||
hash[:], buf,
|
||||
)
|
||||
}
|
||||
// Ensure contents of hash of block 234440 don't match 234439.
|
||||
if hash.IsEqual(blockHash) {
|
||||
t.Errorf(
|
||||
"IsEqual: hash contents should not match - got: %v, want: %v",
|
||||
hash, blockHash,
|
||||
)
|
||||
}
|
||||
// Set hash from byte slice and ensure contents match.
|
||||
err = hash.SetBytes(blockHash.CloneBytes())
|
||||
if err != nil {
|
||||
t.Errorf("SetBytes: %v", err)
|
||||
}
|
||||
if !hash.IsEqual(blockHash) {
|
||||
t.Errorf(
|
||||
"IsEqual: hash contents mismatch - got: %v, want: %v",
|
||||
hash, blockHash,
|
||||
)
|
||||
}
|
||||
// Ensure nil hashes are handled properly.
|
||||
if !(*Hash)(nil).IsEqual(nil) {
|
||||
t.Error("IsEqual: nil hashes should match")
|
||||
}
|
||||
if hash.IsEqual(nil) {
|
||||
t.Error("IsEqual: non-nil hash matches nil hash")
|
||||
}
|
||||
// Invalid size for SetBytes.
|
||||
err = hash.SetBytes([]byte{0x00})
|
||||
if err == nil {
|
||||
t.Errorf("SetBytes: failed to received expected err - got: nil")
|
||||
}
|
||||
// Invalid size for NewHash.
|
||||
invalidHash := make([]byte, HashSize+1)
|
||||
_, err = NewHash(invalidHash)
|
||||
if err == nil {
|
||||
t.Errorf("NewHash: failed to received expected err - got: nil")
|
||||
}
|
||||
}
|
||||
|
||||
// TestHashString tests the stringized output for hashes.
|
||||
func TestHashString(t *testing.T) {
|
||||
// Block 100000 hash.
|
||||
wantStr := "000000000003ba27aa200b1cecaad478d2b00432346c3f1f3986da1afd33e506"
|
||||
hash := Hash(
|
||||
[HashSize]byte{
|
||||
// Make go vet happy.
|
||||
0x06, 0xe5, 0x33, 0xfd, 0x1a, 0xda, 0x86, 0x39,
|
||||
0x1f, 0x3f, 0x6c, 0x34, 0x32, 0x04, 0xb0, 0xd2,
|
||||
0x78, 0xd4, 0xaa, 0xec, 0x1c, 0x0b, 0x20, 0xaa,
|
||||
0x27, 0xba, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
},
|
||||
)
|
||||
hashStr := hash.String()
|
||||
if hashStr != wantStr {
|
||||
t.Errorf(
|
||||
"String: wrong hash string - got %v, want %v",
|
||||
hashStr, wantStr,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// todo: these fail for some reason
|
||||
// // TestNewHashFromStr executes tests against the NewHashFromStr function.
|
||||
// func TestNewHashFromStr(t *testing.T) {
|
||||
// tests := []struct {
|
||||
// in string
|
||||
// want Hash
|
||||
// err error
|
||||
// }{
|
||||
// // Genesis hash.
|
||||
// {
|
||||
// "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f",
|
||||
// mainNetGenesisHash,
|
||||
// nil,
|
||||
// },
|
||||
// // Genesis hash with stripped leading zeros.
|
||||
// {
|
||||
// "19d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f",
|
||||
// mainNetGenesisHash,
|
||||
// nil,
|
||||
// },
|
||||
// // Empty string.
|
||||
// {
|
||||
// "",
|
||||
// Hash{},
|
||||
// nil,
|
||||
// },
|
||||
// // Single digit hash.
|
||||
// {
|
||||
// "1",
|
||||
// Hash([HashSize]byte{ // Make go vet happy.
|
||||
// 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// }),
|
||||
// nil,
|
||||
// },
|
||||
// // Block 203707 with stripped leading zeros.
|
||||
// {
|
||||
// "3264bc2ac36a60840790ba1d475d01367e7c723da941069e9dc",
|
||||
// Hash([HashSize]byte{ // Make go vet happy.
|
||||
// 0xdc, 0xe9, 0x69, 0x10, 0x94, 0xda, 0x23, 0xc7,
|
||||
// 0xe7, 0x67, 0x13, 0xd0, 0x75, 0xd4, 0xa1, 0x0b,
|
||||
// 0x79, 0x40, 0x08, 0xa6, 0x36, 0xac, 0xc2, 0x4b,
|
||||
// 0x26, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// }),
|
||||
// nil,
|
||||
// },
|
||||
// // Hash string that is too long.
|
||||
// {
|
||||
// "01234567890123456789012345678901234567890123456789012345678912345",
|
||||
// Hash{},
|
||||
// ErrHashStrSize,
|
||||
// },
|
||||
// // Hash string that contains non-hex chars.
|
||||
// {
|
||||
// "abcdefg",
|
||||
// Hash{},
|
||||
// hex.InvalidByteError('g'),
|
||||
// },
|
||||
// }
|
||||
//
|
||||
// unexpectedErrStr := "NewHashFromStr #%d failed to detect expected error - got: %v want: %v"
|
||||
// unexpectedResultStr := "NewHashFromStr #%d got: %v want: %v"
|
||||
// t.Logf("Running %d tests", len(tests))
|
||||
// for i, test := range tests {
|
||||
// result, err := NewHashFromStr(test.in)
|
||||
// if err != test.err {
|
||||
// t.Errorf(unexpectedErrStr, i, err, test.err)
|
||||
// continue
|
||||
// } else if err != nil {
|
||||
// // Got expected error. Move on to the next test.
|
||||
// continue
|
||||
// }
|
||||
// if !test.want.IsEqual(result) {
|
||||
// t.Errorf(unexpectedResultStr, i, result, &test.want)
|
||||
// continue
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// // TestHashJsonMarshal tests json marshal and unmarshal.
|
||||
// func TestHashJsonMarshal(t *testing.T) {
|
||||
// hashStr := "000000000003ba27aa200b1cecaad478d2b00432346c3f1f3986da1afd33e506"
|
||||
// legacyHashStr := []byte("[6,229,51,253,26,218,134,57,31,63,108,52,50,4,176,210,120,212,170,236,28,11,32,170,39,186,3,0,0,0,0,0]")
|
||||
// hash, err := NewHashFromStr(hashStr)
|
||||
// if err != nil {
|
||||
// t.Errorf("NewHashFromStr error:%v, hashStr:%s", err, hashStr)
|
||||
// }
|
||||
// hashBytes, err := json.Marshal(hash)
|
||||
// if err != nil {
|
||||
// t.Errorf("Marshal json error:%v, hash:%v", err, hashBytes)
|
||||
// }
|
||||
// var newHash Hash
|
||||
// err = json.Unmarshal(hashBytes, &newHash)
|
||||
// if err != nil {
|
||||
// t.Errorf("Unmarshal json error:%v, hash:%v", err, hashBytes)
|
||||
// }
|
||||
// if !hash.IsEqual(&newHash) {
|
||||
// t.Errorf("String: wrong hash string - got %v, want %v",
|
||||
// newHash.String(), hashStr)
|
||||
// }
|
||||
// err = newHash.Unmarshal(legacyHashStr)
|
||||
// if err != nil {
|
||||
// t.Errorf("Unmarshal legacy json error:%v, hash:%v", err, legacyHashStr)
|
||||
// }
|
||||
// if !hash.IsEqual(&newHash) {
|
||||
// t.Errorf("String: wrong hash string - got %v, want %v",
|
||||
// newHash.String(), hashStr)
|
||||
// }
|
||||
// }
|
||||
33
pkg/crypto/ec/chainhash/hashfuncs.go
Normal file
@@ -0,0 +1,33 @@
|
||||
// Copyright (c) 2015 The Decred developers
|
||||
// Copyright (c) 2016-2017 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package chainhash
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/sha256"
|
||||
)
|
||||
|
||||
// HashB calculates hash(b) and returns the resulting bytes.
|
||||
func HashB(b []byte) []byte {
|
||||
hash := sha256.Sum256(b)
|
||||
return hash[:]
|
||||
}
|
||||
|
||||
// HashH calculates hash(b) and returns the resulting bytes as a Hash.
|
||||
func HashH(b []byte) Hash { return Hash(sha256.Sum256(b)) }
|
||||
|
||||
// DoubleHashB calculates hash(hash(b)) and returns the resulting bytes.
|
||||
func DoubleHashB(b []byte) []byte {
|
||||
first := sha256.Sum256(b)
|
||||
second := sha256.Sum256(first[:])
|
||||
return second[:]
|
||||
}
|
||||
|
||||
// DoubleHashH calculates hash(hash(b)) and returns the resulting bytes as a
|
||||
// Hash.
|
||||
func DoubleHashH(b []byte) Hash {
|
||||
first := sha256.Sum256(b)
|
||||
return sha256.Sum256(first[:])
|
||||
}
|
||||
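A short usage sketch for the helpers above. The import path is an assumption based on the next.orly.dev module layout used elsewhere in this commit; the expected digest for "abc" comes from the test vectors below.

package main

import (
	"fmt"

	"next.orly.dev/pkg/crypto/ec/chainhash" // assumed import path
)

func main() {
	msg := []byte("abc")
	// Single SHA-256; for "abc" this prints
	// ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad.
	fmt.Printf("%x\n", chainhash.HashB(msg))
	// Double SHA-256 returned as a Hash value.
	dh := chainhash.DoubleHashH(msg)
	fmt.Printf("%x\n", dh[:])
}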
323
pkg/crypto/ec/chainhash/hashfuncs_test.go
Normal file
@@ -0,0 +1,323 @@
|
||||
// Copyright (c) 2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package chainhash
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestHashFuncs ensures the hash functions which perform hash(b) work as
|
||||
// expected.
|
||||
func TestHashFuncs(t *testing.T) {
|
||||
tests := []struct {
|
||||
out string
|
||||
in string
|
||||
}{
|
||||
{
|
||||
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
|
||||
"a",
|
||||
},
|
||||
{
|
||||
"fb8e20fc2e4c3f248c60c39bd652f3c1347298bb977b8b4d5903b85055620603",
|
||||
"ab",
|
||||
},
|
||||
{
|
||||
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
|
||||
"abc",
|
||||
},
|
||||
{
|
||||
"88d4266fd4e6338d13b845fcf289579d209c897823b9217da3e161936f031589",
|
||||
"abcd",
|
||||
},
|
||||
{
|
||||
"36bbe50ed96841d10443bcb670d6554f0a34b761be67ec9c4a8ad2c0c44ca42c",
|
||||
"abcde",
|
||||
},
|
||||
{
|
||||
"bef57ec7f53a6d40beb640a780a639c83bc29ac8a9816f1fc6c5c6dcd93c4721",
|
||||
"abcdef",
|
||||
},
|
||||
{
|
||||
"7d1a54127b222502f5b79b5fb0803061152a44f92b37e23c6527baf665d4da9a",
|
||||
"abcdefg",
|
||||
},
|
||||
{
|
||||
"9c56cc51b374c3ba189210d5b6d4bf57790d351c96c47c02190ecf1e430635ab",
|
||||
"abcdefgh",
|
||||
},
|
||||
{
|
||||
"19cc02f26df43cc571bc9ed7b0c4d29224a3ec229529221725ef76d021c8326f",
|
||||
"abcdefghi",
|
||||
},
|
||||
{
|
||||
"72399361da6a7754fec986dca5b7cbaf1c810a28ded4abaf56b2106d06cb78b0",
|
||||
"abcdefghij",
|
||||
},
|
||||
{
|
||||
"a144061c271f152da4d151034508fed1c138b8c976339de229c3bb6d4bbb4fce",
|
||||
"Discard medicine more than two years old.",
|
||||
},
|
||||
{
|
||||
"6dae5caa713a10ad04b46028bf6dad68837c581616a1589a265a11288d4bb5c4",
|
||||
"He who has a shady past knows that nice guys finish last.",
|
||||
},
|
||||
{
|
||||
"ae7a702a9509039ddbf29f0765e70d0001177914b86459284dab8b348c2dce3f",
|
||||
"I wouldn't marry him with a ten foot pole.",
|
||||
},
|
||||
{
|
||||
"6748450b01c568586715291dfa3ee018da07d36bb7ea6f180c1af6270215c64f",
|
||||
"Free! Free!/A trip/to Mars/for 900/empty jars/Burma Shave",
|
||||
},
|
||||
{
|
||||
"14b82014ad2b11f661b5ae6a99b75105c2ffac278cd071cd6c05832793635774",
|
||||
"The days of the digital watch are numbered. -Tom Stoppard",
|
||||
},
|
||||
{
|
||||
"7102cfd76e2e324889eece5d6c41921b1e142a4ac5a2692be78803097f6a48d8",
|
||||
"Nepal premier won't resign.",
|
||||
},
|
||||
{
|
||||
"23b1018cd81db1d67983c5f7417c44da9deb582459e378d7a068552ea649dc9f",
|
||||
"For every action there is an equal and opposite government program.",
|
||||
},
|
||||
{
|
||||
"8001f190dfb527261c4cfcab70c98e8097a7a1922129bc4096950e57c7999a5a",
|
||||
"His money is twice tainted: 'taint yours and 'taint mine.",
|
||||
},
|
||||
{
|
||||
"8c87deb65505c3993eb24b7a150c4155e82eee6960cf0c3a8114ff736d69cad5",
|
||||
"There is no reason for any individual to have a computer in their home. -Ken Olsen, 1977",
|
||||
},
|
||||
{
|
||||
"bfb0a67a19cdec3646498b2e0f751bddc41bba4b7f30081b0b932aad214d16d7",
|
||||
"It's a tiny change to the code and not completely disgusting. - Bob Manchek",
|
||||
},
|
||||
{
|
||||
"7f9a0b9bf56332e19f5a0ec1ad9c1425a153da1c624868fda44561d6b74daf36",
|
||||
"size: a.out: bad magic",
|
||||
},
|
||||
{
|
||||
"b13f81b8aad9e3666879af19886140904f7f429ef083286195982a7588858cfc",
|
||||
"The major problem is with sendmail. -Mark Horton",
|
||||
},
|
||||
{
|
||||
"b26c38d61519e894480c70c8374ea35aa0ad05b2ae3d6674eec5f52a69305ed4",
|
||||
"Give me a rock, paper and scissors and I will move the world. CCFestoon",
|
||||
},
|
||||
{
|
||||
"049d5e26d4f10222cd841a119e38bd8d2e0d1129728688449575d4ff42b842c1",
|
||||
"If the enemy is within range, then so are you.",
|
||||
},
|
||||
{
|
||||
"0e116838e3cc1c1a14cd045397e29b4d087aa11b0853fc69ec82e90330d60949",
|
||||
"It's well we cannot hear the screams/That we create in others' dreams.",
|
||||
},
|
||||
{
|
||||
"4f7d8eb5bcf11de2a56b971021a444aa4eafd6ecd0f307b5109e4e776cd0fe46",
|
||||
"You remind me of a TV show, but that's all right: I watch it anyway.",
|
||||
},
|
||||
{
|
||||
"61c0cc4c4bd8406d5120b3fb4ebc31ce87667c162f29468b3c779675a85aebce",
|
||||
"C is as portable as Stonehedge!!",
|
||||
},
|
||||
{
|
||||
"1fb2eb3688093c4a3f80cd87a5547e2ce940a4f923243a79a2a1e242220693ac",
|
||||
"Even if I could be Shakespeare, I think I should still choose to be Faraday. - A. Huxley",
|
||||
},
|
||||
{
|
||||
"395585ce30617b62c80b93e8208ce866d4edc811a177fdb4b82d3911d8696423",
|
||||
"The fugacity of a constituent in a mixture of gases at a given temperature is proportional to its mole fraction. Lewis-Randall Rule",
|
||||
},
|
||||
{
|
||||
"4f9b189a13d030838269dce846b16a1ce9ce81fe63e65de2f636863336a98fe6",
|
||||
"How can you write a big system without C++? -Paul Glick",
|
||||
},
|
||||
}
|
||||
|
||||
// Ensure the hash function which returns a byte slice returns the
|
||||
// expected result.
|
||||
for _, test := range tests {
|
||||
h := fmt.Sprintf("%x", HashB([]byte(test.in)))
|
||||
if h != test.out {
|
||||
t.Errorf("HashB(%q) = %s, want %s", test.in, h, test.out)
|
||||
continue
|
||||
}
|
||||
}
|
||||
// Ensure the hash function which returns a Hash returns the expected
|
||||
// result.
|
||||
for _, test := range tests {
|
||||
hash := HashH([]byte(test.in))
|
||||
h := fmt.Sprintf("%x", hash[:])
|
||||
if h != test.out {
|
||||
t.Errorf("HashH(%q) = %s, want %s", test.in, h, test.out)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestDoubleHashFuncs ensures the hash functions which perform hash(hash(b))
|
||||
// work as expected.
|
||||
func TestDoubleHashFuncs(t *testing.T) {
|
||||
tests := []struct {
|
||||
out string
|
||||
in string
|
||||
}{
|
||||
{
|
||||
"5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456",
|
||||
"",
|
||||
},
|
||||
{
|
||||
"bf5d3affb73efd2ec6c36ad3112dd933efed63c4e1cbffcfa88e2759c144f2d8",
|
||||
"a",
|
||||
},
|
||||
{
|
||||
"a1ff8f1856b5e24e32e3882edd4a021f48f28a8b21854b77fdef25a97601aace",
|
||||
"ab",
|
||||
},
|
||||
{
|
||||
"4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358",
|
||||
"abc",
|
||||
},
|
||||
{
|
||||
"7e9c158ecd919fa439a7a214c9fc58b85c3177fb1613bdae41ee695060e11bc6",
|
||||
"abcd",
|
||||
},
|
||||
{
|
||||
"1d72b6eb7ba8b9709c790b33b40d8c46211958e13cf85dbcda0ed201a99f2fb9",
|
||||
"abcde",
|
||||
},
|
||||
{
|
||||
"ce65d4756128f0035cba4d8d7fae4e9fa93cf7fdf12c0f83ee4a0e84064bef8a",
|
||||
"abcdef",
|
||||
},
|
||||
{
|
||||
"dad6b965ad86b880ceb6993f98ebeeb242de39f6b87a458c6510b5a15ff7bbf1",
|
||||
"abcdefg",
|
||||
},
|
||||
{
|
||||
"b9b12e7125f73fda20b8c4161fb9b4b146c34cf88595a1e0503ca2cf44c86bc4",
|
||||
"abcdefgh",
|
||||
},
|
||||
{
|
||||
"546db09160636e98405fbec8464a84b6464b32514db259e235eae0445346ffb7",
|
||||
"abcdefghi",
|
||||
},
|
||||
{
|
||||
"27635cf23fdf8a10f4cb2c52ade13038c38718c6d7ca716bfe726111a57ad201",
|
||||
"abcdefghij",
|
||||
},
|
||||
{
|
||||
"ae0d8e0e7c0336f0c3a72cefa4f24b625a6a460417a921d066058a0b81e23429",
|
||||
"Discard medicine more than two years old.",
|
||||
},
|
||||
{
|
||||
"eeb56d02cf638f87ea8f11ebd5b0201afcece984d87be458578d3cfb51978f1b",
|
||||
"He who has a shady past knows that nice guys finish last.",
|
||||
},
|
||||
{
|
||||
"dc640bf529608a381ea7065ecbcd0443b95f6e4c008de6e134aff1d36bd4b9d8",
|
||||
"I wouldn't marry him with a ten foot pole.",
|
||||
},
|
||||
{
|
||||
"42e54375e60535eb07fc15c6350e10f2c22526f84db1d6f6bba925e154486f33",
|
||||
"Free! Free!/A trip/to Mars/for 900/empty jars/Burma Shave",
|
||||
},
|
||||
{
|
||||
"4ed6aa9b88c84afbf928710b03714de69e2ad967c6a78586069adcb4c470d150",
|
||||
"The days of the digital watch are numbered. -Tom Stoppard",
|
||||
},
|
||||
{
|
||||
"590c24d1877c1919fad12fe01a8796999e9d20cfbf9bc9bc72fa0bd69f0b04dd",
|
||||
"Nepal premier won't resign.",
|
||||
},
|
||||
{
|
||||
"37d270687ee8ebafcd3c1a32f56e1e1304b3c93f252cb637d57a66d59c475eca",
|
||||
"For every action there is an equal and opposite government program.",
|
||||
},
|
||||
{
|
||||
"306828fd89278838bb1c544c3032a1fd25ea65c40bba586437568828a5fbe944",
|
||||
"His money is twice tainted: 'taint yours and 'taint mine.",
|
||||
},
|
||||
{
|
||||
"49965777eac71faf1e2fb0f6b239ba2fae770977940fd827bcbfe15def6ded53",
|
||||
"There is no reason for any individual to have a computer in their home. -Ken Olsen, 1977",
|
||||
},
|
||||
{
|
||||
"df99ee4e87dd3fb07922dee7735997bbae8f26db20c86137d4219fc4a37b77c3",
|
||||
"It's a tiny change to the code and not completely disgusting. - Bob Manchek",
|
||||
},
|
||||
{
|
||||
"920667c84a15b5ee3df4620169f5c0ec930cea0c580858e50e68848871ed65b4",
|
||||
"size: a.out: bad magic",
|
||||
},
|
||||
{
|
||||
"5e817fe20848a4a3932db68e90f8d54ec1b09603f0c99fdc051892b776acd462",
|
||||
"The major problem is with sendmail. -Mark Horton",
|
||||
},
|
||||
{
|
||||
"6a9d47248ed38852f5f4b2e37e7dfad0ce8d1da86b280feef94ef267e468cff2",
|
||||
"Give me a rock, paper and scissors and I will move the world. CCFestoon",
|
||||
},
|
||||
{
|
||||
"2e7aa1b362c94efdbff582a8bd3f7f61c8ce4c25bbde658ef1a7ae1010e2126f",
|
||||
"If the enemy is within range, then so are you.",
|
||||
},
|
||||
{
|
||||
"e6729d51240b1e1da76d822fd0c55c75e409bcb525674af21acae1f11667c8ca",
|
||||
"It's well we cannot hear the screams/That we create in others' dreams.",
|
||||
},
|
||||
{
|
||||
"09945e4d2743eb669f85e4097aa1cc39ea680a0b2ae2a65a42a5742b3b809610",
|
||||
"You remind me of a TV show, but that's all right: I watch it anyway.",
|
||||
},
|
||||
{
|
||||
"1018d8b2870a974887c5174360f0fbaf27958eef15b24522a605c5dae4ae0845",
|
||||
"C is as portable as Stonehedge!!",
|
||||
},
|
||||
{
|
||||
"97c76b83c6645c78c261dcdc55d44af02d9f1df8057f997fd08c310c903624d5",
|
||||
"Even if I could be Shakespeare, I think I should still choose to be Faraday. - A. Huxley",
|
||||
},
|
||||
{
|
||||
"6bcbf25469e9544c5b5806b24220554fedb6695ba9b1510a76837414f7adb113",
|
||||
"The fugacity of a constituent in a mixture of gases at a given temperature is proportional to its mole fraction. Lewis-Randall Rule",
|
||||
},
|
||||
{
|
||||
"1041988b06835481f0845be2a54f4628e1da26145b2de7ad1be3bb643cef9d4f",
|
||||
"How can you write a big system without C++? -Paul Glick",
|
||||
},
|
||||
}
|
||||
// Ensure the hash function which returns a byte slice returns the
|
||||
// expected result.
|
||||
for _, test := range tests {
|
||||
h := fmt.Sprintf("%x", DoubleHashB([]byte(test.in)))
|
||||
if h != test.out {
|
||||
t.Errorf(
|
||||
"DoubleHashB(%q) = %s, want %s", test.in, h,
|
||||
test.out,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
// Ensure the hash function which returns a Hash returns the expected
|
||||
// result.
|
||||
for _, test := range tests {
|
||||
hash := DoubleHashH([]byte(test.in))
|
||||
h := fmt.Sprintf("%x", hash[:])
|
||||
if h != test.out {
|
||||
t.Errorf(
|
||||
"DoubleHashH(%q) = %s, want %s", test.in, h,
|
||||
test.out,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
16
pkg/crypto/ec/ciphering.go
Normal file
@@ -0,0 +1,16 @@
|
||||
// Copyright (c) 2015-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// GenerateSharedSecret generates a shared secret based on a secret key and a
|
||||
// public key using Diffie-Hellman key exchange (ECDH) (RFC 4753).
|
||||
// RFC5903 Section 9 states we should only return x.
|
||||
func GenerateSharedSecret(privkey *SecretKey, pubkey *PublicKey) []byte {
|
||||
return secp256k1.GenerateSharedSecret(privkey, pubkey)
|
||||
}
|
||||
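A hedged usage sketch for GenerateSharedSecret above; the import path and alias are assumptions based on this commit's module layout. Both parties should derive the same 32-byte x coordinate.

package main

import (
	"bytes"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec" // assumed import path for package btcec
)

func main() {
	alice, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	bob, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	// Each party combines its own secret key with the other party's public
	// key; per RFC5903 only the x coordinate of the shared point is returned.
	s1 := btcec.GenerateSharedSecret(alice, bob.PubKey())
	s2 := btcec.GenerateSharedSecret(bob, alice.PubKey())
	fmt.Println("shared secrets match:", bytes.Equal(s1, s2))
}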
32
pkg/crypto/ec/ciphering_test.go
Normal file
@@ -0,0 +1,32 @@
|
||||
// Copyright (c) 2015-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
)
|
||||
|
||||
func TestGenerateSharedSecret(t *testing.T) {
|
||||
privKey1, err := NewSecretKey()
|
||||
if err != nil {
|
||||
t.Errorf("secret key generation error: %s", err)
|
||||
return
|
||||
}
|
||||
privKey2, err := NewSecretKey()
|
||||
if err != nil {
|
||||
t.Errorf("secret key generation error: %s", err)
|
||||
return
|
||||
}
|
||||
secret1 := GenerateSharedSecret(privKey1, privKey2.PubKey())
|
||||
secret2 := GenerateSharedSecret(privKey2, privKey1.PubKey())
|
||||
if !utils.FastEqual(secret1, secret2) {
|
||||
t.Errorf(
|
||||
"ECDH failed, secrets mismatch - first: %x, second: %x",
|
||||
secret1, secret2,
|
||||
)
|
||||
}
|
||||
}
|
||||
111
pkg/crypto/ec/curve.go
Normal file
@@ -0,0 +1,111 @@
|
||||
// Copyright (c) 2015-2021 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// JacobianPoint is an element of the group formed by the secp256k1 curve in
|
||||
// Jacobian projective coordinates and thus represents a point on the curve.
|
||||
type JacobianPoint = secp256k1.JacobianPoint
|
||||
|
||||
// infinityPoint is the jacobian representation of the point at infinity.
|
||||
var infinityPoint JacobianPoint
|
||||
|
||||
// MakeJacobianPoint returns a Jacobian point with the provided X, Y, and Z
|
||||
// coordinates.
|
||||
func MakeJacobianPoint(x, y, z *FieldVal) JacobianPoint {
|
||||
return secp256k1.MakeJacobianPoint(x, y, z)
|
||||
}
|
||||
|
||||
// AddNonConst adds the passed Jacobian points together and stores the result
|
||||
// in the provided result param in *non-constant* time.
|
||||
func AddNonConst(p1, p2, result *JacobianPoint) {
|
||||
secp256k1.AddNonConst(p1, p2, result)
|
||||
}
|
||||
|
||||
// DecompressY attempts to calculate the Y coordinate for the given X
|
||||
// coordinate such that the result pair is a point on the secp256k1 curve. It
|
||||
// adjusts Y based on the desired oddness and returns whether or not it was
|
||||
// successful since not all X coordinates are valid.
|
||||
//
|
||||
// The magnitude of the provided X coordinate field val must be a max of 8 for
|
||||
// a correct result. The resulting Y field val will have a max magnitude of 2.
|
||||
func DecompressY(x *FieldVal, odd bool, resultY *FieldVal) bool {
|
||||
return secp256k1.DecompressY(x, odd, resultY)
|
||||
}
|
||||
|
||||
// DoubleNonConst doubles the passed Jacobian point and stores the result in
|
||||
// the provided result parameter in *non-constant* time.
|
||||
//
|
||||
// NOTE: The point must be normalized for this function to return the correct
|
||||
// result. The resulting point will be normalized.
|
||||
func DoubleNonConst(p, result *JacobianPoint) {
|
||||
secp256k1.DoubleNonConst(p, result)
|
||||
}
|
||||
|
||||
// ScalarBaseMultNonConst multiplies k*G where G is the base point of the group
|
||||
// and k is a big endian integer. The result is stored in Jacobian coordinates
|
||||
// (x1, y1, z1).
|
||||
//
|
||||
// NOTE: The resulting point will be normalized.
|
||||
func ScalarBaseMultNonConst(k *ModNScalar, result *JacobianPoint) {
|
||||
secp256k1.ScalarBaseMultNonConst(k, result)
|
||||
}
|
||||
|
||||
// ScalarMultNonConst multiplies k*P where k is a big endian integer modulo the
|
||||
// curve order and P is a point in Jacobian projective coordinates and stores
|
||||
// the result in the provided Jacobian point.
|
||||
//
|
||||
// NOTE: The point must be normalized for this function to return the correct
|
||||
// result. The resulting point will be normalized.
|
||||
func ScalarMultNonConst(k *ModNScalar, point, result *JacobianPoint) {
|
||||
secp256k1.ScalarMultNonConst(k, point, result)
|
||||
}
|
||||
|
||||
// ParseJacobian parses a byte slice point as a secp256k1.PublicKey and returns the
|
||||
// pubkey as a JacobianPoint. If the encoding's leading byte is zero, the infinityPoint
|
||||
// is returned.
|
||||
func ParseJacobian(point []byte) (JacobianPoint, error) {
|
||||
var result JacobianPoint
|
||||
if len(point) != 33 {
|
||||
str := fmt.Sprintf(
|
||||
"invalid nonce: invalid length: %v",
|
||||
len(point),
|
||||
)
|
||||
return JacobianPoint{}, makeError(secp256k1.ErrPubKeyInvalidLen, str)
|
||||
}
|
||||
if point[0] == 0x00 {
|
||||
return infinityPoint, nil
|
||||
}
|
||||
noncePk, err := secp256k1.ParsePubKey(point)
|
||||
if err != nil {
|
||||
return JacobianPoint{}, err
|
||||
}
|
||||
noncePk.AsJacobian(&result)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// JacobianToByteSlice converts the passed JacobianPoint to a public key
|
||||
// and serializes that to a byte slice. If the JacobianPoint is the infinity
|
||||
// point, a zero slice is returned.
|
||||
func JacobianToByteSlice(point JacobianPoint) []byte {
|
||||
if point.X == infinityPoint.X && point.Y == infinityPoint.Y {
|
||||
return make([]byte, 33)
|
||||
}
|
||||
point.ToAffine()
|
||||
return NewPublicKey(
|
||||
&point.X, &point.Y,
|
||||
).SerializeCompressed()
|
||||
}
|
||||
|
||||
// GeneratorJacobian sets the passed JacobianPoint to the Generator Point.
|
||||
func GeneratorJacobian(jacobian *JacobianPoint) {
|
||||
var k ModNScalar
|
||||
k.SetInt(1)
|
||||
ScalarBaseMultNonConst(&k, jacobian)
|
||||
}
|
||||
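A small sketch tying the helpers above together; it would live in a _test.go file inside package btcec, since it only uses identifiers defined in this file, plus the standard library bytes and fmt imports.

// Sketch: the generator point survives a round trip through the 33-byte
// compressed encoding produced by JacobianToByteSlice.
func ExampleGeneratorJacobian() {
	var g JacobianPoint
	GeneratorJacobian(&g) // g = 1*G, normalized
	ser := JacobianToByteSlice(g)
	back, err := ParseJacobian(ser)
	if err != nil {
		panic(err)
	}
	fmt.Println(bytes.Equal(ser, JacobianToByteSlice(back)))
	// Output: true
}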
19
pkg/crypto/ec/doc.go
Normal file
@@ -0,0 +1,19 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package btcec implements support for the elliptic curves needed for bitcoin.
|
||||
//
|
||||
// Bitcoin uses elliptic curve cryptography over Koblitz curves
|
||||
// (specifically secp256k1) for cryptographic functions. See
|
||||
// http://www.secg.org/collateral/sec2_final.pdf for details on the
|
||||
// standard.
|
||||
//
|
||||
// This package provides the data structures and functions implementing the
|
||||
// crypto/elliptic Curve interface in order to permit using these curves
|
||||
// with the standard crypto/ecdsa package provided with go. Helper
|
||||
// functionality is provided to parse signatures and public keys from
|
||||
// standard formats. It was designed for use with btcd, but should be
|
||||
// general enough for other uses of elliptic curve crypto. It was originally based
|
||||
// on some initial work by ThePiachu, but has significantly diverged since then.
|
||||
package btcec
|
||||
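The crypto/ecdsa interoperability described above can be sketched as follows. Note that S256() does not appear in this diff and is assumed to be carried over from upstream btcec, and the import path is likewise an assumption based on this commit's layout.

package main

import (
	"crypto/ecdsa"
	"crypto/rand"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec" // assumed import path
)

func main() {
	// S256() (assumed from upstream btcec) exposes secp256k1 as a standard
	// crypto/elliptic Curve, so the stock crypto/ecdsa package can use it.
	key, err := ecdsa.GenerateKey(btcec.S256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	fmt.Println("generated key on curve:", key.Curve.Params().Name)
}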
28
pkg/crypto/ec/ecdsa/README.md
Normal file
@@ -0,0 +1,28 @@
|
||||
ecdsa
|
||||
=====
|
||||
|
||||
[ISC License](http://copyfree.org)
|
||||
[GoDoc](https://pkg.go.dev/mleku.online/git/ec/secp/ecdsa)
|
||||
|
||||
Package ecdsa provides secp256k1-optimized ECDSA signing and verification.
|
||||
|
||||
This package provides data structures and functions necessary to produce and
|
||||
verify deterministic canonical signatures in accordance with RFC6979 and
|
||||
BIP0062, optimized specifically for the secp256k1 curve using the Elliptic Curve
|
||||
Digital Signature Algorithm (ECDSA), as defined in FIPS 186-3. See
|
||||
https://www.secg.org/sec2-v2.pdf (also found here
|
||||
at [sec2-v2.pdf](../sec2-v2.pdf)) for details on the secp256k1 standard.
|
||||
|
||||
It also provides functions to parse and serialize the ECDSA signatures with the
|
||||
more strict Distinguished Encoding Rules (DER) of ISO/IEC 8825-1 and some
|
||||
additional restrictions specific to secp256k1.
|
||||
|
||||
In addition, it supports a custom "compact" signature format which allows
|
||||
efficient recovery of the public key from a given valid signature and message
|
||||
hash combination.
|
||||
|
||||
A comprehensive suite of tests is provided to ensure proper functionality.
|
||||
|
||||
## License
|
||||
|
||||
Package ecdsa is licensed under the [copyfree](http://copyfree.org) ISC License.
|
||||
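A brief signing and verification sketch, adapted from the commented-out example test later in this commit but hashing with SHA-256 via the chainhash helpers; the import paths are assumptions based on this commit's layout.

```go
package main

import (
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"     // assumed import path
	"next.orly.dev/pkg/crypto/ec/chainhash" // assumed import path
	"next.orly.dev/pkg/crypto/ec/ecdsa"     // assumed import path
)

func main() {
	secKey, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	msgHash := chainhash.HashB([]byte("test message"))
	// Deterministic RFC6979 signature with canonical low-S DER serialization.
	sig := ecdsa.Sign(secKey, msgHash)
	fmt.Printf("DER signature (%d bytes): %x\n", len(sig.Serialize()), sig.Serialize())
	fmt.Println("verified:", sig.Verify(msgHash, secKey.PubKey()))
}
```

SignCompact and RecoverCompact (exercised in the benchmarks below) provide the compact, public-key-recoverable form of the same signature.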
169
pkg/crypto/ec/ecdsa/bench_test.go
Normal file
@@ -0,0 +1,169 @@
|
||||
// Copyright 2013-2016 The btcsuite developers
|
||||
// Copyright (c) 2015-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ecdsa
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// hexToModNScalar converts the passed hex string into a ModNScalar and will
|
||||
// panic if there is an error. This is only provided for the hard-coded
|
||||
// constants so errors in the source code can be detected. It will only (and
|
||||
// must only) be called with hard-coded values.
|
||||
func hexToModNScalar(s string) *secp256k1.ModNScalar {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var scalar secp256k1.ModNScalar
|
||||
if overflow := scalar.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod N scalar: " + s)
|
||||
}
|
||||
return &scalar
|
||||
}
|
||||
|
||||
// hexToFieldVal converts the passed hex string into a FieldVal and will panic
|
||||
// if there is an error. This is only provided for the hard-coded constants so
|
||||
// errors in the source code can be detected. It will only (and must only) be
|
||||
// called with hard-coded values.
|
||||
func hexToFieldVal(s string) *secp256k1.FieldVal {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var f secp256k1.FieldVal
|
||||
if overflow := f.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod P: " + s)
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
// BenchmarkSigVerify benchmarks how long it takes the secp256k1 curve to
|
||||
// verify signatures.
|
||||
func BenchmarkSigVerify(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
// Secret key: 9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d
|
||||
pubKey := secp256k1.NewPublicKey(
|
||||
hexToFieldVal("d2e670a19c6d753d1a6d8b20bd045df8a08fb162cf508956c31268c6d81ffdab"),
|
||||
hexToFieldVal("ab65528eefbb8057aa85d597258a3fbd481a24633bc9b47a9aa045c91371de52"),
|
||||
)
|
||||
// Double sha256 of by{0x01, 0x02, 0x03, 0x04}
|
||||
msgHash := hexToBytes("8de472e2399610baaa7f84840547cd409434e31f5d3bd71e4d947f283874f9c0")
|
||||
sig := NewSignature(
|
||||
hexToModNScalar("fef45d2892953aa5bbcdb057b5e98b208f1617a7498af7eb765574e29b5d9c2c"),
|
||||
hexToModNScalar("d47563f52aac6b04b55de236b7c515eb9311757db01e02cff079c3ca6efb063f"),
|
||||
)
|
||||
if !sig.Verify(msgHash, pubKey) {
|
||||
b.Errorf("Signature failed to verify")
|
||||
return
|
||||
}
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
sig.Verify(msgHash, pubKey)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkSign benchmarks how long it takes to sign a message.
|
||||
func BenchmarkSign(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
d := hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
secKey := secp256k1.NewSecretKey(d)
|
||||
// blake256 of by{0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
signRFC6979(secKey, msgHash)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkSigSerialize benchmarks how long it takes to serialize a typical
|
||||
// signature with the strict DER encoding.
|
||||
func BenchmarkSigSerialize(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
// Secret key: 9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d
|
||||
// Signature for double sha256 of by{0x01, 0x02, 0x03, 0x04}.
|
||||
sig := NewSignature(
|
||||
hexToModNScalar("fef45d2892953aa5bbcdb057b5e98b208f1617a7498af7eb765574e29b5d9c2c"),
|
||||
hexToModNScalar("d47563f52aac6b04b55de236b7c515eb9311757db01e02cff079c3ca6efb063f"),
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
sig.Serialize()
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkNonceRFC6979 benchmarks how long it takes to generate a
|
||||
// deterministic nonce according to RFC6979.
|
||||
func BenchmarkNonceRFC6979(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
// Secret key: 9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d
|
||||
// X: d2e670a19c6d753d1a6d8b20bd045df8a08fb162cf508956c31268c6d81ffdab
|
||||
// Y: ab65528eefbb8057aa85d597258a3fbd481a24633bc9b47a9aa045c91371de52
|
||||
secKeyStr := "9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d"
|
||||
secKey := hexToBytes(secKeyStr)
|
||||
// BLAKE-256 of by{0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var noElideNonce *secp256k1.ModNScalar
|
||||
for i := 0; i < b.N; i++ {
|
||||
noElideNonce = secp256k1.NonceRFC6979(secKey, msgHash, nil, nil, 0)
|
||||
}
|
||||
_ = noElideNonce
|
||||
}
|
||||
|
||||
// BenchmarkSignCompact benchmarks how long it takes to produce a compact
|
||||
// signature for a message.
|
||||
func BenchmarkSignCompact(b *testing.B) {
|
||||
d := hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
secKey := secp256k1.NewSecretKey(d)
|
||||
// blake256 of by{0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = SignCompact(secKey, msgHash, true)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkRecoverCompact benchmarks how long it takes to recover a public key
|
||||
// given a compact signature and message.
|
||||
func BenchmarkRecoverCompact(b *testing.B) {
|
||||
// Secret key: 9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d
|
||||
wantPubKey := secp256k1.NewPublicKey(
|
||||
hexToFieldVal("d2e670a19c6d753d1a6d8b20bd045df8a08fb162cf508956c31268c6d81ffdab"),
|
||||
hexToFieldVal("ab65528eefbb8057aa85d597258a3fbd481a24633bc9b47a9aa045c91371de52"),
|
||||
)
|
||||
compactSig := hexToBytes(
|
||||
"205978b7896bc71676ba2e459882a8f52e1299449596c4f" +
|
||||
"93c59bf1fbfa2f9d3b76ecd0c99406f61a6de2bb5a8937c061c176ecf381d0231e0d" +
|
||||
"af73b922c8952c7",
|
||||
)
|
||||
// blake256 of by{0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
// Ensure a valid compact signature is being benchmarked.
|
||||
pubKey, wasCompressed, err := RecoverCompact(compactSig, msgHash)
|
||||
if err != nil {
|
||||
b.Fatalf("unexpected err: %v", err)
|
||||
}
|
||||
if !wasCompressed {
|
||||
b.Fatal("recover claims uncompressed pubkey")
|
||||
}
|
||||
if !pubKey.IsEqual(wantPubKey) {
|
||||
b.Fatal("recover returned unexpected pubkey")
|
||||
}
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, _, _ = RecoverCompact(compactSig, msgHash)
|
||||
}
|
||||
}
|
||||
40
pkg/crypto/ec/ecdsa/doc.go
Normal file
@@ -0,0 +1,40 @@
|
||||
// Copyright (c) 2020-2023 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package ecdsa provides secp256k1-optimized ECDSA signing and verification.
|
||||
//
|
||||
// This package provides data structures and functions necessary to produce and
|
||||
// verify deterministic canonical signatures in accordance with RFC6979 and
|
||||
// BIP0062, optimized specifically for the secp256k1 curve using the Elliptic Curve
|
||||
// Digital Signature Algorithm (ECDSA), as defined in FIPS 186-3. See
|
||||
// https://www.secg.org/sec2-v2.pdf for details on the secp256k1 standard.
|
||||
//
|
||||
// It also provides functions to parse and serialize the ECDSA signatures with the
|
||||
// more strict Distinguished Encoding Rules (DER) of ISO/IEC 8825-1 and some
|
||||
// additional restrictions specific to secp256k1.
|
||||
//
|
||||
// In addition, it supports a custom "compact" signature format which allows
|
||||
// efficient recovery of the public key from a given valid signature and message
|
||||
// hash combination.
|
||||
//
|
||||
// A comprehensive suite of tests is provided to ensure proper functionality.
|
||||
//
|
||||
// # ECDSA use in Decred
|
||||
//
|
||||
// At the time of this writing, ECDSA signatures are heavily used for proving coin
|
||||
// ownership in Decred as the vast majority of transactions consist of what is
|
||||
// effectively transferring ownership of coins to a public key associated with a
|
||||
// secret key only known to the recipient of the coins along with an encumbrance
|
||||
// that requires an ECDSA signature that proves the new owner possesses the secret
|
||||
// key without actually revealing it.
|
||||
//
|
||||
// # Errors
|
||||
//
|
||||
// The errors returned by this package are of type ecdsa.Error and fully support
|
||||
// the standard library errors.Is and errors.As functions. This allows the caller
|
||||
// to programmatically determine the specific error by examining the ErrorKind
|
||||
// field of the type asserted ecdsa.Error while still providing rich error messages
|
||||
// with contextual information. See ErrorKind in the package documentation for a
|
||||
// full list.
|
||||
package ecdsa
|
||||
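A minimal sketch of the error matching described above. ParseDERSignature is referenced by the commented-out example test elsewhere in this commit, so its availability is assumed, as is the import path.

package main

import (
	"errors"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/ecdsa" // assumed import path
)

func main() {
	// An empty input cannot be a DER signature, so parsing is expected to
	// fail with an error that matches ErrSigTooShort via errors.Is.
	_, err := ecdsa.ParseDERSignature([]byte{})
	fmt.Println(errors.Is(err, ecdsa.ErrSigTooShort))
	// The human-readable description is still available via err.Error().
	fmt.Println(err)
}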
106
pkg/crypto/ec/ecdsa/error.go
Normal file
@@ -0,0 +1,106 @@
|
||||
// Copyright (c) 2020-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ecdsa
|
||||
|
||||
// ErrorKind identifies a kind of error. It has full support for
|
||||
// errors.Is and errors.As, so the caller can directly check against
|
||||
// an error kind when determining the reason for an error.
|
||||
type ErrorKind string
|
||||
|
||||
// These constants are used to identify a specific Error.
|
||||
const (
|
||||
// ErrSigTooShort is returned when a signature that should be a DER
|
||||
// signature is too short.
|
||||
ErrSigTooShort = ErrorKind("ErrSigTooShort")
|
||||
// ErrSigTooLong is returned when a signature that should be a DER signature
|
||||
// is too long.
|
||||
ErrSigTooLong = ErrorKind("ErrSigTooLong")
|
||||
// ErrSigInvalidSeqID is returned when a signature that should be a DER
|
||||
// signature does not have the expected ASN.1 sequence ID.
|
||||
ErrSigInvalidSeqID = ErrorKind("ErrSigInvalidSeqID")
|
||||
// ErrSigInvalidDataLen is returned when a signature that should be a DER
|
||||
// signature does not specify the correct number of remaining bytes for the
|
||||
// R and S portions.
|
||||
ErrSigInvalidDataLen = ErrorKind("ErrSigInvalidDataLen")
|
||||
// ErrSigMissingSTypeID is returned when a signature that should be a DER
|
||||
// signature does not provide the ASN.1 type ID for S.
|
||||
ErrSigMissingSTypeID = ErrorKind("ErrSigMissingSTypeID")
|
||||
// ErrSigMissingSLen is returned when a signature that should be a DER
|
||||
// signature does not provide the length of S.
|
||||
ErrSigMissingSLen = ErrorKind("ErrSigMissingSLen")
|
||||
// ErrSigInvalidSLen is returned when a signature that should be a DER
|
||||
// signature does not specify the correct number of bytes for the S portion.
|
||||
ErrSigInvalidSLen = ErrorKind("ErrSigInvalidSLen")
|
||||
// ErrSigInvalidRIntID is returned when a signature that should be a DER
|
||||
// signature does not have the expected ASN.1 integer ID for R.
|
||||
ErrSigInvalidRIntID = ErrorKind("ErrSigInvalidRIntID")
|
||||
// ErrSigZeroRLen is returned when a signature that should be a DER
|
||||
// signature has an R length of zero.
|
||||
ErrSigZeroRLen = ErrorKind("ErrSigZeroRLen")
|
||||
// ErrSigNegativeR is returned when a signature that should be a DER
|
||||
// signature has a negative value for R.
|
||||
ErrSigNegativeR = ErrorKind("ErrSigNegativeR")
|
||||
// ErrSigTooMuchRPadding is returned when a signature that should be a DER
|
||||
// signature has too much padding for R.
|
||||
ErrSigTooMuchRPadding = ErrorKind("ErrSigTooMuchRPadding")
|
||||
// ErrSigRIsZero is returned when a signature has R set to the value zero.
|
||||
ErrSigRIsZero = ErrorKind("ErrSigRIsZero")
|
||||
// ErrSigRTooBig is returned when a signature has R with a value that is
|
||||
// greater than or equal to the group order.
|
||||
ErrSigRTooBig = ErrorKind("ErrSigRTooBig")
|
||||
// ErrSigInvalidSIntID is returned when a signature that should be a DER
|
||||
// signature does not have the expected ASN.1 integer ID for S.
|
||||
ErrSigInvalidSIntID = ErrorKind("ErrSigInvalidSIntID")
|
||||
// ErrSigZeroSLen is returned when a signature that should be a DER
|
||||
// signature has an S length of zero.
|
||||
ErrSigZeroSLen = ErrorKind("ErrSigZeroSLen")
|
||||
// ErrSigNegativeS is returned when a signature that should be a DER
|
||||
// signature has a negative value for S.
|
||||
ErrSigNegativeS = ErrorKind("ErrSigNegativeS")
|
||||
// ErrSigTooMuchSPadding is returned when a signature that should be a DER
|
||||
// signature has too much padding for S.
|
||||
ErrSigTooMuchSPadding = ErrorKind("ErrSigTooMuchSPadding")
|
||||
// ErrSigSIsZero is returned when a signature has S set to the value zero.
|
||||
ErrSigSIsZero = ErrorKind("ErrSigSIsZero")
|
||||
// ErrSigSTooBig is returned when a signature has S with a value that is
|
||||
// greater than or equal to the group order.
|
||||
ErrSigSTooBig = ErrorKind("ErrSigSTooBig")
|
||||
// ErrSigInvalidLen is returned when a signature that should be a compact
|
||||
// signature is not the required length.
|
||||
ErrSigInvalidLen = ErrorKind("ErrSigInvalidLen")
|
||||
// ErrSigInvalidRecoveryCode is returned when a signature that should be a
|
||||
// compact signature has an invalid value for the public key recovery code.
|
||||
ErrSigInvalidRecoveryCode = ErrorKind("ErrSigInvalidRecoveryCode")
|
||||
// ErrSigOverflowsPrime is returned when a signature that should be a
|
||||
// compact signature has the overflow bit set but adding the order to it
|
||||
// would overflow the underlying field prime.
|
||||
ErrSigOverflowsPrime = ErrorKind("ErrSigOverflowsPrime")
|
||||
// ErrPointNotOnCurve is returned when attempting to recover a public key
|
||||
// from a compact signature results in a point that is not on the elliptic
|
||||
// curve.
|
||||
ErrPointNotOnCurve = ErrorKind("ErrPointNotOnCurve")
|
||||
)
|
||||
|
||||
// Error satisfies the error interface and prints human-readable errors.
|
||||
func (e ErrorKind) Error() string { return string(e) }
|
||||
|
||||
// Error identifies an error related to an ECDSA signature. It has full
|
||||
// support for errors.Is and errors.As, so the caller can ascertain the
|
||||
// specific reason for the error by checking the underlying error.
|
||||
type Error struct {
|
||||
Err error
|
||||
Description string
|
||||
}
|
||||
|
||||
// Error satisfies the error interface and prints human-readable errors.
|
||||
func (e Error) Error() string { return e.Description }
|
||||
|
||||
// Unwrap returns the underlying wrapped error.
|
||||
func (e Error) Unwrap() error { return e.Err }
|
||||
|
||||
// signatureError creates an Error given a set of arguments.
|
||||
func signatureError(kind ErrorKind, desc string) Error {
|
||||
return Error{Err: kind, Description: desc}
|
||||
}
|
||||
154
pkg/crypto/ec/ecdsa/error_test.go
Normal file
@@ -0,0 +1,154 @@
|
||||
// Copyright (c) 2020-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ecdsa
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestErrorKindStringer tests the stringized output for the ErrorKind type.
|
||||
func TestErrorKindStringer(t *testing.T) {
|
||||
tests := []struct {
|
||||
in ErrorKind
|
||||
want string
|
||||
}{
|
||||
{ErrSigTooShort, "ErrSigTooShort"},
|
||||
{ErrSigTooLong, "ErrSigTooLong"},
|
||||
{ErrSigInvalidSeqID, "ErrSigInvalidSeqID"},
|
||||
{ErrSigInvalidDataLen, "ErrSigInvalidDataLen"},
|
||||
{ErrSigMissingSTypeID, "ErrSigMissingSTypeID"},
|
||||
{ErrSigMissingSLen, "ErrSigMissingSLen"},
|
||||
{ErrSigInvalidSLen, "ErrSigInvalidSLen"},
|
||||
{ErrSigInvalidRIntID, "ErrSigInvalidRIntID"},
|
||||
{ErrSigZeroRLen, "ErrSigZeroRLen"},
|
||||
{ErrSigNegativeR, "ErrSigNegativeR"},
|
||||
{ErrSigTooMuchRPadding, "ErrSigTooMuchRPadding"},
|
||||
{ErrSigRIsZero, "ErrSigRIsZero"},
|
||||
{ErrSigRTooBig, "ErrSigRTooBig"},
|
||||
{ErrSigInvalidSIntID, "ErrSigInvalidSIntID"},
|
||||
{ErrSigZeroSLen, "ErrSigZeroSLen"},
|
||||
{ErrSigNegativeS, "ErrSigNegativeS"},
|
||||
{ErrSigTooMuchSPadding, "ErrSigTooMuchSPadding"},
|
||||
{ErrSigSIsZero, "ErrSigSIsZero"},
|
||||
{ErrSigSTooBig, "ErrSigSTooBig"},
|
||||
{ErrSigInvalidLen, "ErrSigInvalidLen"},
|
||||
{ErrSigInvalidRecoveryCode, "ErrSigInvalidRecoveryCode"},
|
||||
{ErrSigOverflowsPrime, "ErrSigOverflowsPrime"},
|
||||
{ErrPointNotOnCurve, "ErrPointNotOnCurve"},
|
||||
}
|
||||
|
||||
for i, test := range tests {
|
||||
result := test.in.Error()
|
||||
if result != test.want {
|
||||
t.Errorf("#%d: got: %s want: %s", i, result, test.want)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestError tests the error output for the Error type.
|
||||
func TestError(t *testing.T) {
|
||||
tests := []struct {
|
||||
in Error
|
||||
want string
|
||||
}{
|
||||
{
|
||||
Error{Description: "some error"},
|
||||
"some error",
|
||||
}, {
|
||||
Error{Description: "human-readable error"},
|
||||
"human-readable error",
|
||||
},
|
||||
}
|
||||
for i, test := range tests {
|
||||
result := test.in.Error()
|
||||
if result != test.want {
|
||||
t.Errorf("#%d: got: %s want: %s", i, result, test.want)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestErrorKindIsAs ensures both ErrorKind and Error can be identified as being
|
||||
// a specific error kind via errors.Is and unwrapped via errors.As.
|
||||
func TestErrorKindIsAs(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
err error
|
||||
target error
|
||||
wantMatch bool
|
||||
wantAs ErrorKind
|
||||
}{
|
||||
{
|
||||
name: "ErrSigTooShort == ErrSigTooShort",
|
||||
err: ErrSigTooShort,
|
||||
target: ErrSigTooShort,
|
||||
wantMatch: true,
|
||||
wantAs: ErrSigTooShort,
|
||||
}, {
|
||||
name: "Error.ErrSigTooShort == ErrSigTooShort",
|
||||
err: signatureError(ErrSigTooShort, ""),
|
||||
target: ErrSigTooShort,
|
||||
wantMatch: true,
|
||||
wantAs: ErrSigTooShort,
|
||||
}, {
|
||||
name: "Error.ErrSigTooShort == Error.ErrSigTooShort",
|
||||
err: signatureError(ErrSigTooShort, ""),
|
||||
target: signatureError(ErrSigTooShort, ""),
|
||||
wantMatch: true,
|
||||
wantAs: ErrSigTooShort,
|
||||
}, {
|
||||
name: "ErrSigTooLong != ErrSigTooShort",
|
||||
err: ErrSigTooLong,
|
||||
target: ErrSigTooShort,
|
||||
wantMatch: false,
|
||||
wantAs: ErrSigTooLong,
|
||||
}, {
|
||||
name: "Error.ErrSigTooLong != ErrSigTooShort",
|
||||
err: signatureError(ErrSigTooLong, ""),
|
||||
target: ErrSigTooShort,
|
||||
wantMatch: false,
|
||||
wantAs: ErrSigTooLong,
|
||||
}, {
|
||||
name: "ErrSigTooLong != Error.ErrSigTooShort",
|
||||
err: ErrSigTooLong,
|
||||
target: signatureError(ErrSigTooShort, ""),
|
||||
wantMatch: false,
|
||||
wantAs: ErrSigTooLong,
|
||||
}, {
|
||||
name: "Error.ErrSigTooLong != Error.ErrSigTooShort",
|
||||
err: signatureError(ErrSigTooLong, ""),
|
||||
target: signatureError(ErrSigTooShort, ""),
|
||||
wantMatch: false,
|
||||
wantAs: ErrSigTooLong,
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
// Ensure the error matches or not depending on the expected result.
|
||||
result := errors.Is(test.err, test.target)
|
||||
if result != test.wantMatch {
|
||||
t.Errorf(
|
||||
"%s: incorrect error identification -- got %v, want %v",
|
||||
test.name, result, test.wantMatch,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Ensure the underlying error kind can be unwrapped and is the
|
||||
// expected code.
|
||||
var kind ErrorKind
|
||||
if !errors.As(test.err, &kind) {
|
||||
t.Errorf("%s: unable to unwrap to error", test.name)
|
||||
continue
|
||||
}
|
||||
if !errors.Is(kind, test.wantAs) {
|
||||
t.Errorf(
|
||||
"%s: unexpected unwrapped error -- got %v, want %v",
|
||||
test.name, kind, test.wantAs,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
79
pkg/crypto/ec/ecdsa/example_test.go
Normal file
@@ -0,0 +1,79 @@
|
||||
// Copyright (c) 2014 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// TODO: change this to work with sha256
|
||||
|
||||
package ecdsa_test
|
||||
|
||||
// // This example demonstrates signing a message with a secp256k1 secret key that
|
||||
// // is first parsed from raw bytes and serializing the generated signature.
|
||||
// func ExampleSign() {
|
||||
// // Decode a hex-encoded secret key.
|
||||
// pkBytes, err := hex.Dec("22a47fa09a223f2aa079edf85a7c2d4f87" +
|
||||
// "20ee63e502ee2869afab7de234b80c")
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return
|
||||
// }
|
||||
// secKey := secp256k1.SecKeyFromBytes(pkBytes)
|
||||
//
|
||||
// // Sign a message using the secret key.
|
||||
// message := "test message"
|
||||
// messageHash := blake256.Sum256(by(message))
|
||||
// signature := ecdsa.Sign(secKey, messageHash[:])
|
||||
//
|
||||
// // Serialize and display the signature.
|
||||
// fmt.Printf("Serialized Signature: %x\n", signature.Serialize())
|
||||
//
|
||||
// // Verify the signature for the message using the public key.
|
||||
// pubKey := secKey.Pubkey()
|
||||
// verified := signature.Verify(messageHash[:], pubKey)
|
||||
// fmt.Printf("Signature Verified? %v\n", verified)
|
||||
//
|
||||
// // Output:
|
||||
// // Serialized Signature: 3045022100fcc0a8768cfbcefcf2cadd7cfb0fb18ed08dd2e2ae84bef1a474a3d351b26f0302200fc1a350b45f46fa00101391302818d748c2b22615511a3ffd5bb638bd777207
|
||||
// // Signature Verified? true
|
||||
// }
|
||||
|
||||
// // This example demonstrates verifying a secp256k1 signature against a public
|
||||
// // key that is first parsed from raw bytes. The signature is also parsed from
|
||||
// // raw bytes.
|
||||
// func ExampleSignature_Verify() {
|
||||
// // Decode hex-encoded serialized public key.
|
||||
// pubKeyBytes, err := hex.Dec("02a673638cb9587cb68ea08dbef685c" +
|
||||
// "6f2d2a751a8b3c6f2a7e9a4999e6e4bfaf5")
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return
|
||||
// }
|
||||
// pubKey, err := secp256k1.ParsePubKey(pubKeyBytes)
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// // Decode hex-encoded serialized signature.
|
||||
// sigBytes, err := hex.Dec("3045022100fcc0a8768cfbcefcf2cadd7cfb0" +
|
||||
// "fb18ed08dd2e2ae84bef1a474a3d351b26f0302200fc1a350b45f46fa0010139130" +
|
||||
// "2818d748c2b22615511a3ffd5bb638bd777207")
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return
|
||||
// }
|
||||
// signature, err := ecdsa.ParseDERSignature(sigBytes)
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// // Verify the signature for the message using the public key.
|
||||
// message := "test message"
|
||||
// messageHash := blake256.Sum256(by(message))
|
||||
// verified := signature.Verify(messageHash[:], pubKey)
|
||||
// fmt.Println("Signature Verified?", verified)
|
||||
//
|
||||
// // Output:
|
||||
// // Signature Verified? true
|
||||
// }
|
||||
954
pkg/crypto/ec/ecdsa/signature.go
Normal file
@@ -0,0 +1,954 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Copyright (c) 2015-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ecdsa
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// References:
|
||||
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||
//
|
||||
// [ISO/IEC 8825-1]: Information technology — ASN.1 encoding rules:
|
||||
// Specification of Basic Encoding Rules (BER), Canonical Encoding Rules
|
||||
// (CER) and Distinguished Encoding Rules (DER)
|
||||
//
|
||||
// [SEC1]: Elliptic Curve Cryptography (May 31, 2009, Version 2.0)
|
||||
// https://www.secg.org/sec1-v2.pdf
|
||||
|
||||
var (
|
||||
// zero32 is an array of 32 bytes used for the purposes of zeroing and is
|
||||
// defined here to avoid extra allocations.
|
||||
zero32 = [32]byte{}
|
||||
// orderAsFieldVal is the order of the secp256k1 curve group stored as a
|
||||
// field value. It is provided here to avoid the need to create it multiple
|
||||
// times.
|
||||
orderAsFieldVal = func() secp256k1.FieldVal {
|
||||
var f secp256k1.FieldVal
|
||||
f.SetByteSlice(secp256k1.Params().N.Bytes())
|
||||
return f
|
||||
}()
|
||||
)
|
||||
|
||||
const (
|
||||
// asn1SequenceID is the ASN.1 identifier for a sequence and is used when
|
||||
// parsing and serializing signatures encoded with the Distinguished
|
||||
// Encoding Rules (DER) format per section 10 of [ISO/IEC 8825-1].
|
||||
asn1SequenceID = 0x30
|
||||
// asn1IntegerID is the ASN.1 identifier for an integer and is used when
|
||||
// parsing and serializing signatures encoded with the Distinguished
|
||||
// Encoding Rules (DER) format per section 10 of [ISO/IEC 8825-1].
|
||||
asn1IntegerID = 0x02
|
||||
)
|
||||
|
||||
// Signature is a type representing an ECDSA signature.
|
||||
type Signature struct {
|
||||
r secp256k1.ModNScalar
|
||||
s secp256k1.ModNScalar
|
||||
}
|
||||
|
||||
// NewSignature instantiates a new signature given some r and s values.
|
||||
func NewSignature(r, s *secp256k1.ModNScalar) *Signature {
|
||||
return &Signature{*r, *s}
|
||||
}
|
||||
|
||||
// Serialize returns the ECDSA signature in the Distinguished Encoding Rules
|
||||
// (DER) format per section 10 of [ISO/IEC 8825-1] and such that the S component
|
||||
// of the signature is less than or equal to the half order of the group.
|
||||
//
|
||||
// Note that the serialized bytes returned do not include the appended hash type
|
||||
// used in Decred signature scripts.
|
||||
func (sig *Signature) Serialize() []byte {
|
||||
// The format of a DER encoded signature is as follows:
|
||||
//
|
||||
// 0x30 <total length> 0x02 <length of R> <R> 0x02 <length of S> <S>
|
||||
// - 0x30 is the ASN.1 identifier for a sequence.
|
||||
// - Total length is 1 byte and specifies length of all remaining data.
|
||||
// - 0x02 is the ASN.1 identifier that specifies an integer follows.
|
||||
// - Length of R is 1 byte and specifies how many bytes R occupies.
|
||||
// - R is the arbitrary length big-endian encoded number which
|
||||
// represents the R value of the signature. DER encoding dictates
|
||||
// that the value must be encoded using the minimum possible number
|
||||
// of bytes. This implies the first byte can only be null if the
|
||||
// highest bit of the next byte is set in order to prevent it from
|
||||
// being interpreted as a negative number.
|
||||
// - 0x02 is once again the ASN.1 integer identifier.
|
||||
// - Length of S is 1 byte and specifies how many bytes S occupies.
|
||||
// - S is the arbitrary length big-endian encoded number which
|
||||
// represents the S value of the signature. The encoding rules are
|
||||
// identical as those for R.
|
||||
|
||||
// Ensure the S component of the signature is less than or equal to the half
|
||||
// order of the group because both S and its negation are valid signatures
|
||||
// modulo the order, so this forces a consistent choice to reduce signature
|
||||
// malleability.
|
||||
sigS := new(secp256k1.ModNScalar).Set(&sig.s)
|
||||
if sigS.IsOverHalfOrder() {
|
||||
sigS.Negate()
|
||||
}
|
||||
|
||||
// Serialize the R and S components of the signature into their fixed
|
||||
// 32-byte big-endian encoding. Note that the extra leading zero byte is
|
||||
// used to ensure it is canonical per DER and will be stripped if needed
|
||||
// below.
|
||||
var rBuf, sBuf [33]byte
|
||||
sig.r.PutBytesUnchecked(rBuf[1:33])
|
||||
sigS.PutBytesUnchecked(sBuf[1:33])
|
||||
// Ensure the encoded bytes for the R and S components are canonical per DER
|
||||
// by trimming all leading zero bytes so long as the next byte does not have
|
||||
// the high bit set and it's not the final byte.
|
||||
canonR, canonS := rBuf[:], sBuf[:]
|
||||
for len(canonR) > 1 && canonR[0] == 0x00 && canonR[1]&0x80 == 0 {
|
||||
canonR = canonR[1:]
|
||||
}
|
||||
for len(canonS) > 1 && canonS[0] == 0x00 && canonS[1]&0x80 == 0 {
|
||||
canonS = canonS[1:]
|
||||
}
|
||||
// Total length of returned signature is 1 byte for each magic and length
|
||||
// (6 total), plus lengths of R and S.
|
||||
totalLen := 6 + len(canonR) + len(canonS)
|
||||
b := make([]byte, 0, totalLen)
|
||||
b = append(b, asn1SequenceID)
|
||||
b = append(b, byte(totalLen-2))
|
||||
b = append(b, asn1IntegerID)
|
||||
b = append(b, byte(len(canonR)))
|
||||
b = append(b, canonR...)
|
||||
b = append(b, asn1IntegerID)
|
||||
b = append(b, byte(len(canonS)))
|
||||
b = append(b, canonS...)
|
||||
return b
|
||||
}
|
||||
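// Worked illustration of the DER layout Serialize emits (derived from the
// encoding rules documented above): with a 32-byte R whose top bit is set and
// a low-S value occupying 32 bytes, the result is 71 bytes laid out as
//
//	30 45 02 21 00 <R: 32 bytes> 02 20 <S: 32 bytes>
//
// and when the top byte of R is below 0x80 the 0x00 pad is dropped, giving
// the more common 70-byte form 30 44 02 20 <R: 32 bytes> 02 20 <S: 32 bytes>.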
|
||||
// zeroArray32 zeroes the provided 32-byte buffer.
|
||||
func zeroArray32(b *[32]byte) {
|
||||
copy(b[:], zero32[:])
|
||||
}
|
||||
|
||||
// fieldToModNScalar converts a field value to scalar modulo the group order and
|
||||
// returns the scalar along with either 1 if it was reduced (aka it overflowed)
|
||||
// or 0 otherwise.
|
||||
//
|
||||
// Note that a bool is not used here because it is not possible in Go to convert
|
||||
// from a bool to numeric value in constant time and many constant-time
|
||||
// operations require a numeric value.
|
||||
func fieldToModNScalar(v *secp256k1.FieldVal) (secp256k1.ModNScalar, uint32) {
|
||||
var buf [32]byte
|
||||
v.PutBytes(&buf)
|
||||
var s secp256k1.ModNScalar
|
||||
overflow := s.SetBytes(&buf)
|
||||
zeroArray32(&buf)
|
||||
return s, overflow
|
||||
}
|
||||
|
||||
// modNScalarToField converts a scalar modulo the group order to a field value.
|
||||
func modNScalarToField(v *secp256k1.ModNScalar) secp256k1.FieldVal {
|
||||
var buf [32]byte
|
||||
v.PutBytes(&buf)
|
||||
var fv secp256k1.FieldVal
|
||||
fv.SetBytes(&buf)
|
||||
return fv
|
||||
}
|
||||
|
||||
// Verify returns whether the signature is valid for the provided hash
|
||||
// and secp256k1 public key.
|
||||
func (sig *Signature) Verify(hash []byte, pubKey *secp256k1.PublicKey) bool {
|
||||
// The algorithm for verifying an ECDSA signature is given as algorithm 4.30
|
||||
// in [GECC].
|
||||
//
|
||||
// The following is a paraphrased version for reference:
|
||||
//
|
||||
// G = curve generator
|
||||
// N = curve order
|
||||
// Q = public key
|
||||
// m = message
|
||||
// R, S = signature
|
||||
//
|
||||
// 1. Fail if R and S are not in [1, N-1]
|
||||
// 2. e = H(m)
|
||||
// 3. w = S^-1 mod N
|
||||
// 4. u1 = e * w mod N
|
||||
// u2 = R * w mod N
|
||||
// 5. X = u1G + u2Q
|
||||
// 6. Fail if X is the point at infinity
|
||||
// 7. x = X.x mod N (X.x is the x coordinate of X)
|
||||
// 8. Verified if x == R
|
||||
//
|
||||
// However, since all group operations are done internally in Jacobian
|
||||
// projective space, the algorithm is modified slightly here in order to
|
||||
// avoid an expensive inversion back into affine coordinates at step 7.
|
||||
// Credits to Greg Maxwell for originally suggesting this optimization.
|
||||
//
|
||||
// Ordinarily, step 7 involves converting the x coordinate to affine by
|
||||
// calculating x = x / z^2 (mod P) and then calculating the remainder as
|
||||
// x = x (mod N). Then step 8 compares it to R.
|
||||
//
|
||||
// Note that since R is the x coordinate mod N from a random point that was
|
||||
// originally mod P, and the cofactor of the secp256k1 curve is 1, there are
|
||||
// only two possible x coordinates that the original random point could have
|
||||
// been to produce R: x, where x < N, and x+N, where x+N < P.
|
||||
//
|
||||
// This implies that the signature is valid if either:
|
||||
// a) R == X.x / X.z^2 (mod P)
|
||||
// => R * X.z^2 == X.x (mod P)
|
||||
// --or--
|
||||
// b) R + N < P && R + N == X.x / X.z^2 (mod P)
|
||||
// => R + N < P && (R + N) * X.z^2 == X.x (mod P)
|
||||
//
|
||||
// Therefore the following modified algorithm is used:
|
||||
//
|
||||
// 1. Fail if R and S are not in [1, N-1]
|
||||
// 2. e = H(m)
|
||||
// 3. w = S^-1 mod N
|
||||
// 4. u1 = e * w mod N
|
||||
// u2 = R * w mod N
|
||||
// 5. X = u1G + u2Q
|
||||
// 6. Fail if X is the point at infinity
|
||||
// 7. z = (X.z)^2 mod P (X.z is the z coordinate of X)
|
||||
// 8. Verified if R * z == X.x (mod P)
|
||||
// 9. Fail if R + N >= P
|
||||
// 10. Verified if (R + N) * z == X.x (mod P)
|
||||
//
|
||||
// Step 1.
|
||||
//
|
||||
// Fail if R and S are not in [1, N-1].
|
||||
if sig.r.IsZero() || sig.s.IsZero() {
|
||||
return false
|
||||
}
|
||||
// Step 2.
|
||||
//
|
||||
// e = H(m)
|
||||
var e secp256k1.ModNScalar
|
||||
e.SetByteSlice(hash)
|
||||
// Step 3.
|
||||
//
|
||||
// w = S^-1 mod N
|
||||
w := new(secp256k1.ModNScalar).InverseValNonConst(&sig.s)
|
||||
// Step 4.
|
||||
//
|
||||
// u1 = e * w mod N
|
||||
// u2 = R * w mod N
|
||||
u1 := new(secp256k1.ModNScalar).Mul2(&e, w)
|
||||
u2 := new(secp256k1.ModNScalar).Mul2(&sig.r, w)
|
||||
// Step 5.
|
||||
//
|
||||
// X = u1G + u2Q
|
||||
var X, Q, u1G, u2Q secp256k1.JacobianPoint
|
||||
pubKey.AsJacobian(&Q)
|
||||
secp256k1.ScalarBaseMultNonConst(u1, &u1G)
|
||||
secp256k1.ScalarMultNonConst(u2, &Q, &u2Q)
|
||||
secp256k1.AddNonConst(&u1G, &u2Q, &X)
|
||||
// Step 6.
|
||||
//
|
||||
// Fail if X is the point at infinity
|
||||
if (X.X.IsZero() && X.Y.IsZero()) || X.Z.IsZero() {
|
||||
return false
|
||||
}
|
||||
// Step 7.
|
||||
//
|
||||
// z = (X.z)^2 mod P (X.z is the z coordinate of X)
|
||||
z := new(secp256k1.FieldVal).SquareVal(&X.Z)
|
||||
// Step 8.
|
||||
//
|
||||
// Verified if R * z == X.x (mod P)
|
||||
sigRModP := modNScalarToField(&sig.r)
|
||||
result := new(secp256k1.FieldVal).Mul2(&sigRModP, z).Normalize()
|
||||
if result.Equals(&X.X) {
|
||||
return true
|
||||
}
|
||||
// Step 9.
|
||||
//
|
||||
// Fail if R + N >= P
|
||||
if sigRModP.IsGtOrEqPrimeMinusOrder() {
|
||||
return false
|
||||
}
|
||||
// Step 10.
|
||||
//
|
||||
// Verified if (R + N) * z == X.x (mod P)
|
||||
sigRModP.Add(&orderAsFieldVal)
|
||||
result.Mul2(&sigRModP, z).Normalize()
|
||||
return result.Equals(&X.X)
|
||||
}
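A minimal sketch of the verification path described above, assuming the import paths and `btcec.NewSecretKey` seen elsewhere in this commit; it checks the signature against both the signed hash and an unrelated one.

```go
package main

import (
	"crypto/sha256"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/ecdsa"
)

func main() {
	secKey, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	pubKey := secKey.PubKey()

	hash := sha256.Sum256([]byte("signed message"))
	sig := ecdsa.Sign(secKey, hash[:])
	fmt.Println("valid:", sig.Verify(hash[:], pubKey)) // expected: true

	other := sha256.Sum256([]byte("a different message"))
	fmt.Println("valid for other hash:", sig.Verify(other[:], pubKey)) // expected: false
}
```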
|
||||
|
||||
// IsEqual compares this Signature instance to the one passed, returning true if
|
||||
// both Signatures are equivalent. A signature is equivalent to another, if
|
||||
// they both have the same scalar value for R and S.
|
||||
func (sig *Signature) IsEqual(otherSig *Signature) bool {
|
||||
return sig.r.Equals(&otherSig.r) && sig.s.Equals(&otherSig.s)
|
||||
}
|
||||
|
||||
// ParseDERSignature parses a signature in the Distinguished Encoding Rules
|
||||
// (DER) format per section 10 of [ISO/IEC 8825-1] and enforces the following
|
||||
// additional restrictions specific to secp256k1:
|
||||
//
|
||||
// - The R and S values must be in the valid range for secp256k1 scalars:
|
||||
// - Negative values are rejected
|
||||
// - Zero is rejected
|
||||
// - Values greater than or equal to the secp256k1 group order are rejected
|
||||
func ParseDERSignature(sig []byte) (*Signature, error) {
|
||||
// The format of a DER encoded signature for secp256k1 is as follows:
|
||||
//
|
||||
// 0x30 <total length> 0x02 <length of R> <R> 0x02 <length of S> <S>
|
||||
// - 0x30 is the ASN.1 identifier for a sequence
|
||||
// - Total length is 1 byte and specifies length of all remaining data
|
||||
// - 0x02 is the ASN.1 identifier that specifies an integer follows
|
||||
// - Length of R is 1 byte and specifies how many bytes R occupies
|
||||
// - R is the arbitrary length big-endian encoded number which
|
||||
// represents the R value of the signature. DER encoding dictates
|
||||
// that the value must be encoded using the minimum possible number
|
||||
// of bytes. This implies the first byte can only be null if the
|
||||
// highest bit of the next byte is set in order to prevent it from
|
||||
// being interpreted as a negative number.
|
||||
// - 0x02 is once again the ASN.1 integer identifier
|
||||
// - Length of S is 1 byte and specifies how many bytes S occupies
|
||||
// - S is the arbitrary length big-endian encoded number which
|
||||
// represents the S value of the signature. The encoding rules are
|
||||
// identical as those for R.
|
||||
//
|
||||
// NOTE: The DER specification supports specifying lengths that can occupy
|
||||
// more than 1 byte, however, since this is specific to secp256k1
|
||||
// signatures, all lengths will be a single byte.
|
||||
const (
|
||||
// minSigLen is the minimum length of a DER encoded signature and is
|
||||
// when both R and S are 1 byte each.
|
||||
//
|
||||
// 0x30 + <1-byte> + 0x02 + 0x01 + <byte> + 0x2 + 0x01 + <byte>
|
||||
minSigLen = 8
|
||||
// maxSigLen is the maximum length of a DER encoded signature and is
|
||||
// when both R and S are 33 bytes each. It is 33 bytes because a
|
||||
// 256-bit integer requires 32 bytes and an additional leading null byte
|
||||
// might be required if the high bit is set in the value.
|
||||
//
|
||||
// 0x30 + <1-byte> + 0x02 + 0x21 + <33 bytes> + 0x2 + 0x21 + <33 bytes>
|
||||
maxSigLen = 72
|
||||
// sequenceOffset is the byte offset within the signature of the
|
||||
// expected ASN.1 sequence identifier.
|
||||
sequenceOffset = 0
|
||||
// dataLenOffset is the byte offset within the signature of the expected
|
||||
// total length of all remaining data in the signature.
|
||||
dataLenOffset = 1
|
||||
// rTypeOffset is the byte offset within the signature of the ASN.1
|
||||
// identifier for R and is expected to indicate an ASN.1 integer.
|
||||
rTypeOffset = 2
|
||||
// rLenOffset is the byte offset within the signature of the length of
|
||||
// R.
|
||||
rLenOffset = 3
|
||||
// rOffset is the byte offset within the signature of R.
|
||||
rOffset = 4
|
||||
)
|
||||
// The signature must adhere to the minimum and maximum allowed length.
|
||||
sigLen := len(sig)
|
||||
if sigLen < minSigLen {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: too short: %d < %d", sigLen,
|
||||
minSigLen,
|
||||
)
|
||||
return nil, signatureError(ErrSigTooShort, str)
|
||||
}
|
||||
if sigLen > maxSigLen {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: too long: %d > %d", sigLen,
|
||||
maxSigLen,
|
||||
)
|
||||
return nil, signatureError(ErrSigTooLong, str)
|
||||
}
|
||||
// The signature must start with the ASN.1 sequence identifier.
|
||||
if sig[sequenceOffset] != asn1SequenceID {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: format has wrong type: %#x",
|
||||
sig[sequenceOffset],
|
||||
)
|
||||
return nil, signatureError(ErrSigInvalidSeqID, str)
|
||||
}
|
||||
// The signature must indicate the correct amount of data for all elements
|
||||
// related to R and S.
|
||||
if int(sig[dataLenOffset]) != sigLen-2 {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: bad length: %d != %d",
|
||||
sig[dataLenOffset], sigLen-2,
|
||||
)
|
||||
return nil, signatureError(ErrSigInvalidDataLen, str)
|
||||
}
|
||||
// Calculate the offsets of the elements related to S and ensure S is inside
|
||||
// the signature.
|
||||
//
|
||||
// rLen specifies the length of the big-endian encoded number which
|
||||
// represents the R value of the signature.
|
||||
//
|
||||
// sTypeOffset is the offset of the ASN.1 identifier for S and, like its R
|
||||
// counterpart, is expected to indicate an ASN.1 integer.
|
||||
//
|
||||
// sLenOffset and sOffset are the byte offsets within the signature of the
|
||||
// length of S and S itself, respectively.
|
||||
rLen := int(sig[rLenOffset])
|
||||
sTypeOffset := rOffset + rLen
|
||||
sLenOffset := sTypeOffset + 1
|
||||
if sTypeOffset >= sigLen {
|
||||
str := "malformed signature: S type indicator missing"
|
||||
return nil, signatureError(ErrSigMissingSTypeID, str)
|
||||
}
|
||||
if sLenOffset >= sigLen {
|
||||
str := "malformed signature: S length missing"
|
||||
return nil, signatureError(ErrSigMissingSLen, str)
|
||||
}
|
||||
// The lengths of R and S must match the overall length of the signature.
|
||||
//
|
||||
// sLen specifies the length of the big-endian encoded number which
|
||||
// represents the S value of the signature.
|
||||
sOffset := sLenOffset + 1
|
||||
sLen := int(sig[sLenOffset])
|
||||
if sOffset+sLen != sigLen {
|
||||
str := "malformed signature: invalid S length"
|
||||
return nil, signatureError(ErrSigInvalidSLen, str)
|
||||
}
|
||||
// R elements must be ASN.1 integers.
|
||||
if sig[rTypeOffset] != asn1IntegerID {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: R integer marker: %#x != %#x",
|
||||
sig[rTypeOffset], asn1IntegerID,
|
||||
)
|
||||
return nil, signatureError(ErrSigInvalidRIntID, str)
|
||||
}
|
||||
// Zero-length integers are not allowed for R.
|
||||
if rLen == 0 {
|
||||
str := "malformed signature: R length is zero"
|
||||
return nil, signatureError(ErrSigZeroRLen, str)
|
||||
}
|
||||
// R must not be negative.
|
||||
if sig[rOffset]&0x80 != 0 {
|
||||
str := "malformed signature: R is negative"
|
||||
return nil, signatureError(ErrSigNegativeR, str)
|
||||
}
|
||||
// Null bytes at the start of R are not allowed, unless R would otherwise be
|
||||
// interpreted as a negative number.
|
||||
if rLen > 1 && sig[rOffset] == 0x00 && sig[rOffset+1]&0x80 == 0 {
|
||||
str := "malformed signature: R value has too much padding"
|
||||
return nil, signatureError(ErrSigTooMuchRPadding, str)
|
||||
}
|
||||
// S elements must be ASN.1 integers.
|
||||
if sig[sTypeOffset] != asn1IntegerID {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: S integer marker: %#x != %#x",
|
||||
sig[sTypeOffset], asn1IntegerID,
|
||||
)
|
||||
return nil, signatureError(ErrSigInvalidSIntID, str)
|
||||
}
|
||||
// Zero-length integers are not allowed for S.
|
||||
if sLen == 0 {
|
||||
str := "malformed signature: S length is zero"
|
||||
return nil, signatureError(ErrSigZeroSLen, str)
|
||||
}
|
||||
// S must not be negative.
|
||||
if sig[sOffset]&0x80 != 0 {
|
||||
str := "malformed signature: S is negative"
|
||||
return nil, signatureError(ErrSigNegativeS, str)
|
||||
}
|
||||
// Null bytes at the start of S are not allowed, unless S would otherwise be
|
||||
// interpreted as a negative number.
|
||||
if sLen > 1 && sig[sOffset] == 0x00 && sig[sOffset+1]&0x80 == 0 {
|
||||
str := "malformed signature: S value has too much padding"
|
||||
return nil, signatureError(ErrSigTooMuchSPadding, str)
|
||||
}
|
||||
// The signature is validly encoded per DER at this point, however, enforce
|
||||
// additional restrictions to ensure R and S are in the range [1, N-1] since
|
||||
// valid ECDSA signatures are required to be in that range per spec.
|
||||
//
|
||||
// Also note that while the overflow checks are required to make use of the
|
||||
// specialized mod N scalar type, rejecting zero here is not strictly
|
||||
// required because it is also checked when verifying the signature, but
|
||||
// there really isn't a good reason not to fail early here on signatures
|
||||
// that do not conform to the ECDSA spec.
|
||||
//
|
||||
// Strip leading zeroes from R.
|
||||
rBytes := sig[rOffset : rOffset+rLen]
|
||||
for len(rBytes) > 0 && rBytes[0] == 0x00 {
|
||||
rBytes = rBytes[1:]
|
||||
}
|
||||
// R must be in the range [1, N-1]. Notice the check for the maximum number
|
||||
// of bytes is required because SetByteSlice truncates as noted in its
|
||||
// comment so it could otherwise fail to detect the overflow.
|
||||
var r secp256k1.ModNScalar
|
||||
if len(rBytes) > 32 {
|
||||
str := "invalid signature: R is larger than 256 bits"
|
||||
return nil, signatureError(ErrSigRTooBig, str)
|
||||
}
|
||||
if overflow := r.SetByteSlice(rBytes); overflow {
|
||||
str := "invalid signature: R >= group order"
|
||||
return nil, signatureError(ErrSigRTooBig, str)
|
||||
}
|
||||
if r.IsZero() {
|
||||
str := "invalid signature: R is 0"
|
||||
return nil, signatureError(ErrSigRIsZero, str)
|
||||
}
|
||||
// Strip leading zeroes from S.
|
||||
sBytes := sig[sOffset : sOffset+sLen]
|
||||
for len(sBytes) > 0 && sBytes[0] == 0x00 {
|
||||
sBytes = sBytes[1:]
|
||||
}
|
||||
// S must be in the range [1, N-1]. Notice the check for the maximum number
|
||||
// of bytes is required because SetByteSlice truncates as noted in its
|
||||
// comment so it could otherwise fail to detect the overflow.
|
||||
var s secp256k1.ModNScalar
|
||||
if len(sBytes) > 32 {
|
||||
str := "invalid signature: S is larger than 256 bits"
|
||||
return nil, signatureError(ErrSigSTooBig, str)
|
||||
}
|
||||
if overflow := s.SetByteSlice(sBytes); overflow {
|
||||
str := "invalid signature: S >= group order"
|
||||
return nil, signatureError(ErrSigSTooBig, str)
|
||||
}
|
||||
if s.IsZero() {
|
||||
str := "invalid signature: S is 0"
|
||||
return nil, signatureError(ErrSigSIsZero, str)
|
||||
}
|
||||
// Create and return the signature.
|
||||
return NewSignature(&r, &s), nil
|
||||
}
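A short sketch of how a caller might exercise the parser's strictness, assuming the package's error kinds (for example `ErrSigTooShort`, used above) support `errors.Is` in the way described by `pkg/crypto/ec/error.go` later in this commit.

```go
package main

import (
	"errors"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/ecdsa"
)

func main() {
	// Four bytes is below the 8-byte minimum DER signature length, so the
	// parser should reject the input with ErrSigTooShort.
	_, err := ecdsa.ParseDERSignature([]byte{0x30, 0x02, 0x02, 0x00})
	fmt.Println("err:", err)
	fmt.Println("is ErrSigTooShort:", errors.Is(err, ecdsa.ErrSigTooShort))
}
```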
|
||||
|
||||
// sign generates an ECDSA signature over the secp256k1 curve for the provided
|
||||
// hash (which should be the result of hashing a larger message) using the given
|
||||
// nonce and secret key and returns it along with an additional public key
|
||||
// recovery code and success indicator. Upon success, the produced signature is
|
||||
// deterministic (same message, nonce, and key yield the same signature) and
|
||||
// canonical in accordance with BIP0062.
|
||||
//
|
||||
// Note that signRFC6979 makes use of this function as it is the primary ECDSA
|
||||
// signing logic. It differs in that it accepts a nonce to use when signing and
|
||||
// may not successfully produce a valid signature for the given nonce. It is
|
||||
// primarily separated for testing purposes.
|
||||
func sign(secKey, nonce *secp256k1.ModNScalar, hash []byte) (
|
||||
*Signature, byte,
|
||||
bool,
|
||||
) {
|
||||
// The algorithm for producing an ECDSA signature is given as algorithm 4.29
|
||||
// in [GECC].
|
||||
//
|
||||
// The following is a paraphrased version for reference:
|
||||
//
|
||||
// G = curve generator
|
||||
// N = curve order
|
||||
// d = secret key
|
||||
// m = message
|
||||
// r, s = signature
|
||||
//
|
||||
// 1. Select random nonce k in [1, N-1]
|
||||
// 2. Compute kG
|
||||
// 3. r = kG.x mod N (kG.x is the x coordinate of the point kG)
|
||||
// Repeat from step 1 if r = 0
|
||||
// 4. e = H(m)
|
||||
// 5. s = k^-1(e + dr) mod N
|
||||
// Repeat from step 1 if s = 0
|
||||
// 6. Return (r,s)
|
||||
//
|
||||
// This is slightly modified here to conform to RFC6979 and BIP 62 as
|
||||
// follows:
|
||||
//
|
||||
// A. Instead of selecting a random nonce in step 1, use RFC6979 to generate
|
||||
// a deterministic nonce in [1, N-1] parameterized by the secret key,
|
||||
// message being signed, and an iteration count for the repeat cases
|
||||
// B. Negate s calculated in step 5 if it is > N/2
|
||||
// This is done because both s and its negation are valid signatures
|
||||
// modulo the curve order N, so it forces a consistent choice to reduce
|
||||
// signature malleability
|
||||
//
|
||||
// NOTE: Step 1 is performed by the caller.
|
||||
//
|
||||
// Step 2.
|
||||
//
|
||||
// Compute kG
|
||||
//
|
||||
// Note that the point must be in affine coordinates.
|
||||
k := nonce
|
||||
var kG secp256k1.JacobianPoint
|
||||
secp256k1.ScalarBaseMultNonConst(k, &kG)
|
||||
kG.ToAffine()
|
||||
// Step 3.
|
||||
//
|
||||
// r = kG.x mod N
|
||||
// Repeat from step 1 if r = 0
|
||||
r, overflow := fieldToModNScalar(&kG.X)
|
||||
if r.IsZero() {
|
||||
return nil, 0, false
|
||||
}
|
||||
// Since the secp256k1 curve has a cofactor of 1, when recovering a
|
||||
// public key from an ECDSA signature over it, there are four possible
|
||||
// candidates corresponding to the following cases:
|
||||
//
|
||||
// 1) The X coord of the random point is < N and its Y coord even
|
||||
// 2) The X coord of the random point is < N and its Y coord is odd
|
||||
// 3) The X coord of the random point is >= N and its Y coord is even
|
||||
// 4) The X coord of the random point is >= N and its Y coord is odd
|
||||
//
|
||||
// Rather than forcing the recovery procedure to check all possible
|
||||
// cases, this creates a recovery code that uniquely identifies which of
|
||||
// the cases apply by making use of 2 bits. Bit 0 identifies the
|
||||
// oddness case and Bit 1 identifies the overflow case (aka when the X
|
||||
// coord >= N).
|
||||
//
|
||||
// It is also worth noting that making use of Hasse's theorem shows
|
||||
// there are around log_2((p-n)/p) ~= -127.65 ~= 1 in 2^127 points where
|
||||
// the X coordinate is >= N. It is not possible to calculate these
|
||||
// points since that would require breaking the ECDLP, but, in practice
|
||||
// this strongly implies with extremely high probability that there are
|
||||
// only a few actual points for which this case is true.
|
||||
pubKeyRecoveryCode := byte(overflow<<1) | byte(kG.Y.IsOddBit())
|
||||
// Step 4.
|
||||
//
|
||||
// e = H(m)
|
||||
//
|
||||
// Note that this actually sets e = H(m) mod N which is correct since
|
||||
// it is only used in step 5 which itself is mod N.
|
||||
var e secp256k1.ModNScalar
|
||||
e.SetByteSlice(hash)
|
||||
// Step 5 with modification B.
|
||||
//
|
||||
// s = k^-1(e + dr) mod N
|
||||
// Repeat from step 1 if s = 0
|
||||
// s = -s if s > N/2
|
||||
kinv := new(secp256k1.ModNScalar).InverseValNonConst(k)
|
||||
s := new(secp256k1.ModNScalar).Mul2(secKey, &r).Add(&e).Mul(kinv)
|
||||
if s.IsZero() {
|
||||
return nil, 0, false
|
||||
}
|
||||
if s.IsOverHalfOrder() {
|
||||
s.Negate()
|
||||
// Negating s corresponds to the random point that would have been
|
||||
// generated by -k (mod N), which necessarily has the opposite
|
||||
// oddness since N is prime, thus flip the pubkey recovery code
|
||||
// oddness bit accordingly.
|
||||
pubKeyRecoveryCode ^= 0x01
|
||||
}
|
||||
// Step 6.
|
||||
//
|
||||
// Return (r,s)
|
||||
return NewSignature(&r, s), pubKeyRecoveryCode, true
|
||||
}
|
||||
|
||||
// signRFC6979 generates a deterministic ECDSA signature according to RFC 6979
|
||||
// and BIP0062 and returns it along with an additional public key recovery code
|
||||
// for efficiently recovering the public key from the signature.
|
||||
func signRFC6979(secKey *secp256k1.SecretKey, hash []byte) (
|
||||
*Signature,
|
||||
byte,
|
||||
) {
|
||||
// The algorithm for producing an ECDSA signature is given as algorithm 4.29
|
||||
// in [GECC].
|
||||
//
|
||||
// The following is a paraphrased version for reference:
|
||||
//
|
||||
// G = curve generator
|
||||
// N = curve order
|
||||
// d = secret key
|
||||
// m = message
|
||||
// r, s = signature
|
||||
//
|
||||
// 1. Select random nonce k in [1, N-1]
|
||||
// 2. Compute kG
|
||||
// 3. r = kG.x mod N (kG.x is the x coordinate of the point kG)
|
||||
// Repeat from step 1 if r = 0
|
||||
// 4. e = H(m)
|
||||
// 5. s = k^-1(e + dr) mod N
|
||||
// Repeat from step 1 if s = 0
|
||||
// 6. Return (r,s)
|
||||
//
|
||||
// This is slightly modified here to conform to RFC6979 and BIP 62 as
|
||||
// follows:
|
||||
//
|
||||
// A. Instead of selecting a random nonce in step 1, use RFC6979 to generate
|
||||
// a deterministic nonce in [1, N-1] parameterized by the secret key,
|
||||
// message being signed, and an iteration count for the repeat cases
|
||||
// B. Negate s calculated in step 5 if it is > N/2
|
||||
// This is done because both s and its negation are valid signatures
|
||||
// modulo the curve order N, so it forces a consistent choice to reduce
|
||||
// signature malleability
|
||||
secKeyScalar := &secKey.Key
|
||||
var secKeyBytes [32]byte
|
||||
secKeyScalar.PutBytes(&secKeyBytes)
|
||||
defer zeroArray32(&secKeyBytes)
|
||||
for iteration := uint32(0); ; iteration++ {
|
||||
// Step 1 with modification A.
|
||||
//
|
||||
// Generate a deterministic nonce in [1, N-1] parameterized by the
|
||||
// secret key, message being signed, and iteration count.
|
||||
k := secp256k1.NonceRFC6979(secKeyBytes[:], hash, nil, nil, iteration)
|
||||
// Steps 2-6.
|
||||
sig, pubKeyRecoveryCode, success := sign(secKeyScalar, k, hash)
|
||||
k.Zero()
|
||||
if !success {
|
||||
continue
|
||||
}
|
||||
return sig, pubKeyRecoveryCode
|
||||
}
|
||||
}
|
||||
|
||||
// Sign generates an ECDSA signature over the secp256k1 curve for the provided
|
||||
// hash (which should be the result of hashing a larger message) using the given
|
||||
// secret key. The produced signature is deterministic (same message and same
|
||||
// key yield the same signature) and canonical in accordance with RFC6979 and
|
||||
// BIP0062.
|
||||
func Sign(key *secp256k1.SecretKey, hash []byte) *Signature {
|
||||
signature, _ := signRFC6979(key, hash)
|
||||
return signature
|
||||
}
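A sketch illustrating the determinism claim above: signing the same hash twice with the same key yields byte-identical DER encodings. Import paths and `btcec.NewSecretKey` are assumptions based on the rest of this commit.

```go
package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/ecdsa"
)

func main() {
	secKey, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	hash := sha256.Sum256([]byte("deterministic signing"))

	// RFC6979 nonces depend only on the key and the hash, so both calls
	// produce the same signature bytes.
	sig1 := ecdsa.Sign(secKey, hash[:]).Serialize()
	sig2 := ecdsa.Sign(secKey, hash[:]).Serialize()
	fmt.Println("identical encodings:", bytes.Equal(sig1, sig2)) // expected: true
}
```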
|
||||
|
||||
const (
|
||||
// compactSigSize is the size of a compact signature. It consists of a
|
||||
// compact signature recovery code byte followed by the R and S components
|
||||
// serialized as 32-byte big-endian values. 1+32+32 = 65.
|
||||
compactSigSize = 65
|
||||
// compactSigMagicOffset is a value used when creating the compact signature
|
||||
// recovery code inherited from Bitcoin and has no meaning, but has been
|
||||
// retained for compatibility. For historical purposes, it was originally
|
||||
// picked to avoid a binary representation that would allow compact
|
||||
// signatures to be mistaken for other components.
|
||||
compactSigMagicOffset = 27
|
||||
// compactSigCompPubKey is a value used when creating the compact signature
|
||||
// recovery code to indicate the original public key was compressed.
|
||||
compactSigCompPubKey = 4
|
||||
// pubKeyRecoveryCodeOddnessBit specifies the bit that indicates the oddness
|
||||
// of the Y coordinate of the random point calculated when creating a
|
||||
// signature.
|
||||
pubKeyRecoveryCodeOddnessBit = 1 << 0
|
||||
// pubKeyRecoveryCodeOverflowBit specifies the bit that indicates the X
|
||||
// coordinate of the random point calculated when creating a signature was
|
||||
// >= N, where N is the order of the group.
|
||||
pubKeyRecoveryCodeOverflowBit = 1 << 1
|
||||
)
|
||||
|
||||
// SignCompact produces a compact ECDSA signature over the secp256k1 curve for
|
||||
// the provided hash (which should be the result of hashing a larger message)
|
||||
// using the given secret key. The isCompressedKey parameter specifies if the
|
||||
// produced signature should reference a compressed public key or not.
|
||||
//
|
||||
// Compact signature format:
|
||||
// <1-byte compact sig recovery code><32-byte R><32-byte S>
|
||||
//
|
||||
// The compact sig recovery code is the value 27 + public key recovery code + 4
|
||||
// if the compact signature was created with a compressed public key.
|
||||
func SignCompact(
|
||||
key *secp256k1.SecretKey, hash []byte,
|
||||
isCompressedKey bool,
|
||||
) []byte {
|
||||
// Create the signature and associated pubkey recovery code and calculate
|
||||
// the compact signature recovery code.
|
||||
sig, pubKeyRecoveryCode := signRFC6979(key, hash)
|
||||
compactSigRecoveryCode := compactSigMagicOffset + pubKeyRecoveryCode
|
||||
if isCompressedKey {
|
||||
compactSigRecoveryCode += compactSigCompPubKey
|
||||
}
|
||||
// Output <compactSigRecoveryCode><32-byte R><32-byte S>.
|
||||
var b [compactSigSize]byte
|
||||
b[0] = compactSigRecoveryCode
|
||||
sig.r.PutBytesUnchecked(b[1:33])
|
||||
sig.s.PutBytesUnchecked(b[33:65])
|
||||
return b[:]
|
||||
}
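A sketch that decomposes the compact signature layout described above; the arithmetic on the recovery byte mirrors the constants defined earlier (the 27 magic offset, +4 for compressed keys). Import paths and `btcec.NewSecretKey` are assumptions.

```go
package main

import (
	"crypto/sha256"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/ecdsa"
)

func main() {
	secKey, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	hash := sha256.Sum256([]byte("compact signature"))
	sig := ecdsa.SignCompact(secKey, hash[:], true)

	fmt.Println("length:", len(sig)) // expected: 65

	// Undo the 27 magic offset; bit 2 (value 4) marks a compressed key and
	// the low two bits carry the public key recovery code.
	code := sig[0] - 27
	fmt.Println("compressed key:", code&4 != 0)
	fmt.Println("recovery code:", code&3)
	// sig[1:33] is R and sig[33:65] is S, both 32-byte big endian.
}
```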
|
||||
|
||||
// RecoverCompact attempts to recover the secp256k1 public key from the provided
|
||||
// compact signature and message hash. It first verifies the signature, and, if
|
||||
// the signature matches then the recovered public key will be returned as well
|
||||
// as a boolean indicating whether or not the original key was compressed.
|
||||
func RecoverCompact(signature, hash []byte) (
|
||||
*secp256k1.PublicKey, bool, error,
|
||||
) {
|
||||
// The following is very loosely based on the information and algorithm that
|
||||
// describes recovering a public key from an ECDSA signature in section
|
||||
// 4.1.6 of [SEC1].
|
||||
//
|
||||
// Given the following parameters:
|
||||
//
|
||||
// G = curve generator
|
||||
// N = group order
|
||||
// P = field prime
|
||||
// Q = public key
|
||||
// m = message
|
||||
// e = hash of the message
|
||||
// r, s = signature
|
||||
// X = random point used when creating signature whose x coordinate is r
|
||||
//
|
||||
// The equation to recover a public key candidate from an ECDSA signature
|
||||
// is:
|
||||
// Q = r^-1(sX - eG).
|
||||
//
|
||||
// This can be verified by plugging it in for Q in the sig verification
|
||||
// equation:
|
||||
// X = s^-1(eG + rQ) (mod N)
|
||||
// => s^-1(eG + r(r^-1(sX - eG))) (mod N)
|
||||
// => s^-1(eG + sX - eG) (mod N)
|
||||
// => s^-1(sX) (mod N)
|
||||
// => X (mod N)
|
||||
//
|
||||
// However, note that since r is the x coordinate mod N from a random point
|
||||
// that was originally mod P, and the cofactor of the secp256k1 curve is 1,
|
||||
// there are four possible points that the original random point could have
|
||||
// been to produce r: (r,y), (r,-y), (r+N,y), and (r+N,-y). At least 2 of
|
||||
// those points will successfully verify, and all 4 will successfully verify
|
||||
// when the original x coordinate was in the range [N+1, P-1], but in any
|
||||
// case, only one of them corresponds to the original secret key used.
|
||||
//
|
||||
// The method described by section 4.1.6 of [SEC1] to determine which one is
|
||||
// the correct one involves calculating each possibility as a candidate
|
||||
// public key and comparing the candidate to the authentic public key. It
|
||||
// also hints that it is possible to generate the signature in such a
|
||||
// way that only one of the candidate public keys is viable.
|
||||
//
|
||||
// A more efficient approach that is specific to the secp256k1 curve is used
|
||||
// here instead which is to produce a "pubkey recovery code" when signing
|
||||
// that uniquely identifies which of the 4 possibilities is correct for the
|
||||
// original random point and using that to recover the pubkey directly as
|
||||
// follows:
|
||||
//
|
||||
// 1. Fail if r and s are not in [1, N-1]
|
||||
// 2. Convert r to integer mod P
|
||||
// 3. If pubkey recovery code overflow bit is set:
|
||||
// 3.1 Fail if r + N >= P
|
||||
// 3.2 r = r + N (mod P)
|
||||
// 4. y = +sqrt(r^3 + 7) (mod P)
|
||||
// 4.1 Fail if y does not exist
|
||||
// 4.2 y = -y if needed to match pubkey recovery code oddness bit
|
||||
// 5. X = (r, y)
|
||||
// 6. e = H(m) mod N
|
||||
// 7. w = r^-1 mod N
|
||||
// 8. u1 = -(e * w) mod N
|
||||
// u2 = s * w mod N
|
||||
// 9. Q = u1G + u2X
|
||||
// 10. Fail if Q is the point at infinity
|
||||
//
|
||||
// A compact signature consists of a recovery byte followed by the R and
|
||||
// S components serialized as 32-byte big-endian values.
|
||||
if len(signature) != compactSigSize {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: wrong size: %d != %d",
|
||||
len(signature), compactSigSize,
|
||||
)
|
||||
return nil, false, signatureError(ErrSigInvalidLen, str)
|
||||
}
|
||||
// Parse and validate the compact signature recovery code.
|
||||
const (
|
||||
minValidCode = compactSigMagicOffset
|
||||
maxValidCode = compactSigMagicOffset + compactSigCompPubKey + 3
|
||||
)
|
||||
sigRecoveryCode := signature[0]
|
||||
if sigRecoveryCode < minValidCode || sigRecoveryCode > maxValidCode {
|
||||
str := fmt.Sprintf(
|
||||
"invalid signature: public key recovery code %d is "+
|
||||
"not in the valid range [%d, %d]", sigRecoveryCode,
|
||||
minValidCode,
|
||||
maxValidCode,
|
||||
)
|
||||
return nil, false, signatureError(ErrSigInvalidRecoveryCode, str)
|
||||
}
|
||||
sigRecoveryCode -= compactSigMagicOffset
|
||||
wasCompressed := sigRecoveryCode&compactSigCompPubKey != 0
|
||||
pubKeyRecoveryCode := sigRecoveryCode & 3
|
||||
// Step 1.
|
||||
//
|
||||
// Parse and validate the R and S signature components.
|
||||
//
|
||||
// Fail if r and s are not in [1, N-1].
|
||||
var r, s secp256k1.ModNScalar
|
||||
if overflow := r.SetByteSlice(signature[1:33]); overflow {
|
||||
str := "invalid signature: R >= group order"
|
||||
return nil, false, signatureError(ErrSigRTooBig, str)
|
||||
}
|
||||
if r.IsZero() {
|
||||
str := "invalid signature: R is 0"
|
||||
return nil, false, signatureError(ErrSigRIsZero, str)
|
||||
}
|
||||
if overflow := s.SetByteSlice(signature[33:]); overflow {
|
||||
str := "invalid signature: S >= group order"
|
||||
return nil, false, signatureError(ErrSigSTooBig, str)
|
||||
}
|
||||
if s.IsZero() {
|
||||
str := "invalid signature: S is 0"
|
||||
return nil, false, signatureError(ErrSigSIsZero, str)
|
||||
}
|
||||
// Step 2.
|
||||
//
|
||||
// Convert r to integer mod P.
|
||||
fieldR := modNScalarToField(&r)
|
||||
// Step 3.
|
||||
//
|
||||
// If pubkey recovery code overflow bit is set:
|
||||
if pubKeyRecoveryCode&pubKeyRecoveryCodeOverflowBit != 0 {
|
||||
// Step 3.1.
|
||||
//
|
||||
// Fail if r + N >= P
|
||||
//
|
||||
// Either the signature or the recovery code must be invalid if the
|
||||
// recovery code overflow bit is set and adding N to the R component
|
||||
// would exceed the field prime since R originally came from the X
|
||||
// coordinate of a random point on the curve.
|
||||
if fieldR.IsGtOrEqPrimeMinusOrder() {
|
||||
str := "invalid signature: signature R + N >= P"
|
||||
return nil, false, signatureError(ErrSigOverflowsPrime, str)
|
||||
}
|
||||
// Step 3.2.
|
||||
//
|
||||
// r = r + N (mod P)
|
||||
fieldR.Add(&orderAsFieldVal)
|
||||
}
|
||||
// Step 4.
|
||||
//
|
||||
// y = +sqrt(r^3 + 7) (mod P)
|
||||
// Fail if y does not exist.
|
||||
// y = -y if needed to match pubkey recovery code oddness bit
|
||||
//
|
||||
// The signature must be invalid if the calculation fails because the X
|
||||
// coord originally came from a random point on the curve which means there
|
||||
// must be a Y coord that satisfies the equation for a valid signature.
|
||||
oddY := pubKeyRecoveryCode&pubKeyRecoveryCodeOddnessBit != 0
|
||||
var y secp256k1.FieldVal
|
||||
if valid := secp256k1.DecompressY(&fieldR, oddY, &y); !valid {
|
||||
str := "invalid signature: not for a valid curve point"
|
||||
return nil, false, signatureError(ErrPointNotOnCurve, str)
|
||||
}
|
||||
// Step 5.
|
||||
//
|
||||
// X = (r, y)
|
||||
var X secp256k1.JacobianPoint
|
||||
X.X.Set(fieldR.Normalize())
|
||||
X.Y.Set(y.Normalize())
|
||||
X.Z.SetInt(1)
|
||||
// Step 6.
|
||||
//
|
||||
// e = H(m) mod N
|
||||
var e secp256k1.ModNScalar
|
||||
e.SetByteSlice(hash)
|
||||
// Step 7.
|
||||
//
|
||||
// w = r^-1 mod N
|
||||
w := new(secp256k1.ModNScalar).InverseValNonConst(&r)
|
||||
// Step 8.
|
||||
//
|
||||
// u1 = -(e * w) mod N
|
||||
// u2 = s * w mod N
|
||||
u1 := new(secp256k1.ModNScalar).Mul2(&e, w).Negate()
|
||||
u2 := new(secp256k1.ModNScalar).Mul2(&s, w)
|
||||
// Step 9.
|
||||
//
|
||||
// Q = u1G + u2X
|
||||
var Q, u1G, u2X secp256k1.JacobianPoint
|
||||
secp256k1.ScalarBaseMultNonConst(u1, &u1G)
|
||||
secp256k1.ScalarMultNonConst(u2, &X, &u2X)
|
||||
secp256k1.AddNonConst(&u1G, &u2X, &Q)
|
||||
// Step 10.
|
||||
//
|
||||
// Fail if Q is the point at infinity.
|
||||
//
|
||||
// Either the signature or the pubkey recovery code must be invalid if the
|
||||
// recovered pubkey is the point at infinity.
|
||||
if (Q.X.IsZero() && Q.Y.IsZero()) || Q.Z.IsZero() {
|
||||
str := "invalid signature: recovered pubkey is the point at infinity"
|
||||
return nil, false, signatureError(ErrPointNotOnCurve, str)
|
||||
}
|
||||
// Notice that the public key is in affine coordinates.
|
||||
Q.ToAffine()
|
||||
pubKey := secp256k1.NewPublicKey(&Q.X, &Q.Y)
|
||||
return pubKey, wasCompressed, nil
|
||||
}
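A sketch of the recover-and-compare flow, assuming `btcec.NewSecretKey` and `PublicKey.IsEqual` behave as in the upstream API this package is derived from.

```go
package main

import (
	"crypto/sha256"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/ecdsa"
)

func main() {
	secKey, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}
	hash := sha256.Sum256([]byte("recoverable"))
	sig := ecdsa.SignCompact(secKey, hash[:], true)

	pub, wasCompressed, err := ecdsa.RecoverCompact(sig, hash[:])
	if err != nil {
		panic(err)
	}
	fmt.Println("compressed:", wasCompressed)                    // expected: true
	fmt.Println("matches signer:", pub.IsEqual(secKey.PubKey())) // expected: true
}
```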
|
||||
1146
pkg/crypto/ec/ecdsa/signature_test.go
Normal file
File diff suppressed because it is too large
24
pkg/crypto/ec/error.go
Normal file
@@ -0,0 +1,24 @@
|
||||
// Copyright (c) 2013-2021 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// Error identifies an error related to public key cryptography using a
|
||||
// secp256k1 curve. It has full support for errors.Is and errors.As, so the
|
||||
// caller can ascertain the specific reason for the error by checking the
|
||||
// underlying error.
|
||||
type Error = secp256k1.Error
|
||||
|
||||
// ErrorKind identifies a kind of error. It has full support for errors.Is and
|
||||
// errors.As, so the caller can directly check against an error kind when
|
||||
// determining the reason for an error.
|
||||
type ErrorKind = secp256k1.ErrorKind
|
||||
|
||||
// makeError creates a secp256k1.Error given a set of arguments.
|
||||
func makeError(kind ErrorKind, desc string) Error {
|
||||
return Error{Err: kind, Description: desc}
|
||||
}
|
||||
45
pkg/crypto/ec/field.go
Normal file
@@ -0,0 +1,45 @@
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// FieldVal implements optimized fixed-precision arithmetic over the secp256k1
|
||||
// finite field. This means all arithmetic is performed modulo
|
||||
// '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'.
|
||||
//
|
||||
// WARNING: Since it is so important for the field arithmetic to be extremely
|
||||
// fast for high performance crypto, this type does not perform any validation
|
||||
// of documented preconditions where it ordinarily would. As a result, it is
|
||||
// IMPERATIVE for callers to understand some key concepts that are described
|
||||
// below and ensure the methods are called with the necessary preconditions
|
||||
// that each method is documented with. For example, some methods only give the
|
||||
// correct result if the field value is normalized and others require the field
|
||||
// values involved to have a maximum magnitude and THERE ARE NO EXPLICIT CHECKS
|
||||
// TO ENSURE THOSE PRECONDITIONS ARE SATISFIED. This does, unfortunately, make
|
||||
// the type more difficult to use correctly and while I typically prefer to
|
||||
// ensure all state and input is valid for most code, this is a bit of an
|
||||
// exception because those extra checks really add up in what ends up being
|
||||
// critical hot paths.
|
||||
//
|
||||
// The first key concept when working with this type is normalization. In order
|
||||
// to avoid the need to propagate a ton of carries, the internal representation
|
||||
// provides additional overflow bits for each word of the overall 256-bit
|
||||
// value. This means that there are multiple internal representations for the
|
||||
// same value and, as a result, any methods that rely on comparison of the
|
||||
// value, such as equality and oddness determination, require the caller to
|
||||
// provide a normalized value.
|
||||
//
|
||||
// The second key concept when working with this type is magnitude. As
|
||||
// previously mentioned, the internal representation provides additional
|
||||
// overflow bits which means that the more math operations that are performed
|
||||
// on the field value between normalizations, the more those overflow bits
|
||||
// accumulate. The magnitude is effectively that maximum possible number of
|
||||
// those overflow bits that could possibly be required as a result of a given
|
||||
// operation. Since there are only a limited number of overflow bits available,
|
||||
// this implies that the max possible magnitude MUST be tracked by the caller
|
||||
// and the caller MUST normalize the field value if a given operation would
|
||||
// cause the magnitude of the result to exceed the max allowed value.
|
||||
//
|
||||
// IMPORTANT: The max allowed magnitude of a field value is 64.
|
||||
type FieldVal = secp256k1.FieldVal
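A small sketch of the normalization rule the comment above insists on, using only `FieldVal` methods that appear elsewhere in this commit (`SetInt`, `Add`, `Normalize`, `Equals`).

```go
package main

import (
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
)

func main() {
	var a, b, want btcec.FieldVal
	a.SetInt(2)
	b.SetInt(3)
	want.SetInt(5)

	// Add raises the magnitude of a, so it must be normalized before any
	// comparison; Equals only gives a meaningful answer on normalized values.
	a.Add(&b)
	fmt.Println("2 + 3 == 5:", a.Normalize().Equals(want.Normalize()))
}
```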
|
||||
1196
pkg/crypto/ec/field_test.go
Normal file
File diff suppressed because it is too large
48
pkg/crypto/ec/fuzz_test.go
Normal file
@@ -0,0 +1,48 @@
|
||||
//go:build gofuzz || go1.18
|
||||
// +build gofuzz go1.18
|
||||
|
||||
// Copyright (c) 2013-2017 The btcsuite developers
|
||||
// Copyright (c) 2015-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
func FuzzParsePubKey(f *testing.F) {
|
||||
// 1. Seeds from pubkey tests.
|
||||
for _, test := range pubKeyTests {
|
||||
if test.isValid {
|
||||
f.Add(test.key)
|
||||
}
|
||||
}
|
||||
// 2. Seeds from recovery tests.
|
||||
var recoveryTestPubKeys = []string{
|
||||
"04E32DF42865E97135ACFB65F3BAE71BDC86F4D49150AD6A440B6F15878109880A0A2B2667F7E725CEEA70C673093BF67663E0312623C8E091B13CF2C0F11EF652",
|
||||
"04A7640409AA2083FDAD38B2D8DE1263B2251799591D840653FB02DBBA503D7745FCB83D80E08A1E02896BE691EA6AFFB8A35939A646F1FC79052A744B1C82EDC3",
|
||||
}
|
||||
for _, pubKey := range recoveryTestPubKeys {
|
||||
seed, err := hex.Dec(pubKey)
|
||||
if err != nil {
|
||||
f.Fatal(err)
|
||||
}
|
||||
f.Add(seed)
|
||||
}
|
||||
// Now run the fuzzer.
|
||||
f.Fuzz(
|
||||
func(t *testing.T, input []byte) {
|
||||
key, err := ParsePubKey(input)
|
||||
if key == nil && err == nil {
|
||||
panic("key==nil && err==nil")
|
||||
}
|
||||
if key != nil && err != nil {
|
||||
panic("key!=nil yet err!=nil")
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
50
pkg/crypto/ec/modnscalar.go
Normal file
@@ -0,0 +1,50 @@
|
||||
// Copyright (c) 2013-2021 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// ModNScalar implements optimized 256-bit constant-time fixed-precision
|
||||
// arithmetic over the secp256k1 group order. This means all arithmetic is
|
||||
// performed modulo:
|
||||
//
|
||||
// 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
|
||||
//
|
||||
// It only implements the arithmetic needed for elliptic curve operations,
|
||||
// however, the operations that are not implemented can typically be worked
|
||||
// around if absolutely needed. For example, subtraction can be performed by
|
||||
// adding the negation.
|
||||
//
|
||||
// Should it be absolutely necessary, conversion to the standard library
|
||||
// math/big.Int can be accomplished by using the Bytes method, slicing the
|
||||
// resulting fixed-size array, and feeding it to big.Int.SetBytes. However,
|
||||
// that should typically be avoided when possible as conversion to big.Ints
|
||||
// requires allocations, is not constant time, and is slower when working modulo
|
||||
// the group order.
|
||||
type ModNScalar = secp256k1.ModNScalar
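A sketch of the `math/big.Int` escape hatch described above; `SetInt` and the fixed-size `Bytes` method are assumptions taken from the upstream secp256k1 scalar API that this alias wraps.

```go
package main

import (
	"fmt"
	"math/big"

	btcec "next.orly.dev/pkg/crypto/ec"
)

func main() {
	// Assumption: SetInt and Bytes come from the wrapped secp256k1.ModNScalar,
	// as described in the comment above.
	var s btcec.ModNScalar
	s.SetInt(42)

	// Slice the fixed-size array and hand it to big.Int.SetBytes. Note this
	// allocates and is not constant time, as the comment above warns.
	b := s.Bytes()
	n := new(big.Int).SetBytes(b[:])
	fmt.Println(n) // prints 42
}
```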
|
||||
|
||||
// NonceRFC6979 generates a nonce deterministically according to RFC 6979 using
|
||||
// HMAC-SHA256 for the hashing function. It takes a 32-byte hash as an input
|
||||
// and returns a 32-byte nonce to be used for deterministic signing. The extra
|
||||
// and version arguments are optional, but allow additional data to be added to
|
||||
// the input of the HMAC. When provided, the extra data must be 32-bytes and
|
||||
// version must be 16 bytes or they will be ignored.
|
||||
//
|
||||
// Finally, the extraIterations parameter provides a method to produce a stream
|
||||
// of deterministic nonces to ensure the signing code is able to produce a nonce
|
||||
// that results in a valid signature in the extremely unlikely event the
|
||||
// original nonce produced results in an invalid signature (e.g. R == 0).
|
||||
// Signing code should start with 0 and increment it if necessary.
|
||||
func NonceRFC6979(
|
||||
privKey []byte, hash []byte, extra []byte, version []byte,
|
||||
extraIterations uint32,
|
||||
) *ModNScalar {
|
||||
|
||||
return secp256k1.NonceRFC6979(
|
||||
privKey, hash, extra, version,
|
||||
extraIterations,
|
||||
)
|
||||
}
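A sketch of generating a deterministic nonce with this wrapper, starting at iteration 0 and clearing the scalar afterwards as the signing code does. The key material and message here are throwaway illustration values.

```go
package main

import (
	"crypto/sha256"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
)

func main() {
	// Throwaway 32-byte key material and message hash for illustration only.
	secKeyBytes := sha256.Sum256([]byte("example secret key material"))
	msgHash := sha256.Sum256([]byte("message to sign"))

	// Iteration 0 first; signing code bumps the count only when the nonce
	// leads to an invalid signature (e.g. R == 0 or S == 0).
	k := btcec.NonceRFC6979(secKeyBytes[:], msgHash[:], nil, nil, 0)

	var buf [32]byte
	k.PutBytes(&buf)
	fmt.Printf("deterministic nonce: %x\n", buf)

	// Clear the nonce from memory once it is no longer needed.
	k.Zero()
}
```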
|
||||
280
pkg/crypto/ec/musig2/bench_test.go
Normal file
@@ -0,0 +1,280 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/schnorr"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
var (
|
||||
testPrivBytes = hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
testMsg = hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
)
|
||||
|
||||
func hexToBytes(s string) []byte {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func hexToModNScalar(s string) *btcec.ModNScalar {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var scalar btcec.ModNScalar
|
||||
if overflow := scalar.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod N scalar: " + s)
|
||||
}
|
||||
return &scalar
|
||||
}
|
||||
|
||||
func genSigner(t *testing.B) signer {
|
||||
privKey, err := btcec.NewSecretKey()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen priv key: %v", err)
|
||||
}
|
||||
pubKey := privKey.PubKey()
|
||||
nonces, err := GenNonces(WithPublicKey(pubKey))
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen nonces: %v", err)
|
||||
}
|
||||
return signer{
|
||||
privKey: privKey,
|
||||
pubKey: pubKey,
|
||||
nonces: nonces,
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
testSig *PartialSignature
|
||||
testErr error
|
||||
)
|
||||
|
||||
// BenchmarkPartialSign benchmarks how long it takes to generate a partial
|
||||
// signature factoring in if the keys are sorted and also if we're in fast sign
|
||||
// mode.
|
||||
func BenchmarkPartialSign(b *testing.B) {
|
||||
for _, numSigners := range []int{10, 100} {
|
||||
for _, fastSign := range []bool{true, false} {
|
||||
for _, sortKeys := range []bool{true, false} {
|
||||
name := fmt.Sprintf(
|
||||
"num_signers=%v/fast_sign=%v/sort=%v",
|
||||
numSigners, fastSign, sortKeys,
|
||||
)
|
||||
signers := make(signerSet, numSigners)
|
||||
for i := 0; i < numSigners; i++ {
|
||||
signers[i] = genSigner(b)
|
||||
}
|
||||
combinedNonce, err := AggregateNonces(signers.pubNonces())
|
||||
if err != nil {
|
||||
b.Fatalf("unable to generate combined nonce: %v", err)
|
||||
}
|
||||
var sig *PartialSignature
|
||||
var msg [32]byte
|
||||
copy(msg[:], testMsg[:])
|
||||
keys := signers.keys()
|
||||
b.Run(
|
||||
name, func(b *testing.B) {
|
||||
var signOpts []SignOption
|
||||
if fastSign {
|
||||
signOpts = append(signOpts, WithFastSign())
|
||||
}
|
||||
if sortKeys {
|
||||
signOpts = append(signOpts, WithSortedKeys())
|
||||
}
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
for i := 0; i < b.N; i++ {
|
||||
sig, err = Sign(
|
||||
signers[0].nonces.SecNonce, signers[0].privKey,
|
||||
combinedNonce, keys, msg, signOpts...,
|
||||
)
|
||||
if err != nil {
|
||||
b.Fatalf("unable to generate sig: %v", err)
|
||||
}
|
||||
}
|
||||
testSig = sig
|
||||
testErr = err
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: this fails
|
||||
// // TODO(roasbeef): add impact of sorting ^
|
||||
//
|
||||
// var sigOk bool
|
||||
//
|
||||
// // BenchmarkPartialVerify benchmarks how long it takes to verify a partial
|
||||
// // signature.
|
||||
// func BenchmarkPartialVerify(b *testing.B) {
|
||||
// for _, numSigners := range []int{10, 100} {
|
||||
// for _, sortKeys := range []bool{true, false} {
|
||||
// name := fmt.Sprintf("sort_keys=%v/num_signers=%v",
|
||||
// sortKeys, numSigners)
|
||||
// signers := make(signerSet, numSigners)
|
||||
// for i := 0; i < numSigners; i++ {
|
||||
// signers[i] = genSigner(b)
|
||||
// }
|
||||
// combinedNonce, err := AggregateNonces(
|
||||
// signers.pubNonces(),
|
||||
// )
|
||||
// if err != nil {
|
||||
// b.Fatalf("unable to generate combined "+
|
||||
// "nonce: %v", err)
|
||||
// }
|
||||
// var sig *PartialSignature
|
||||
// var msg [32]byte
|
||||
// copy(msg[:], testMsg[:])
|
||||
// b.ReportAllocs()
|
||||
// b.ResetTimer()
|
||||
// sig, err = Sign(
|
||||
// signers[0].nonces.SecNonce, signers[0].privKey,
|
||||
// combinedNonce, signers.keys(), msg,
|
||||
// )
|
||||
// if err != nil {
|
||||
// b.Fatalf("unable to generate sig: %v", err)
|
||||
// }
|
||||
// keys := signers.keys()
|
||||
// pubKey := signers[0].pubKey
|
||||
// b.Run(name, func(b *testing.B) {
|
||||
// var signOpts []SignOption
|
||||
// if sortKeys {
|
||||
// signOpts = append(
|
||||
// signOpts, WithSortedKeys(),
|
||||
// )
|
||||
// }
|
||||
// b.ResetTimer()
|
||||
// b.ReportAllocs()
|
||||
// var ok bool
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// ok = sig.Verify(
|
||||
// signers[0].nonces.PubNonce, combinedNonce,
|
||||
// keys, pubKey, msg, signOpts...,
|
||||
// )
|
||||
// if !ok {
|
||||
// b.Fatalf("generated invalid sig!")
|
||||
// }
|
||||
// }
|
||||
// sigOk = ok
|
||||
// })
|
||||
//
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
var finalSchnorrSig *schnorr.Signature
|
||||
|
||||
// BenchmarkCombineSigs benchmarks how long it takes to combine a set amount of
|
||||
// signatures.
|
||||
func BenchmarkCombineSigs(b *testing.B) {
|
||||
for _, numSigners := range []int{10, 100} {
|
||||
signers := make(signerSet, numSigners)
|
||||
for i := 0; i < numSigners; i++ {
|
||||
signers[i] = genSigner(b)
|
||||
}
|
||||
combinedNonce, err := AggregateNonces(signers.pubNonces())
|
||||
if err != nil {
|
||||
b.Fatalf("unable to generate combined nonce: %v", err)
|
||||
}
|
||||
var msg [32]byte
|
||||
copy(msg[:], testMsg[:])
|
||||
var finalNonce *btcec.PublicKey
|
||||
for i := range signers {
|
||||
signer := signers[i]
|
||||
partialSig, err := Sign(
|
||||
signer.nonces.SecNonce, signer.privKey,
|
||||
combinedNonce, signers.keys(), msg,
|
||||
)
|
||||
if err != nil {
|
||||
b.Fatalf(
|
||||
"unable to generate partial sig: %v",
|
||||
err,
|
||||
)
|
||||
}
|
||||
signers[i].partialSig = partialSig
|
||||
if finalNonce == nil {
|
||||
finalNonce = partialSig.R
|
||||
}
|
||||
}
|
||||
sigs := signers.partialSigs()
|
||||
name := fmt.Sprintf("num_signers=%v", numSigners)
|
||||
b.Run(
|
||||
name, func(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
finalSig := CombineSigs(finalNonce, sigs)
|
||||
finalSchnorrSig = finalSig
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
var testNonce [PubNonceSize]byte
|
||||
|
||||
// BenchmarkAggregateNonces benchmarks how long it takes to combine nonces.
|
||||
func BenchmarkAggregateNonces(b *testing.B) {
|
||||
for _, numSigners := range []int{10, 100} {
|
||||
signers := make(signerSet, numSigners)
|
||||
for i := 0; i < numSigners; i++ {
|
||||
signers[i] = genSigner(b)
|
||||
}
|
||||
nonces := signers.pubNonces()
|
||||
name := fmt.Sprintf("num_signers=%v", numSigners)
|
||||
b.Run(
|
||||
name, func(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
pubNonce, err := AggregateNonces(nonces)
|
||||
if err != nil {
|
||||
b.Fatalf("unable to generate nonces: %v", err)
|
||||
}
|
||||
testNonce = pubNonce
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
var testKey *btcec.PublicKey
|
||||
|
||||
// BenchmarkAggregateKeys benchmarks how long it takes to aggregate public
|
||||
// keys.
|
||||
func BenchmarkAggregateKeys(b *testing.B) {
|
||||
for _, numSigners := range []int{10, 100} {
|
||||
for _, sortKeys := range []bool{true, false} {
|
||||
signers := make(signerSet, numSigners)
|
||||
for i := 0; i < numSigners; i++ {
|
||||
signers[i] = genSigner(b)
|
||||
}
|
||||
signerKeys := signers.keys()
|
||||
name := fmt.Sprintf(
|
||||
"num_signers=%v/sort_keys=%v",
|
||||
numSigners, sortKeys,
|
||||
)
|
||||
uniqueKeyIndex := secondUniqueKeyIndex(signerKeys, false)
|
||||
b.Run(
|
||||
name, func(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
aggKey, _, _, _ := AggregateKeys(
|
||||
signerKeys, sortKeys,
|
||||
WithUniqueKeyIndex(uniqueKeyIndex),
|
||||
)
|
||||
testKey = aggKey.FinalKey
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
584
pkg/crypto/ec/musig2/context.go
Normal file
@@ -0,0 +1,584 @@
|
||||
// Copyright (c) 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/schnorr"
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrSignersNotSpecified is returned when a caller attempts to create
|
||||
// a context without specifying either the total number of signers, or
|
||||
// the complete set of signers.
|
||||
ErrSignersNotSpecified = fmt.Errorf(
|
||||
"total number of signers or all " +
|
||||
"signers must be known",
|
||||
)
|
||||
// ErrSignerNotInKeySet is returned when the secret key for a signer
|
||||
// isn't included in the set of signing public keys.
|
||||
ErrSignerNotInKeySet = fmt.Errorf(
|
||||
"signing key is not found in key" +
|
||||
" set",
|
||||
)
|
||||
// ErrAlreadyHaveAllNonces is returned when RegisterPubNonce is called
// too many times for a given signing session.
ErrAlreadyHaveAllNonces = fmt.Errorf("already have all nonces")
// ErrNotEnoughSigners is returned when a caller attempts to create a
// session from a context, but before all the required signers are
// known.
ErrNotEnoughSigners = fmt.Errorf("not enough signers")
// ErrAlreadyHaveAllSigners is returned when a caller attempts to
// register a signer, once we already have the total set of known
// signers.
ErrAlreadyHaveAllSigners = fmt.Errorf("all signers registered")
|
||||
// ErrAlredyHaveAllSigs is called when CombineSig is called too many
|
||||
// times for a given signing session.
|
||||
ErrAlredyHaveAllSigs = fmt.Errorf("already have all sigs")
|
||||
// ErrSigningContextReuse is returned if a user attempts to sign using
|
||||
// the same signing context more than once.
|
||||
ErrSigningContextReuse = fmt.Errorf("nonce already used")
|
||||
// ErrFinalSigInvalid is returned when the combined signature turns out
|
||||
// to be invalid.
|
||||
ErrFinalSigInvalid = fmt.Errorf("final signature is invalid")
|
||||
// ErrCombinedNonceUnavailable is returned when a caller attempts to
|
||||
// sign a partial signature, without first having collected all the
|
||||
// required combined nonces.
|
||||
ErrCombinedNonceUnavailable = fmt.Errorf("missing combined nonce")
|
||||
// ErrTaprootInternalKeyUnavailable is returned when a user attempts to
|
||||
// obtain the taproot internal key when a taproot tweak isn't being used.
|
||||
ErrTaprootInternalKeyUnavailable = fmt.Errorf("taproot tweak not used")
|
||||
// ErrNoEarlyNonce is returned if a caller attempts to obtain an early
// nonce when it wasn't requested via WithEarlyNonceGen.
ErrNoEarlyNonce = fmt.Errorf("no early nonce available")
|
||||
)
|
||||
|
||||
// Context is a managed signing context for musig2. It takes care of things
|
||||
// like securely generating secret nonces, aggregating keys and nonces, etc.
|
||||
type Context struct {
|
||||
// signingKey is the key we'll use for signing.
|
||||
signingKey *btcec.SecretKey
|
||||
// pubKey is our even-y coordinate public key.
|
||||
pubKey *btcec.PublicKey
|
||||
// combinedKey is the aggregated public key.
|
||||
combinedKey *AggregateKey
|
||||
// uniqueKeyIndex is the index of the second unique key in the keySet.
|
||||
// This is used to speed up signing and verification computations.
|
||||
uniqueKeyIndex int
|
||||
// keysHash is the hash of all the keys as defined in musig2.
|
||||
keysHash []byte
|
||||
// opts is the set of options for the context.
|
||||
opts *contextOptions
|
||||
// shouldSort keeps track of if the public keys should be sorted before
|
||||
// any operations.
|
||||
shouldSort bool
|
||||
// sessionNonce will be populated if the earlyNonce option is true.
|
||||
// After the first session is created, this nonce will be blanked out.
|
||||
sessionNonce *Nonces
|
||||
}
|
||||
|
||||
// ContextOption is a functional option argument that allows callers to modify
|
||||
// how the musig2 signing is done within a context.
|
||||
type ContextOption func(*contextOptions)
|
||||
|
||||
// contextOptions houses the set of functional options that can be used to
|
||||
// modify the musig2 signing protocol.
|
||||
type contextOptions struct {
|
||||
// tweaks is the set of optional tweaks to apply to the combined public
|
||||
// key.
|
||||
tweaks []KeyTweakDesc
|
||||
// taprootTweak specifies the taproot tweak. If specified, then we'll
|
||||
// use this as the script root for the BIP 341 taproot (x-only) tweak.
|
||||
// Normally we'd just apply the raw 32 byte tweak, but for taproot, we
|
||||
// first need to compute the aggregated key before tweaking, and then
|
||||
// use it as the internal key. This is required as the taproot tweak
|
||||
// also commits to the public key, which in this case is the aggregated
|
||||
// key before the tweak.
|
||||
taprootTweak []byte
|
||||
// bip86Tweak if true, then the tweak will just be
|
||||
// h_tapTweak(internalKey) as there is no true script root.
|
||||
bip86Tweak bool
|
||||
// keySet is the complete set of signers for this context.
|
||||
keySet []*btcec.PublicKey
|
||||
// numSigners is the total number of signers that will eventually be a
|
||||
// part of the context.
|
||||
numSigners int
|
||||
// earlyNonce determines if a nonce should be generated during context
|
||||
// creation, to be automatically passed to the created session.
|
||||
earlyNonce bool
|
||||
}
|
||||
|
||||
// defaultContextOptions returns the default context options.
|
||||
func defaultContextOptions() *contextOptions { return &contextOptions{} }
|
||||
|
||||
// WithTweakedContext specifies that within the context, the aggregated public
|
||||
// key should be tweaked with the specified tweaks.
|
||||
func WithTweakedContext(tweaks ...KeyTweakDesc) ContextOption {
|
||||
return func(o *contextOptions) { o.tweaks = tweaks }
|
||||
}
|
||||
|
||||
// WithTaprootTweakCtx specifies that within this context, the final key should
|
||||
// use the taproot tweak as defined in BIP 341: outputKey = internalKey +
|
||||
// h_tapTweak(internalKey || scriptRoot). In this case, the aggregated key
|
||||
// before the tweak will be used as the internal key.
|
||||
func WithTaprootTweakCtx(scriptRoot []byte) ContextOption {
|
||||
return func(o *contextOptions) { o.taprootTweak = scriptRoot }
|
||||
}
|
||||
|
||||
// WithBip86TweakCtx specifies that within this context, the final key should
|
||||
// use the taproot tweak as defined in BIP 341, with the BIP 86 modification:
|
||||
// outputKey = internalKey + h_tapTweak(internalKey)*G. In this case, the
|
||||
// aggregated key before the tweak will be used as the internal key.
|
||||
func WithBip86TweakCtx() ContextOption {
|
||||
return func(o *contextOptions) { o.bip86Tweak = true }
|
||||
}
|
||||
|
||||
// WithKnownSigners is an optional parameter that should be used if a session
|
||||
// can be created as soon as all the signers are known.
|
||||
func WithKnownSigners(signers []*btcec.PublicKey) ContextOption {
|
||||
return func(o *contextOptions) {
|
||||
o.keySet = signers
|
||||
o.numSigners = len(signers)
|
||||
}
|
||||
}
|
||||
|
||||
// WithNumSigners is a functional option used to specify that a context should
|
||||
// be created without knowing all the signers. Instead the total number of
|
||||
// signers is specified to ensure that a session can only be created once all
|
||||
// the signers are known.
|
||||
//
|
||||
// NOTE: Either WithKnownSigners or WithNumSigners MUST be specified.
|
||||
func WithNumSigners(n int) ContextOption {
|
||||
return func(o *contextOptions) { o.numSigners = n }
|
||||
}
|
||||
|
||||
// WithEarlyNonceGen allows a caller to specify that a nonce should be generated
|
||||
// early, before the session is created. This should be used in protocols that
|
||||
// require some partial nonce exchange before all the signers are known.
|
||||
//
|
||||
// NOTE: This option must only be specified with the WithNumSigners option.
|
||||
func WithEarlyNonceGen() ContextOption {
|
||||
return func(o *contextOptions) { o.earlyNonce = true }
|
||||
}
|
||||
|
||||
// NewContext creates a new signing context with the passed signing key and set
|
||||
// of public keys for each of the other signers.
|
||||
//
|
||||
// NOTE: This struct should be used over the raw Sign API whenever possible.
|
||||
func NewContext(
|
||||
signingKey *btcec.SecretKey, shouldSort bool,
|
||||
ctxOpts ...ContextOption,
|
||||
) (*Context, error) {
|
||||
|
||||
// First, parse the set of optional context options.
|
||||
opts := defaultContextOptions()
|
||||
for _, option := range ctxOpts {
|
||||
option(opts)
|
||||
}
|
||||
pubKey := signingKey.PubKey()
|
||||
ctx := &Context{
|
||||
signingKey: signingKey,
|
||||
pubKey: pubKey,
|
||||
opts: opts,
|
||||
shouldSort: shouldSort,
|
||||
}
|
||||
switch {
|
||||
// We know all the signers, so we can compute the aggregated key, along
|
||||
// with all the other intermediate state we need to do signing and
|
||||
// verification.
|
||||
case opts.keySet != nil:
|
||||
if err := ctx.combineSignerKeys(); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
// The total number of signers is known, so we add ourselves and defer
|
||||
// key aggregation until the remaining signers register.
|
||||
case opts.numSigners != 0:
|
||||
// Otherwise, we'll add ourselves as the only known signer, and
|
||||
// await further calls to RegisterSigner before a session can
|
||||
// be created.
|
||||
opts.keySet = make([]*btcec.PublicKey, 0, opts.numSigners)
|
||||
opts.keySet = append(opts.keySet, pubKey)
|
||||
default:
|
||||
return nil, ErrSignersNotSpecified
|
||||
}
|
||||
// If early nonce generation is specified, then we'll generate the
|
||||
// nonce now to pass in to the session once all the callers are known.
|
||||
if opts.earlyNonce {
|
||||
var err error
|
||||
ctx.sessionNonce, err = GenNonces(
|
||||
WithPublicKey(ctx.pubKey),
|
||||
WithNonceSecretKeyAux(signingKey),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return ctx, nil
|
||||
}
|
||||
|
||||
// combineSignerKeys is used to compute the aggregated signer key once all the
|
||||
// signers are known.
|
||||
func (c *Context) combineSignerKeys() error {
|
||||
// As a sanity check, make sure the signing key is actually
|
||||
// amongst the set of signers.
|
||||
var keyFound bool
|
||||
for _, key := range c.opts.keySet {
|
||||
if key.IsEqual(c.pubKey) {
|
||||
keyFound = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !keyFound {
|
||||
return ErrSignerNotInKeySet
|
||||
}
|
||||
|
||||
// Now that we know that we're actually a signer, we'll
|
||||
// generate the key hash fingerprint and second unique key
|
||||
// index so we can speed up signing later.
|
||||
c.keysHash = keyHashFingerprint(c.opts.keySet, c.shouldSort)
|
||||
c.uniqueKeyIndex = secondUniqueKeyIndex(
|
||||
c.opts.keySet, c.shouldSort,
|
||||
)
|
||||
keyAggOpts := []KeyAggOption{
|
||||
WithKeysHash(c.keysHash),
|
||||
WithUniqueKeyIndex(c.uniqueKeyIndex),
|
||||
}
|
||||
switch {
|
||||
case c.opts.bip86Tweak:
|
||||
keyAggOpts = append(
|
||||
keyAggOpts, WithBIP86KeyTweak(),
|
||||
)
|
||||
case c.opts.taprootTweak != nil:
|
||||
keyAggOpts = append(
|
||||
keyAggOpts, WithTaprootKeyTweak(c.opts.taprootTweak),
|
||||
)
|
||||
case len(c.opts.tweaks) != 0:
|
||||
keyAggOpts = append(keyAggOpts, WithKeyTweaks(c.opts.tweaks...))
|
||||
}
|
||||
// Next, we'll use this information to compute the aggregated
|
||||
// public key that'll be used for signing in practice.
|
||||
var err error
|
||||
c.combinedKey, _, _, err = AggregateKeys(
|
||||
c.opts.keySet, c.shouldSort, keyAggOpts...,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// EarlySessionNonce returns the early session nonce, if available.
|
||||
func (c *Context) EarlySessionNonce() (*Nonces, error) {
|
||||
if c.sessionNonce == nil {
|
||||
return nil, ErrNoEarlyNonce
|
||||
}
|
||||
return c.sessionNonce, nil
|
||||
}
|
||||
|
||||
// RegisterSigner allows a caller to register a signer after the context has
|
||||
// been created. This will be used in scenarios where the total number of
|
||||
// signers is known, but nonce exchange needs to happen before all the signers
|
||||
// are known.
|
||||
//
|
||||
// A bool is returned which indicates if all the signers have been registered.
|
||||
//
|
||||
// NOTE: If the set of keys is not to be sorted during signing, then the
|
||||
// ordering each key is registered with MUST match the desired ordering.
|
||||
func (c *Context) RegisterSigner(pub *btcec.PublicKey) (bool, error) {
|
||||
haveAllSigners := len(c.opts.keySet) == c.opts.numSigners
|
||||
if haveAllSigners {
|
||||
return false, ErrAlreadyHaveAllSigners
|
||||
}
|
||||
c.opts.keySet = append(c.opts.keySet, pub)
|
||||
// If we have the expected number of signers at this point, then we can
|
||||
// generate the aggregated key and other necessary information.
|
||||
haveAllSigners = len(c.opts.keySet) == c.opts.numSigners
|
||||
if haveAllSigners {
|
||||
if err := c.combineSignerKeys(); chk.T(err) {
|
||||
return false, err
|
||||
}
|
||||
}
|
||||
return haveAllSigners, nil
|
||||
}
|
||||
|
||||
// NumRegisteredSigners returns the total number of registered signers.
|
||||
func (c *Context) NumRegisteredSigners() int { return len(c.opts.keySet) }
|
||||
|
||||
// CombinedKey returns the combined public key that will be used to generate
|
||||
// multi-signatures against.
|
||||
func (c *Context) CombinedKey() (*btcec.PublicKey, error) {
|
||||
// If the caller hasn't registered all the signers at this point, then
|
||||
// the combined key won't be available.
|
||||
if c.combinedKey == nil {
|
||||
return nil, ErrNotEnoughSigners
|
||||
}
|
||||
return c.combinedKey.FinalKey, nil
|
||||
}
|
||||
|
||||
// PubKey returns the public key of the signer of this session.
|
||||
func (c *Context) PubKey() btcec.PublicKey { return *c.pubKey }
|
||||
|
||||
// SigningKeys returns the set of keys used for signing.
|
||||
func (c *Context) SigningKeys() []*btcec.PublicKey {
|
||||
keys := make([]*btcec.PublicKey, len(c.opts.keySet))
|
||||
copy(keys, c.opts.keySet)
|
||||
return keys
|
||||
}
|
||||
|
||||
// TaprootInternalKey returns the internal taproot key, which is the aggregated
|
||||
// key _before_ the tweak is applied. If a taproot tweak was specified, then
|
||||
// CombinedKey() will return the fully tweaked output key, with this method
|
||||
// returning the internal key. If a taproot tweak wasn't specified, then this
|
||||
// method will return an error.
|
||||
func (c *Context) TaprootInternalKey() (*btcec.PublicKey, error) {
|
||||
// If the caller hasn't registered all the signers at this point, then
|
||||
// the combined key won't be available.
|
||||
if c.combinedKey == nil {
|
||||
return nil, ErrNotEnoughSigners
|
||||
}
|
||||
if c.opts.taprootTweak == nil && !c.opts.bip86Tweak {
|
||||
return nil, ErrTaprootInternalKeyUnavailable
|
||||
}
|
||||
return c.combinedKey.PreTweakedKey, nil
|
||||
}
|
||||
|
||||
// SessionOption is a functional option argument that allows callers to modify
|
||||
// how musig2 signing is done within a session.
|
||||
type SessionOption func(*sessionOptions)
|
||||
|
||||
// sessionOptions houses the set of functional options that can be used to
|
||||
// modify the musig2 signing protocol.
|
||||
type sessionOptions struct {
|
||||
externalNonce *Nonces
|
||||
}
|
||||
|
||||
// defaultSessionOptions returns the default session options.
|
||||
func defaultSessionOptions() *sessionOptions { return &sessionOptions{} }
|
||||
|
||||
// WithPreGeneratedNonce allows a caller to start a session using a nonce
|
||||
// they've generated themselves. This may be useful in protocols where all the
|
||||
// signer keys may not be known before nonce exchange needs to occur.
|
||||
func WithPreGeneratedNonce(nonce *Nonces) SessionOption {
|
||||
return func(o *sessionOptions) { o.externalNonce = nonce }
|
||||
}
|
||||
|
||||
// Session represents a musig2 signing session. A new instance should be
|
||||
// created each time a multi-signature is needed. The session struct handles
|
||||
// nonce management, incremental partial sig verification, as well as final
|
||||
// signature combination. Errors are returned when unsafe behavior such as
|
||||
// nonce re-use is attempted.
|
||||
//
|
||||
// NOTE: This struct should be used over the raw Sign API whenever possible.
|
||||
type Session struct {
|
||||
opts *sessionOptions
|
||||
ctx *Context
|
||||
localNonces *Nonces
|
||||
pubNonces [][PubNonceSize]byte
|
||||
combinedNonce *[PubNonceSize]byte
|
||||
msg [32]byte
|
||||
ourSig *PartialSignature
|
||||
sigs []*PartialSignature
|
||||
finalSig *schnorr.Signature
|
||||
}
|
||||
|
||||
// NewSession creates a new musig2 signing session.
|
||||
func (c *Context) NewSession(options ...SessionOption) (*Session, error) {
|
||||
opts := defaultSessionOptions()
|
||||
for _, opt := range options {
|
||||
opt(opts)
|
||||
}
|
||||
// At this point we verify that we know of all the signers, as
|
||||
// otherwise we can't proceed with the session. This check is intended
|
||||
// to catch misuse of the API wherein a caller forgets to register the
|
||||
// remaining signers if they're doing nonce generation ahead of time.
|
||||
if len(c.opts.keySet) != c.opts.numSigners {
|
||||
return nil, ErrNotEnoughSigners
|
||||
}
|
||||
// If an early nonce was specified, then we'll automatically add the
|
||||
// corresponding session option for the caller.
|
||||
var localNonces *Nonces
|
||||
if c.sessionNonce != nil {
|
||||
// Apply the early nonce to the session, and also blank out the
|
||||
// session nonce on the context to ensure it isn't ever re-used
|
||||
// for another session.
|
||||
localNonces = c.sessionNonce
|
||||
c.sessionNonce = nil
|
||||
} else if opts.externalNonce != nil {
|
||||
// Otherwise if there's a custom nonce passed in via the
|
||||
// session options, then use that instead.
|
||||
localNonces = opts.externalNonce
|
||||
}
|
||||
// Now that we know we have enough signers, we'll either use the caller
|
||||
// specified nonce, or generate a fresh set.
|
||||
var err error
|
||||
if localNonces == nil {
|
||||
// At this point we need to generate a fresh nonce. We'll pass
|
||||
// in some auxiliary information to strengthen the nonce
|
||||
// generated.
|
||||
localNonces, err = GenNonces(
|
||||
WithPublicKey(c.pubKey),
|
||||
WithNonceSecretKeyAux(c.signingKey),
|
||||
WithNonceCombinedKeyAux(c.combinedKey.FinalKey),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
s := &Session{
|
||||
opts: opts,
|
||||
ctx: c,
|
||||
localNonces: localNonces,
|
||||
pubNonces: make([][PubNonceSize]byte, 0, c.opts.numSigners),
|
||||
sigs: make([]*PartialSignature, 0, c.opts.numSigners),
|
||||
}
|
||||
s.pubNonces = append(s.pubNonces, localNonces.PubNonce)
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// PublicNonce returns the public nonce for a signer. This should be sent to
|
||||
// other parties before signing begins, so they can compute the aggregated
|
||||
// public nonce.
|
||||
func (s *Session) PublicNonce() [PubNonceSize]byte {
|
||||
return s.localNonces.PubNonce
|
||||
}
|
||||
|
||||
// NumRegisteredNonces returns the total number of nonces that have been
|
||||
// registered so far.
|
||||
func (s *Session) NumRegisteredNonces() int { return len(s.pubNonces) }
|
||||
|
||||
// RegisterPubNonce should be called for each public nonce from the set of
|
||||
// signers. This method returns true once all the public nonces have been
|
||||
// accounted for.
|
||||
func (s *Session) RegisterPubNonce(nonce [PubNonceSize]byte) (bool, error) {
|
||||
// If we already have all the nonces, then this method was called too
|
||||
// many times.
|
||||
haveAllNonces := len(s.pubNonces) == s.ctx.opts.numSigners
|
||||
if haveAllNonces {
|
||||
return false, ErrAlreadyHaveAllNonces
|
||||
}
|
||||
// Add this nonce and check again if we already have all the nonces we
|
||||
// need.
|
||||
s.pubNonces = append(s.pubNonces, nonce)
|
||||
haveAllNonces = len(s.pubNonces) == s.ctx.opts.numSigners
|
||||
// If we have all the nonces, then we can go ahead and combine them
|
||||
// now.
|
||||
if haveAllNonces {
|
||||
combinedNonce, err := AggregateNonces(s.pubNonces)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
s.combinedNonce = &combinedNonce
|
||||
}
|
||||
return haveAllNonces, nil
|
||||
}
|
||||
|
||||
// Sign generates a partial signature for the target message, using the target
|
||||
// context. If this method is called more than once per context, then an error
|
||||
// is returned, as that means a nonce was re-used.
|
||||
func (s *Session) Sign(
|
||||
msg [32]byte,
|
||||
signOpts ...SignOption,
|
||||
) (*PartialSignature, error) {
|
||||
|
||||
switch {
|
||||
// If no local nonce is present, then this means we already signed, so
|
||||
// we'll return an error to prevent nonce re-use.
|
||||
case s.localNonces == nil:
|
||||
return nil, ErrSigningContextReuse
|
||||
// We also need to make sure we have the combined nonce, otherwise this
|
||||
// function was called too early.
|
||||
case s.combinedNonce == nil:
|
||||
return nil, ErrCombinedNonceUnavailable
|
||||
}
|
||||
switch {
|
||||
case s.ctx.opts.bip86Tweak:
|
||||
signOpts = append(
|
||||
signOpts, WithBip86SignTweak(),
|
||||
)
|
||||
case s.ctx.opts.taprootTweak != nil:
|
||||
signOpts = append(
|
||||
signOpts, WithTaprootSignTweak(s.ctx.opts.taprootTweak),
|
||||
)
|
||||
case len(s.ctx.opts.tweaks) != 0:
|
||||
signOpts = append(signOpts, WithTweaks(s.ctx.opts.tweaks...))
|
||||
}
|
||||
partialSig, err := Sign(
|
||||
s.localNonces.SecNonce, s.ctx.signingKey, *s.combinedNonce,
|
||||
s.ctx.opts.keySet, msg, signOpts...,
|
||||
)
|
||||
// Now that we've generated our signature, we'll make sure to blank out
|
||||
// our signing nonce.
|
||||
s.localNonces = nil
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.msg = msg
|
||||
s.ourSig = partialSig
|
||||
s.sigs = append(s.sigs, partialSig)
|
||||
return partialSig, nil
|
||||
}
|
||||
|
||||
// CombineSig buffers a partial signature received from a signing party. The
|
||||
// method returns true once all the signatures are available, and can be
|
||||
// combined into the final signature.
|
||||
func (s *Session) CombineSig(sig *PartialSignature) (bool, error) {
|
||||
// First check if we already have all the signatures we need. We
|
||||
// already accumulated our own signature when we generated the sig.
|
||||
haveAllSigs := len(s.sigs) == len(s.ctx.opts.keySet)
|
||||
if haveAllSigs {
|
||||
return false, ErrAlredyHaveAllSigs
|
||||
}
|
||||
// TODO(roasbeef): incremental check for invalid sig, or just detect at
|
||||
// the very end?
|
||||
//
|
||||
// Accumulate this sig, and check again if we have all the sigs we
|
||||
// need.
|
||||
s.sigs = append(s.sigs, sig)
|
||||
haveAllSigs = len(s.sigs) == len(s.ctx.opts.keySet)
|
||||
// If we have all the signatures, then we can combine them all into the
|
||||
// final signature.
|
||||
if haveAllSigs {
|
||||
var combineOpts []CombineOption
|
||||
switch {
|
||||
case s.ctx.opts.bip86Tweak:
|
||||
combineOpts = append(
|
||||
combineOpts, WithBip86TweakedCombine(
|
||||
s.msg, s.ctx.opts.keySet,
|
||||
s.ctx.shouldSort,
|
||||
),
|
||||
)
|
||||
case s.ctx.opts.taprootTweak != nil:
|
||||
combineOpts = append(
|
||||
combineOpts, WithTaprootTweakedCombine(
|
||||
s.msg, s.ctx.opts.keySet,
|
||||
s.ctx.opts.taprootTweak, s.ctx.shouldSort,
|
||||
),
|
||||
)
|
||||
case len(s.ctx.opts.tweaks) != 0:
|
||||
combineOpts = append(
|
||||
combineOpts, WithTweakedCombine(
|
||||
s.msg, s.ctx.opts.keySet,
|
||||
s.ctx.opts.tweaks, s.ctx.shouldSort,
|
||||
),
|
||||
)
|
||||
}
|
||||
finalSig := CombineSigs(s.ourSig.R, s.sigs, combineOpts...)
|
||||
// We'll also verify the signature at this point to ensure it's
|
||||
// valid.
|
||||
//
|
||||
// TODO(roasbeef): allow skipping?
|
||||
if !finalSig.Verify(s.msg[:], s.ctx.combinedKey.FinalKey) {
|
||||
return false, ErrFinalSigInvalid
|
||||
}
|
||||
s.finalSig = finalSig
|
||||
}
|
||||
return haveAllSigs, nil
|
||||
}
|
||||
|
||||
// FinalSig returns the final combined multi-signature, if present.
|
||||
func (s *Session) FinalSig() *schnorr.Signature { return s.finalSig }
|
||||
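For readers new to this file, here is a minimal sketch of how the Context and Session types above are meant to be driven for a simple two-party signing round. The import paths follow the ones used by keys.go in this package and may differ; the `sendNonce` and `sendPartialSig` callbacks are placeholders invented for the example, standing in for whatever transport carries the other signer's data. Everything else (NewContext, WithKnownSigners, NewSession, PublicNonce, RegisterPubNonce, Sign, CombineSig, FinalSig) is the API defined above.

```go
package example

import (
	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/musig2"
	"next.orly.dev/pkg/crypto/ec/schnorr"
)

// twoPartySign drives one signer through a two-party musig2 signing round
// using the Context/Session API from context.go.
func twoPartySign(
	signingKey *btcec.SecretKey, otherKey *btcec.PublicKey, msg [32]byte,
	sendNonce func([musig2.PubNonceSize]byte) [musig2.PubNonceSize]byte,
	sendPartialSig func(*musig2.PartialSignature) *musig2.PartialSignature,
) (*schnorr.Signature, error) {
	// Both signer keys are known up front, so the aggregated key is
	// computed as soon as the context is created.
	ctx, err := musig2.NewContext(
		signingKey, true, musig2.WithKnownSigners(
			[]*btcec.PublicKey{signingKey.PubKey(), otherKey},
		),
	)
	if err != nil {
		return nil, err
	}
	// The session generates our secret nonce and tracks the other
	// signer's public nonce and partial signature.
	session, err := ctx.NewSession()
	if err != nil {
		return nil, err
	}
	// Exchange public nonces; once the last one is registered the
	// combined nonce becomes available internally.
	theirNonce := sendNonce(session.PublicNonce())
	if _, err := session.RegisterPubNonce(theirNonce); err != nil {
		return nil, err
	}
	// Produce our partial signature and trade it for theirs.
	ourSig, err := session.Sign(msg)
	if err != nil {
		return nil, err
	}
	theirSig := sendPartialSig(ourSig)
	// CombineSig verifies and stores the final signature once all
	// partial signatures are accounted for.
	if _, err := session.CombineSig(theirSig); err != nil {
		return nil, err
	}
	return session.FinalSig(), nil
}
```

When the signer set is not known up front, the same flow applies, except the context is created with WithNumSigners and the remaining keys are fed in via RegisterSigner before NewSession is called.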
127
pkg/crypto/ec/musig2/data/key_agg_vectors.json
Normal file
@@ -0,0 +1,127 @@
|
||||
{
|
||||
"pubkeys": [
|
||||
"02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
"023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
|
||||
"020000000000000000000000000000000000000000000000000000000000000005",
|
||||
"02FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30",
|
||||
"04F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9"
|
||||
],
|
||||
"tweaks": [
|
||||
"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",
|
||||
"252E4BD67410A76CDF933D30EAA1608214037F1B105A013ECCD3C5C184A6110B"
|
||||
],
|
||||
"valid_test_cases": [
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"expected": "90539EEDE565F5D054F32CC0C220126889ED1E5D193BAF15AEF344FE59D4610C"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
2,
|
||||
1,
|
||||
0
|
||||
],
|
||||
"expected": "6204DE8B083426DC6EAF9502D27024D53FC826BF7D2012148A0575435DF54B2B"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
0,
|
||||
0
|
||||
],
|
||||
"expected": "B436E3BAD62B8CD409969A224731C193D051162D8C5AE8B109306127DA3AA935"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1
|
||||
],
|
||||
"expected": "69BC22BFA5D106306E48A20679DE1D7389386124D07571D0D872686028C26A3E"
|
||||
}
|
||||
],
|
||||
"error_test_cases": [
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
3
|
||||
],
|
||||
"tweak_indices": [],
|
||||
"is_xonly": [],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 1,
|
||||
"contrib": "pubkey"
|
||||
},
|
||||
"comment": "Invalid public key"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
4
|
||||
],
|
||||
"tweak_indices": [],
|
||||
"is_xonly": [],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 1,
|
||||
"contrib": "pubkey"
|
||||
},
|
||||
"comment": "Public key exceeds field size"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
5,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [],
|
||||
"is_xonly": [],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 0,
|
||||
"contrib": "pubkey"
|
||||
},
|
||||
"comment": "First byte of public key is not 2 or 3"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"tweak_indices": [
|
||||
0
|
||||
],
|
||||
"is_xonly": [
|
||||
true
|
||||
],
|
||||
"error": {
|
||||
"type": "value",
|
||||
"message": "The tweak must be less than n."
|
||||
},
|
||||
"comment": "Tweak is out of range"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
6
|
||||
],
|
||||
"tweak_indices": [
|
||||
1
|
||||
],
|
||||
"is_xonly": [
|
||||
false
|
||||
],
|
||||
"error": {
|
||||
"type": "value",
|
||||
"message": "The result of tweaking cannot be infinity."
|
||||
},
|
||||
"comment": "Intermediate tweaking result is point at infinity"
|
||||
}
|
||||
]
|
||||
}
|
||||
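The vectors above can be exercised against AggregateKeys with a small table-driven test. This is only a sketch: the struct and test name are invented here, it checks only the valid cases, and it assumes btcec.ParsePubKey and schnorr.SerializePubKey exist in this fork with their upstream btcec signatures.

```go
package musig2

import (
	"encoding/hex"
	"encoding/json"
	"os"
	"strings"
	"testing"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/schnorr"
)

// keyAggVectors mirrors the JSON layout above; only the fields needed for
// the valid cases are declared.
type keyAggVectors struct {
	PubKeys        []string `json:"pubkeys"`
	ValidTestCases []struct {
		KeyIndices []int  `json:"key_indices"`
		Expected   string `json:"expected"`
	} `json:"valid_test_cases"`
}

func TestKeyAggVectorsSketch(t *testing.T) {
	raw, err := os.ReadFile("data/key_agg_vectors.json")
	if err != nil {
		t.Fatal(err)
	}
	var vectors keyAggVectors
	if err := json.Unmarshal(raw, &vectors); err != nil {
		t.Fatal(err)
	}
	for _, tc := range vectors.ValidTestCases {
		keys := make([]*btcec.PublicKey, 0, len(tc.KeyIndices))
		for _, idx := range tc.KeyIndices {
			keyBytes, err := hex.DecodeString(vectors.PubKeys[idx])
			if err != nil {
				t.Fatal(err)
			}
			key, err := btcec.ParsePubKey(keyBytes)
			if err != nil {
				t.Fatal(err)
			}
			keys = append(keys, key)
		}
		// The vectors aggregate the keys in the order given (the second
		// case reorders the first and expects a different key), so
		// sorting is disabled.
		aggKey, _, _, err := AggregateKeys(keys, false)
		if err != nil {
			t.Fatal(err)
		}
		got := hex.EncodeToString(schnorr.SerializePubKey(aggKey.FinalKey))
		if got != strings.ToLower(tc.Expected) {
			t.Fatalf("aggregate key mismatch: got %s, want %s", got, tc.Expected)
		}
	}
}
```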
16
pkg/crypto/ec/musig2/data/key_sort_vectors.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"pubkeys": [
|
||||
"02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
|
||||
"02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
"023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
|
||||
"02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8"
|
||||
],
|
||||
"sorted_pubkeys": [
|
||||
"023590A94E768F8E1815C2F24B4D80A8E3149316C3518CE7B7AD338368D038CA66",
|
||||
"02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
|
||||
"02DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
|
||||
"02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"03DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659"
|
||||
]
|
||||
}
|
||||
69
pkg/crypto/ec/musig2/data/nonce_agg_vectors.json
Normal file
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"pnonces": [
|
||||
"020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E66603BA47FBC1834437B3212E89A84D8425E7BF12E0245D98262268EBDCB385D50641",
|
||||
"03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833",
|
||||
"020151C80F435648DF67A22B749CD798CE54E0321D034B92B709B567D60A42E6660279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
|
||||
"03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60379BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
|
||||
"04FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B833",
|
||||
"03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A60248C264CDD57D3C24D79990B0F865674EB62A0F9018277A95011B41BFC193B831",
|
||||
"03FF406FFD8ADB9CD29877E4985014F66A59F6CD01C0E88CAA8E5F3166B1F676A602FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30"
|
||||
],
|
||||
"valid_test_cases": [
|
||||
{
|
||||
"pnonce_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"expected": "035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B024725377345BDE0E9C33AF3C43C0A29A9249F2F2956FA8CFEB55C8573D0262DC8"
|
||||
},
|
||||
{
|
||||
"pnonce_indices": [
|
||||
2,
|
||||
3
|
||||
],
|
||||
"expected": "035FE1873B4F2967F52FEA4A06AD5A8ECCBE9D0FD73068012C894E2E87CCB5804B000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"comment": "Sum of second points encoded in the nonces is point at infinity which is serialized as 33 zero bytes"
|
||||
}
|
||||
],
|
||||
"error_test_cases": [
|
||||
{
|
||||
"pnonce_indices": [
|
||||
0,
|
||||
4
|
||||
],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 1,
|
||||
"contrib": "pubnonce"
|
||||
},
|
||||
"comment": "Public nonce from signer 1 is invalid due wrong tag, 0x04, in the first half",
|
||||
"btcec_err": "invalid public key: unsupported format: 4"
|
||||
},
|
||||
{
|
||||
"pnonce_indices": [
|
||||
5,
|
||||
1
|
||||
],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 0,
|
||||
"contrib": "pubnonce"
|
||||
},
|
||||
"comment": "Public nonce from signer 0 is invalid because the second half does not correspond to an X coordinate",
|
||||
"btcec_err": "invalid public key: x coordinate 48c264cdd57d3c24d79990b0f865674eb62a0f9018277a95011b41bfc193b831 is not on the secp256k1 curve"
|
||||
},
|
||||
{
|
||||
"pnonce_indices": [
|
||||
6,
|
||||
1
|
||||
],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 0,
|
||||
"contrib": "pubnonce"
|
||||
},
|
||||
"comment": "Public nonce from signer 0 is invalid because second half exceeds field size",
|
||||
"btcec_err": "invalid public key: x >= field prime"
|
||||
}
|
||||
]
|
||||
}
|
||||
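These aggregation vectors correspond to the AggregateNonces call that Session.RegisterPubNonce makes once the last nonce arrives. Below is a hedged sketch of feeding hex-encoded public nonces (such as the `pnonces` entries above) through it; the helper name is invented for illustration.

```go
package musig2

import (
	"encoding/hex"
	"fmt"
)

// aggregateHexNonces combines hex-encoded public nonces into the single
// aggregated nonce that every signer uses for partial signing.
func aggregateHexNonces(pnonceHex ...string) ([PubNonceSize]byte, error) {
	var zero [PubNonceSize]byte
	pubNonces := make([][PubNonceSize]byte, 0, len(pnonceHex))
	for _, h := range pnonceHex {
		raw, err := hex.DecodeString(h)
		if err != nil {
			return zero, err
		}
		if len(raw) != PubNonceSize {
			return zero, fmt.Errorf("unexpected nonce length %d", len(raw))
		}
		var nonce [PubNonceSize]byte
		copy(nonce[:], raw)
		pubNonces = append(pubNonces, nonce)
	}
	// Each 66-byte nonce encodes two points; AggregateNonces sums the
	// corresponding points across all signers.
	return AggregateNonces(pubNonces)
}
```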
40
pkg/crypto/ec/musig2/data/nonce_gen_vectors.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"test_cases": [
|
||||
{
|
||||
"rand_": "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"sk": "0202020202020202020202020202020202020202020202020202020202020202",
|
||||
"pk": "024D4B6CD1361032CA9BD2AEB9D900AA4D45D9EAD80AC9423374C451A7254D0766",
|
||||
"aggpk": "0707070707070707070707070707070707070707070707070707070707070707",
|
||||
"msg": "0101010101010101010101010101010101010101010101010101010101010101",
|
||||
"extra_in": "0808080808080808080808080808080808080808080808080808080808080808",
|
||||
"expected": "227243DCB40EF2A13A981DB188FA433717B506BDFA14B1AE47D5DC027C9C3B9EF2370B2AD206E724243215137C86365699361126991E6FEC816845F837BDDAC3024D4B6CD1361032CA9BD2AEB9D900AA4D45D9EAD80AC9423374C451A7254D0766"
|
||||
},
|
||||
{
|
||||
"rand_": "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"sk": "0202020202020202020202020202020202020202020202020202020202020202",
|
||||
"pk": "024D4B6CD1361032CA9BD2AEB9D900AA4D45D9EAD80AC9423374C451A7254D0766",
|
||||
"aggpk": "0707070707070707070707070707070707070707070707070707070707070707",
|
||||
"msg": "",
|
||||
"extra_in": "0808080808080808080808080808080808080808080808080808080808080808",
|
||||
"expected": "CD0F47FE471D6788FF3243F47345EA0A179AEF69476BE8348322EF39C2723318870C2065AFB52DEDF02BF4FDBF6D2F442E608692F50C2374C08FFFE57042A61C024D4B6CD1361032CA9BD2AEB9D900AA4D45D9EAD80AC9423374C451A7254D0766"
|
||||
},
|
||||
{
|
||||
"rand_": "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"sk": "0202020202020202020202020202020202020202020202020202020202020202",
|
||||
"pk": "024D4B6CD1361032CA9BD2AEB9D900AA4D45D9EAD80AC9423374C451A7254D0766",
|
||||
"aggpk": "0707070707070707070707070707070707070707070707070707070707070707",
|
||||
"msg": "2626262626262626262626262626262626262626262626262626262626262626262626262626",
|
||||
"extra_in": "0808080808080808080808080808080808080808080808080808080808080808",
|
||||
"expected": "011F8BC60EF061DEEF4D72A0A87200D9994B3F0CD9867910085C38D5366E3E6B9FF03BC0124E56B24069E91EC3F162378983F194E8BD0ED89BE3059649EAE262024D4B6CD1361032CA9BD2AEB9D900AA4D45D9EAD80AC9423374C451A7254D0766"
|
||||
},
|
||||
{
|
||||
"rand_": "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"sk": null,
|
||||
"pk": "02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"aggpk": null,
|
||||
"msg": null,
|
||||
"extra_in": null,
|
||||
"expected": "890E83616A3BC4640AB9B6374F21C81FF89CDDDBAFAA7475AE2A102A92E3EDB29FD7E874E23342813A60D9646948242646B7951CA046B4B36D7D6078506D3C9402F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9"
|
||||
}
|
||||
]
|
||||
}
|
||||
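The generation vectors above are deterministic because the `rand_` input is fixed; in normal use GenNonces draws fresh randomness and mixes in the optional auxiliary inputs shown in context.go. A minimal sketch, with the helper name invented here:

```go
package musig2

import (
	btcec "next.orly.dev/pkg/crypto/ec"
)

// freshNonces generates a nonce pair for one signer, binding it to the
// signer's public key and folding the secret key into the nonce derivation
// as auxiliary input, as NewContext does when early nonce generation is
// requested.
func freshNonces(signingKey *btcec.SecretKey) (*Nonces, error) {
	nonces, err := GenNonces(
		// The trailing 33 bytes of each expected secnonce in the
		// vectors above are the signer's compressed public key, so the
		// public key must be supplied here.
		WithPublicKey(signingKey.PubKey()),
		// Extra entropy derived from the secret key.
		WithNonceSecretKeyAux(signingKey),
	)
	if err != nil {
		return nil, err
	}
	// nonces.PubNonce is shared with the other signers; nonces.SecNonce
	// must stay private and must never be reused.
	return nonces, nil
}
```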
151
pkg/crypto/ec/musig2/data/sig_agg_vectors.json
Normal file
@@ -0,0 +1,151 @@
|
||||
{
|
||||
"pubkeys": [
|
||||
"03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
|
||||
"02D2DC6F5DF7C56ACF38C7FA0AE7A759AE30E19B37359DFDE015872324C7EF6E05",
|
||||
"03C7FB101D97FF930ACD0C6760852EF64E69083DE0B06AC6335724754BB4B0522C",
|
||||
"02352433B21E7E05D3B452B81CAE566E06D2E003ECE16D1074AABA4289E0E3D581"
|
||||
],
|
||||
"pnonces": [
|
||||
"036E5EE6E28824029FEA3E8A9DDD2C8483F5AF98F7177C3AF3CB6F47CAF8D94AE902DBA67E4A1F3680826172DA15AFB1A8CA85C7C5CC88900905C8DC8C328511B53E",
|
||||
"03E4F798DA48A76EEC1C9CC5AB7A880FFBA201A5F064E627EC9CB0031D1D58FC5103E06180315C5A522B7EC7C08B69DCD721C313C940819296D0A7AB8E8795AC1F00",
|
||||
"02C0068FD25523A31578B8077F24F78F5BD5F2422AFF47C1FADA0F36B3CEB6C7D202098A55D1736AA5FCC21CF0729CCE852575C06C081125144763C2C4C4A05C09B6",
|
||||
"031F5C87DCFBFCF330DEE4311D85E8F1DEA01D87A6F1C14CDFC7E4F1D8C441CFA40277BF176E9F747C34F81B0D9F072B1B404A86F402C2D86CF9EA9E9C69876EA3B9",
|
||||
"023F7042046E0397822C4144A17F8B63D78748696A46C3B9F0A901D296EC3406C302022B0B464292CF9751D699F10980AC764E6F671EFCA15069BBE62B0D1C62522A",
|
||||
"02D97DDA5988461DF58C5897444F116A7C74E5711BF77A9446E27806563F3B6C47020CBAD9C363A7737F99FA06B6BE093CEAFF5397316C5AC46915C43767AE867C00"
|
||||
],
|
||||
"tweaks": [
|
||||
"B511DA492182A91B0FFB9A98020D55F260AE86D7ECBD0399C7383D59A5F2AF7C",
|
||||
"A815FE049EE3C5AAB66310477FBC8BCCCAC2F3395F59F921C364ACD78A2F48DC",
|
||||
"75448A87274B056468B977BE06EB1E9F657577B7320B0A3376EA51FD420D18A8"
|
||||
],
|
||||
"psigs": [
|
||||
"B15D2CD3C3D22B04DAE438CE653F6B4ECF042F42CFDED7C41B64AAF9B4AF53FB",
|
||||
"6193D6AC61B354E9105BBDC8937A3454A6D705B6D57322A5A472A02CE99FCB64",
|
||||
"9A87D3B79EC67228CB97878B76049B15DBD05B8158D17B5B9114D3C226887505",
|
||||
"66F82EA90923689B855D36C6B7E032FB9970301481B99E01CDB4D6AC7C347A15",
|
||||
"4F5AEE41510848A6447DCD1BBC78457EF69024944C87F40250D3EF2C25D33EFE",
|
||||
"DDEF427BBB847CC027BEFF4EDB01038148917832253EBC355FC33F4A8E2FCCE4",
|
||||
"97B890A26C981DA8102D3BC294159D171D72810FDF7C6A691DEF02F0F7AF3FDC",
|
||||
"53FA9E08BA5243CBCB0D797C5EE83BC6728E539EB76C2D0BF0F971EE4E909971",
|
||||
"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141"
|
||||
],
|
||||
"msg": "599C67EA410D005B9DA90817CF03ED3B1C868E4DA4EDF00A5880B0082C237869",
|
||||
"valid_test_cases": [
|
||||
{
|
||||
"aggnonce": "0341432722C5CD0268D829C702CF0D1CBCE57033EED201FD335191385227C3210C03D377F2D258B64AADC0E16F26462323D701D286046A2EA93365656AFD9875982B",
|
||||
"nonce_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"key_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"tweak_indices": [],
|
||||
"is_xonly": [],
|
||||
"psig_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"expected": "041DA22223CE65C92C9A0D6C2CAC828AAF1EEE56304FEC371DDF91EBB2B9EF0912F1038025857FEDEB3FF696F8B99FA4BB2C5812F6095A2E0004EC99CE18DE1E"
|
||||
},
|
||||
{
|
||||
"aggnonce": "0224AFD36C902084058B51B5D36676BBA4DC97C775873768E58822F87FE437D792028CB15929099EEE2F5DAE404CD39357591BA32E9AF4E162B8D3E7CB5EFE31CB20",
|
||||
"nonce_indices": [
|
||||
0,
|
||||
2
|
||||
],
|
||||
"key_indices": [
|
||||
0,
|
||||
2
|
||||
],
|
||||
"tweak_indices": [],
|
||||
"is_xonly": [],
|
||||
"psig_indices": [
|
||||
2,
|
||||
3
|
||||
],
|
||||
"expected": "1069B67EC3D2F3C7C08291ACCB17A9C9B8F2819A52EB5DF8726E17E7D6B52E9F01800260A7E9DAC450F4BE522DE4CE12BA91AEAF2B4279219EF74BE1D286ADD9"
|
||||
},
|
||||
{
|
||||
"aggnonce": "0208C5C438C710F4F96A61E9FF3C37758814B8C3AE12BFEA0ED2C87FF6954FF186020B1816EA104B4FCA2D304D733E0E19CEAD51303FF6420BFD222335CAA402916D",
|
||||
"nonce_indices": [
|
||||
0,
|
||||
3
|
||||
],
|
||||
"key_indices": [
|
||||
0,
|
||||
2
|
||||
],
|
||||
"tweak_indices": [
|
||||
0
|
||||
],
|
||||
"is_xonly": [
|
||||
false
|
||||
],
|
||||
"psig_indices": [
|
||||
4,
|
||||
5
|
||||
],
|
||||
"expected": "5C558E1DCADE86DA0B2F02626A512E30A22CF5255CAEA7EE32C38E9A71A0E9148BA6C0E6EC7683B64220F0298696F1B878CD47B107B81F7188812D593971E0CC"
|
||||
},
|
||||
{
|
||||
"aggnonce": "02B5AD07AFCD99B6D92CB433FBD2A28FDEB98EAE2EB09B6014EF0F8197CD58403302E8616910F9293CF692C49F351DB86B25E352901F0E237BAFDA11F1C1CEF29FFD",
|
||||
"nonce_indices": [
|
||||
0,
|
||||
4
|
||||
],
|
||||
"key_indices": [
|
||||
0,
|
||||
3
|
||||
],
|
||||
"tweak_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"is_xonly": [
|
||||
true,
|
||||
false,
|
||||
true
|
||||
],
|
||||
"psig_indices": [
|
||||
6,
|
||||
7
|
||||
],
|
||||
"expected": "839B08820B681DBA8DAF4CC7B104E8F2638F9388F8D7A555DC17B6E6971D7426CE07BF6AB01F1DB50E4E33719295F4094572B79868E440FB3DEFD3FAC1DB589E"
|
||||
}
|
||||
],
|
||||
"error_test_cases": [
|
||||
{
|
||||
"aggnonce": "02B5AD07AFCD99B6D92CB433FBD2A28FDEB98EAE2EB09B6014EF0F8197CD58403302E8616910F9293CF692C49F351DB86B25E352901F0E237BAFDA11F1C1CEF29FFD",
|
||||
"nonce_indices": [
|
||||
0,
|
||||
4
|
||||
],
|
||||
"key_indices": [
|
||||
0,
|
||||
3
|
||||
],
|
||||
"tweak_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"is_xonly": [
|
||||
true,
|
||||
false,
|
||||
true
|
||||
],
|
||||
"psig_indices": [
|
||||
7,
|
||||
8
|
||||
],
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 1
|
||||
},
|
||||
"comment": "Partial signature is invalid because it exceeds group size"
|
||||
}
|
||||
]
|
||||
}
|
||||
287
pkg/crypto/ec/musig2/data/sign_verify_vectors.json
Normal file
@@ -0,0 +1,287 @@
|
||||
{
|
||||
"sk": "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671",
|
||||
"pubkeys": [
|
||||
"03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
|
||||
"02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"02DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA661",
|
||||
"020000000000000000000000000000000000000000000000000000000000000007"
|
||||
],
|
||||
"secnonces": [
|
||||
"508B81A611F100A6B2B6B29656590898AF488BCF2E1F55CF22E5CFB84421FE61FA27FD49B1D50085B481285E1CA205D55C82CC1B31FF5CD54A489829355901F703935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
|
||||
"0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9"
|
||||
],
|
||||
"pnonces": [
|
||||
"0337C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480",
|
||||
"0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
|
||||
"032DE2662628C90B03F5E720284EB52FF7D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1CA6BEB2090C93D930321071AD40B2F44E599046",
|
||||
"0237C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0387BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480",
|
||||
"020000000000000000000000000000000000000000000000000000000000000009"
|
||||
],
|
||||
"aggnonces": [
|
||||
"028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9",
|
||||
"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"048465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9",
|
||||
"028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61020000000000000000000000000000000000000000000000000000000000000009",
|
||||
"028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD6102FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30"
|
||||
],
|
||||
"msgs": [
|
||||
"F95466D086770E689964664219266FE5ED215C92AE20BAB5C9D79ADDDDF3C0CF",
|
||||
"",
|
||||
"2626262626262626262626262626262626262626262626262626262626262626262626262626"
|
||||
],
|
||||
"valid_test_cases": [
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"aggnonce_index": 0,
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"expected": "012ABBCB52B3016AC03AD82395A1A415C48B93DEF78718E62A7A90052FE224FB"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
0,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
0,
|
||||
2
|
||||
],
|
||||
"aggnonce_index": 0,
|
||||
"msg_index": 0,
|
||||
"signer_index": 1,
|
||||
"expected": "9FF2F7AAA856150CC8819254218D3ADEEB0535269051897724F9DB3789513A52"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"aggnonce_index": 0,
|
||||
"msg_index": 0,
|
||||
"signer_index": 2,
|
||||
"expected": "FA23C359F6FAC4E7796BB93BC9F0532A95468C539BA20FF86D7C76ED92227900"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"nonce_indices": [
|
||||
0,
|
||||
3
|
||||
],
|
||||
"aggnonce_index": 1,
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"expected": "AE386064B26105404798F75DE2EB9AF5EDA5387B064B83D049CB7C5E08879531",
|
||||
"comment": "Both halves of aggregate nonce correspond to point at infinity"
|
||||
}
|
||||
],
|
||||
"sign_error_test_cases": [
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2
|
||||
],
|
||||
"aggnonce_index": 0,
|
||||
"msg_index": 0,
|
||||
"secnonce_index": 0,
|
||||
"error": {
|
||||
"type": "value",
|
||||
"message": "The signer's pubkey must be included in the list of pubkeys."
|
||||
},
|
||||
"comment": "The signers pubkey is not in the list of pubkeys"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
0,
|
||||
3
|
||||
],
|
||||
"aggnonce_index": 0,
|
||||
"msg_index": 0,
|
||||
"secnonce_index": 0,
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 2,
|
||||
"contrib": "pubkey"
|
||||
},
|
||||
"comment": "Signer 2 provided an invalid public key"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"aggnonce_index": 2,
|
||||
"msg_index": 0,
|
||||
"secnonce_index": 0,
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": null,
|
||||
"contrib": "aggnonce"
|
||||
},
|
||||
"comment": "Aggregate nonce is invalid due wrong tag, 0x04, in the first half"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"aggnonce_index": 3,
|
||||
"msg_index": 0,
|
||||
"secnonce_index": 0,
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": null,
|
||||
"contrib": "aggnonce"
|
||||
},
|
||||
"comment": "Aggregate nonce is invalid because the second half does not correspond to an X coordinate"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"aggnonce_index": 4,
|
||||
"msg_index": 0,
|
||||
"secnonce_index": 0,
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": null,
|
||||
"contrib": "aggnonce"
|
||||
},
|
||||
"comment": "Aggregate nonce is invalid because second half exceeds field size"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"aggnonce_index": 0,
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"secnonce_index": 1,
|
||||
"error": {
|
||||
"type": "value",
|
||||
"message": "first secnonce value is out of range."
|
||||
},
|
||||
"comment": "Secnonce is invalid which may indicate nonce reuse"
|
||||
}
|
||||
],
|
||||
"verify_fail_test_cases": [
|
||||
{
|
||||
"sig": "97AC833ADCB1AFA42EBF9E0725616F3C9A0D5B614F6FE283CEAAA37A8FFAF406",
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"comment": "Wrong signature (which is equal to the negation of valid signature)"
|
||||
},
|
||||
{
|
||||
"sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B",
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"msg_index": 0,
|
||||
"signer_index": 1,
|
||||
"comment": "Wrong signer"
|
||||
},
|
||||
{
|
||||
"sig": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"comment": "Signature exceeds group size"
|
||||
}
|
||||
],
|
||||
"verify_error_test_cases": [
|
||||
{
|
||||
"sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B",
|
||||
"key_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
4,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 0,
|
||||
"contrib": "pubnonce"
|
||||
},
|
||||
"comment": "Invalid pubnonce"
|
||||
},
|
||||
{
|
||||
"sig": "68537CC5234E505BD14061F8DA9E90C220A181855FD8BDB7F127BB12403B4D3B",
|
||||
"key_indices": [
|
||||
3,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"nonce_indices": [
|
||||
0,
|
||||
1,
|
||||
2
|
||||
],
|
||||
"msg_index": 0,
|
||||
"signer_index": 0,
|
||||
"error": {
|
||||
"type": "invalid_contribution",
|
||||
"signer": 0,
|
||||
"contrib": "pubkey"
|
||||
},
|
||||
"comment": "Invalid pubkey"
|
||||
}
|
||||
]
|
||||
}
|
||||
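These vectors target the low-level Sign entry point that Session.Sign wraps. Below is a sketch of a single signer producing a partial signature once the combined nonce is known; the helper name is invented here, and Session.Sign remains the preferred interface since it also enforces nonce re-use protection.

```go
package musig2

import (
	btcec "next.orly.dev/pkg/crypto/ec"
)

// partialSign mirrors what Session.Sign does internally: the signer's
// secret nonce, the aggregated public nonce, the full signer key set and
// the 32-byte message digest are handed to the low-level Sign function.
func partialSign(
	localNonces *Nonces, signingKey *btcec.SecretKey,
	combinedNonce [PubNonceSize]byte, keySet []*btcec.PublicKey,
	msg [32]byte,
) (*PartialSignature, error) {
	partialSig, err := Sign(
		localNonces.SecNonce, signingKey, combinedNonce, keySet, msg,
	)
	if err != nil {
		return nil, err
	}
	// The secret nonce is now spent; callers must discard localNonces
	// rather than sign another message with it.
	return partialSig, nil
}
```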
170
pkg/crypto/ec/musig2/data/tweak_vectors.json
Normal file
@@ -0,0 +1,170 @@
|
||||
{
|
||||
"sk": "7FB9E0E687ADA1EEBF7ECFE2F21E73EBDB51A7D450948DFE8D76D7F2D1007671",
|
||||
"pubkeys": [
|
||||
"03935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
|
||||
"02F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
"02DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659"
|
||||
],
|
||||
"secnonce": "508B81A611F100A6B2B6B29656590898AF488BCF2E1F55CF22E5CFB84421FE61FA27FD49B1D50085B481285E1CA205D55C82CC1B31FF5CD54A489829355901F703935F972DA013F80AE011890FA89B67A27B7BE6CCB24D3274D18B2D4067F261A9",
|
||||
"pnonces": [
|
||||
"0337C87821AFD50A8644D820A8F3E02E499C931865C2360FB43D0A0D20DAFE07EA0287BF891D2A6DEAEBADC909352AA9405D1428C15F4B75F04DAE642A95C2548480",
|
||||
"0279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980279BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",
|
||||
"032DE2662628C90B03F5E720284EB52FF7D71F4284F627B68A853D78C78E1FFE9303E4C5524E83FFE1493B9077CF1CA6BEB2090C93D930321071AD40B2F44E599046"
|
||||
],
|
||||
"aggnonce": "028465FCF0BBDBCF443AABCCE533D42B4B5A10966AC09A49655E8C42DAAB8FCD61037496A3CC86926D452CAFCFD55D25972CA1675D549310DE296BFF42F72EEEA8C9",
|
||||
"tweaks": [
|
||||
"E8F791FF9225A2AF0102AFFF4A9A723D9612A682A25EBE79802B263CDFCD83BB",
|
||||
"AE2EA797CC0FE72AC5B97B97F3C6957D7E4199A167A58EB08BCAFFDA70AC0455",
|
||||
"F52ECBC565B3D8BEA2DFD5B75A4F457E54369809322E4120831626F290FA87E0",
|
||||
"1969AD73CC177FA0B4FCED6DF1F7BF9907E665FDE9BA196A74FED0A3CF5AEF9D",
|
||||
"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141"
|
||||
],
|
||||
"msg": "F95466D086770E689964664219266FE5ED215C92AE20BAB5C9D79ADDDDF3C0CF",
|
||||
"valid_test_cases": [
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [
|
||||
0
|
||||
],
|
||||
"is_xonly": [
|
||||
true
|
||||
],
|
||||
"signer_index": 2,
|
||||
"expected": "E28A5C66E61E178C2BA19DB77B6CF9F7E2F0F56C17918CD13135E60CC848FE91",
|
||||
"comment": "A single x-only tweak"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [
|
||||
0
|
||||
],
|
||||
"is_xonly": [
|
||||
false
|
||||
],
|
||||
"signer_index": 2,
|
||||
"expected": "38B0767798252F21BF5702C48028B095428320F73A4B14DB1E25DE58543D2D2D",
|
||||
"comment": "A single plain tweak"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"is_xonly": [
|
||||
false,
|
||||
true
|
||||
],
|
||||
"signer_index": 2,
|
||||
"expected": "408A0A21C4A0F5DACAF9646AD6EB6FECD7F7A11F03ED1F48DFFF2185BC2C2408",
|
||||
"comment": "A plain tweak followed by an x-only tweak"
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3
|
||||
],
|
||||
"is_xonly": [
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
true
|
||||
],
|
||||
"signer_index": 2,
|
||||
"expected": "45ABD206E61E3DF2EC9E264A6FEC8292141A633C28586388235541F9ADE75435",
|
||||
"comment": "Four tweaks: plain, plain, x-only, x-only."
|
||||
},
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3
|
||||
],
|
||||
"is_xonly": [
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false
|
||||
],
|
||||
"signer_index": 2,
|
||||
"expected": "B255FDCAC27B40C7CE7848E2D3B7BF5EA0ED756DA81565AC804CCCA3E1D5D239",
|
||||
"comment": "Four tweaks: x-only, plain, x-only, plain. If an implementation prohibits applying plain tweaks after x-only tweaks, it can skip this test vector or return an error."
|
||||
}
|
||||
],
|
||||
"error_test_cases": [
|
||||
{
|
||||
"key_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"nonce_indices": [
|
||||
1,
|
||||
2,
|
||||
0
|
||||
],
|
||||
"tweak_indices": [
|
||||
4
|
||||
],
|
||||
"is_xonly": [
|
||||
false
|
||||
],
|
||||
"signer_index": 2,
|
||||
"error": {
|
||||
"type": "value",
|
||||
"message": "The tweak must be less than n."
|
||||
},
|
||||
"comment": "Tweak is invalid because it exceeds group size"
|
||||
}
|
||||
]
|
||||
}
|
||||
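The tweak vectors above exercise tweaked key aggregation. Here is a sketch of applying one plain tweak via the key aggregation options defined in keys.go below, the same shape as the "A single plain tweak" case; the helper name is invented for illustration.

```go
package musig2

import (
	btcec "next.orly.dev/pkg/crypto/ec"
)

// tweakedAggregateKey aggregates a signer set and applies a single plain
// tweak. Signing against the tweaked key is then driven through
// WithTweakedContext at the context level or WithTweaks at the sign level.
func tweakedAggregateKey(
	keys []*btcec.PublicKey, tweak [32]byte,
) (*AggregateKey, error) {
	aggKey, _, _, err := AggregateKeys(
		keys, true, WithKeyTweaks(KeyTweakDesc{
			Tweak:   tweak,
			IsXOnly: false,
		}),
	)
	if err != nil {
		return nil, err
	}
	// FinalKey carries the tweak; PreTweakedKey keeps the untweaked
	// aggregate for protocols such as taproot that need the internal key.
	return aggKey, nil
}
```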
2
pkg/crypto/ec/musig2/doc.go
Normal file
@@ -0,0 +1,2 @@
|
||||
// Package musig2 provides an implementation of the musig2 protocol for bitcoin.
|
||||
package musig2
|
||||
414
pkg/crypto/ec/musig2/keys.go
Normal file
@@ -0,0 +1,414 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/chainhash"
|
||||
"next.orly.dev/pkg/crypto/ec/schnorr"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
var (
|
||||
// KeyAggTagList is the tagged hash tag used to compute the hash of the
|
||||
// list of sorted public keys.
|
||||
KeyAggTagList = []byte("KeyAgg list")
|
||||
// KeyAggTagCoeff is the tagged hash tag used to compute the key
|
||||
// aggregation coefficient for each key.
|
||||
KeyAggTagCoeff = []byte("KeyAgg coefficient")
|
||||
// ErrTweakedKeyIsInfinity is returned if while tweaking a key, we end
|
||||
// up with the point at infinity.
|
||||
ErrTweakedKeyIsInfinity = fmt.Errorf("tweaked key is infinity point")
|
||||
// ErrTweakedKeyOverflows is returned if a tweaking key is larger than
|
||||
// 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141.
|
||||
ErrTweakedKeyOverflows = fmt.Errorf("tweaked key is too large")
|
||||
)
|
||||
|
||||
// sortableKeys defines a type of slice of public keys that implements the sort
|
||||
// interface for BIP 340 keys.
|
||||
type sortableKeys []*btcec.PublicKey
|
||||
|
||||
// Less reports whether the element with index i must sort before the element
|
||||
// with index j.
|
||||
func (s sortableKeys) Less(i, j int) bool {
|
||||
// TODO(roasbeef): more efficient way to compare...
|
||||
keyIBytes := s[i].SerializeCompressed()
|
||||
keyJBytes := s[j].SerializeCompressed()
|
||||
return bytes.Compare(keyIBytes, keyJBytes) == -1
|
||||
}
|
||||
|
||||
// Swap swaps the elements with indexes i and j.
|
||||
func (s sortableKeys) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||
|
||||
// Len is the number of elements in the collection.
|
||||
func (s sortableKeys) Len() int { return len(s) }
|
||||
|
||||
// sortKeys takes a set of public keys and returns a new slice that is a copy
|
||||
// of the keys sorted in lexicographical order based on the compressed pubkey
|
||||
// serialization.
|
||||
func sortKeys(keys []*btcec.PublicKey) []*btcec.PublicKey {
|
||||
keySet := sortableKeys(keys)
|
||||
if sort.IsSorted(keySet) {
|
||||
return keys
|
||||
}
|
||||
sort.Sort(keySet)
|
||||
return keySet
|
||||
}
|
||||
|
||||
// keyHashFingerprint computes the tagged hash of the series of (sorted) public
|
||||
// keys passed as input. This is used to compute the aggregation coefficient
|
||||
// for each key. The final computation is:
|
||||
// - H(tag=KeyAgg list, pk1 || pk2..)
|
||||
func keyHashFingerprint(keys []*btcec.PublicKey, sort bool) []byte {
|
||||
if sort {
|
||||
keys = sortKeys(keys)
|
||||
}
|
||||
// We'll create a single buffer and slice into that so the bytes buffer
|
||||
// doesn't continually need to grow the underlying buffer.
|
||||
keyAggBuf := make([]byte, 33*len(keys))
|
||||
keyBytes := bytes.NewBuffer(keyAggBuf[0:0])
|
||||
for _, key := range keys {
|
||||
keyBytes.Write(key.SerializeCompressed())
|
||||
}
|
||||
h := chainhash.TaggedHash(KeyAggTagList, keyBytes.Bytes())
|
||||
return h[:]
|
||||
}
|
||||
|
||||
// keyBytesEqual returns true if two keys are the same based on the compressed
|
||||
// serialization of each key.
|
||||
func keyBytesEqual(a, b *btcec.PublicKey) bool {
|
||||
return utils.FastEqual(a.SerializeCompressed(), b.SerializeCompressed())
|
||||
}
|
||||
|
||||
// aggregationCoefficient computes the key aggregation coefficient for the
|
||||
// specified target key. The coefficient is computed as:
|
||||
// - H(tag=KeyAgg coefficient, keyHashFingerprint(pks) || pk)
|
||||
func aggregationCoefficient(
|
||||
keySet []*btcec.PublicKey,
|
||||
targetKey *btcec.PublicKey, keysHash []byte,
|
||||
secondKeyIdx int,
|
||||
) *btcec.ModNScalar {
|
||||
|
||||
var mu btcec.ModNScalar
|
||||
// If this is the second key, then this coefficient is just one.
|
||||
if secondKeyIdx != -1 && keyBytesEqual(keySet[secondKeyIdx], targetKey) {
|
||||
return mu.SetInt(1)
|
||||
}
|
||||
// Otherwise, we'll compute the full finger print hash for this given
|
||||
// key and then use that to compute the coefficient tagged hash:
|
||||
// * H(tag=KeyAgg coefficient, keyHashFingerprint(pks, pk) || pk)
|
||||
var coefficientBytes [65]byte
|
||||
copy(coefficientBytes[:], keysHash[:])
|
||||
copy(coefficientBytes[32:], targetKey.SerializeCompressed())
|
||||
muHash := chainhash.TaggedHash(KeyAggTagCoeff, coefficientBytes[:])
|
||||
mu.SetByteSlice(muHash[:])
|
||||
return &mu
|
||||
}
|
||||
|
||||
// secondUniqueKeyIndex returns the index of the second unique key. If all keys
|
||||
// are the same, then a value of -1 is returned.
|
||||
func secondUniqueKeyIndex(keySet []*btcec.PublicKey, sort bool) int {
|
||||
if sort {
|
||||
keySet = sortKeys(keySet)
|
||||
}
|
||||
// Find the first key that isn't the same as the very first key (second
|
||||
// unique key).
|
||||
for i := range keySet {
|
||||
if !keyBytesEqual(keySet[i], keySet[0]) {
|
||||
return i
|
||||
}
|
||||
}
|
||||
// A value of negative one is used to indicate that all the keys in the
|
||||
// sign set are actually equal, which in practice actually makes musig2
|
||||
// useless, but we need a value to distinguish this case.
|
||||
return -1
|
||||
}
|
||||
|
||||
// KeyTweakDesc describes a tweak to be applied to the aggregated public key
|
||||
// generation and signing process. The IsXOnly specifies if the target key
|
||||
// should be converted to an x-only public key before tweaking.
|
||||
type KeyTweakDesc struct {
|
||||
// Tweak is the 32-byte value that will modify the public key.
|
||||
Tweak [32]byte
|
||||
// IsXOnly if true, then the public key will be mapped to an x-only key
|
||||
// before the tweaking operation is applied.
|
||||
IsXOnly bool
|
||||
}
|
||||
|
||||
// KeyAggOption is a functional option argument that allows callers to specify
|
||||
// pre-computed information to the main key aggregation routine.
|
||||
type KeyAggOption func(*keyAggOption)
|
||||
|
||||
// keyAggOption houses the set of functional options that modify key
|
||||
// aggregation.
|
||||
type keyAggOption struct {
|
||||
// keyHash is the output of keyHashFingerprint for a given set of keys.
|
||||
keyHash []byte
|
||||
// uniqueKeyIndex is the pre-computed index of the second unique key.
|
||||
uniqueKeyIndex *int
|
||||
// tweaks specifies a series of tweaks to be applied to the aggregated
|
||||
// public key.
|
||||
tweaks []KeyTweakDesc
|
||||
// taprootTweak controls if the tweaks above should be applied in a BIP
|
||||
// 340 style.
|
||||
taprootTweak bool
|
||||
// bip86Tweak specifies that the taproot tweak should be done in a BIP
|
||||
// 86 style, where we don't expect an actual tweak and instead just
|
||||
// commit to the public key itself.
|
||||
bip86Tweak bool
|
||||
}
|
||||
|
||||
// WithKeysHash allows key aggregation to be optimized by allowing the caller
|
||||
// to specify the hash of all the keys.
|
||||
func WithKeysHash(keyHash []byte) KeyAggOption {
|
||||
return func(o *keyAggOption) { o.keyHash = keyHash }
|
||||
}
|
||||
|
||||
// WithUniqueKeyIndex allows the caller to specify the index of the second
|
||||
// unique key.
|
||||
func WithUniqueKeyIndex(idx int) KeyAggOption {
|
||||
return func(o *keyAggOption) {
|
||||
i := idx
|
||||
o.uniqueKeyIndex = &i
|
||||
}
|
||||
}
|
||||
|
||||
// WithKeyTweaks allows a caller to specify a series of 32-byte tweaks that
|
||||
// should be applied to the final aggregated public key.
|
||||
func WithKeyTweaks(tweaks ...KeyTweakDesc) KeyAggOption {
|
||||
return func(o *keyAggOption) { o.tweaks = tweaks }
|
||||
}
|
||||
|
||||
// WithTaprootKeyTweak specifies that within this context, the final key should
|
||||
// use the taproot tweak as defined in BIP 341: outputKey = internalKey +
|
||||
// h_tapTweak(internalKey || scriptRoot). In this case, the aggregated key
|
||||
// before the tweak will be used as the internal key.
|
||||
//
|
||||
// This option should be used instead of WithKeyTweaks when the aggregated key
|
||||
// is intended to be used as a taproot output key that commits to a script
|
||||
// root.
|
||||
func WithTaprootKeyTweak(scriptRoot []byte) KeyAggOption {
|
||||
return func(o *keyAggOption) {
|
||||
var tweak [32]byte
|
||||
copy(tweak[:], scriptRoot[:])
|
||||
o.tweaks = []KeyTweakDesc{
|
||||
{
|
||||
Tweak: tweak,
|
||||
IsXOnly: true,
|
||||
},
|
||||
}
|
||||
o.taprootTweak = true
|
||||
}
|
||||
}
|
||||
|
||||
// WithBIP86KeyTweak specifies that during key aggregation, the BIP 86
|
||||
// tweak which just commits to the hash of the serialized public key should be
|
||||
// used. This option should be used when signing with a key that was derived
|
||||
// using BIP 86.
|
||||
func WithBIP86KeyTweak() KeyAggOption {
|
||||
return func(o *keyAggOption) {
|
||||
o.tweaks = []KeyTweakDesc{{IsXOnly: true}}
|
||||
o.taprootTweak = true
|
||||
o.bip86Tweak = true
|
||||
}
|
||||
}
|
||||
|
||||
// defaultKeyAggOptions returns the set of default arguments for key
|
||||
// aggregation.
|
||||
func defaultKeyAggOptions() *keyAggOption { return &keyAggOption{} }
|
||||
|
||||
// hasEvenY returns true if the affine representation of the passed jacobian
|
||||
// point has an even y coordinate.
|
||||
//
|
||||
// TODO(roasbeef): double check, can just check the y coord even not jacobian?
|
||||
func hasEvenY(pJ btcec.JacobianPoint) bool {
|
||||
pJ.ToAffine()
|
||||
p := btcec.NewPublicKey(&pJ.X, &pJ.Y)
|
||||
keyBytes := p.SerializeCompressed()
|
||||
return keyBytes[0] == secp256k1.PubKeyFormatCompressedEven
|
||||
}
|
||||
|
||||
// tweakKey applies a tweak to the passed public key using the specified
|
||||
// tweak. The parityAcc and tweakAcc are returned (in that order) which
|
||||
// includes the accumulated parity factor and the tweak multiplied
|
||||
// by the parity factor. The xOnly bool specifies if this is to be an x-only
|
||||
// tweak or not.
|
||||
func tweakKey(
|
||||
keyJ btcec.JacobianPoint, parityAcc btcec.ModNScalar,
|
||||
tweak [32]byte,
|
||||
tweakAcc btcec.ModNScalar,
|
||||
xOnly bool,
|
||||
) (btcec.JacobianPoint, btcec.ModNScalar, btcec.ModNScalar, error) {
|
||||
|
||||
// First we'll compute the new parity factor for this key. If the key has
|
||||
// an odd y coordinate (not even), then we'll need to negate it (multiply
|
||||
// by -1 mod n, in this case).
|
||||
var parityFactor btcec.ModNScalar
|
||||
if xOnly && !hasEvenY(keyJ) {
|
||||
parityFactor.SetInt(1).Negate()
|
||||
} else {
|
||||
parityFactor.SetInt(1)
|
||||
}
|
||||
|
||||
// Next, map the tweak into a mod n integer so we can use it for
|
||||
// manipulations below.
|
||||
tweakInt := new(btcec.ModNScalar)
|
||||
overflows := tweakInt.SetBytes(&tweak)
|
||||
if overflows == 1 {
|
||||
return keyJ, parityAcc, tweakAcc, ErrTweakedKeyOverflows
|
||||
}
|
||||
// Next, we'll compute: Q_i = g*Q + t*G, where g is our parityFactor and t
|
||||
// is the tweakInt above. We'll space things out a bit to make it easier to
|
||||
// follow.
|
||||
//
|
||||
// First compute t*G:
|
||||
var tweakedGenerator btcec.JacobianPoint
|
||||
btcec.ScalarBaseMultNonConst(tweakInt, &tweakedGenerator)
|
||||
// Next compute g*Q:
|
||||
btcec.ScalarMultNonConst(&parityFactor, &keyJ, &keyJ)
|
||||
// Finally add both of them together to get our final
|
||||
// tweaked point.
|
||||
btcec.AddNonConst(&tweakedGenerator, &keyJ, &keyJ)
|
||||
// As a sanity check, make sure that we didn't just end up with the
|
||||
// point at infinity.
|
||||
if keyJ == infinityPoint {
|
||||
return keyJ, parityAcc, tweakAcc, ErrTweakedKeyIsInfinity
|
||||
}
|
||||
// As a final wrap up step, we'll accumulate the parity
|
||||
// factor and also this tweak into the final set of accumulators.
|
||||
parityAcc.Mul(&parityFactor)
|
||||
tweakAcc.Mul(&parityFactor).Add(tweakInt)
|
||||
return keyJ, parityAcc, tweakAcc, nil
|
||||
}
|
||||
|
||||
// AggregateKey is a final aggregated key along with a possible version of the
|
||||
// key without any tweaks applied.
|
||||
type AggregateKey struct {
|
||||
// FinalKey is the final aggregated key which may include one or more
|
||||
// tweaks applied to it.
|
||||
FinalKey *btcec.PublicKey
|
||||
// PreTweakedKey is the aggregated key *before* any tweaks have been
|
||||
// applied. This should be used as the internal key in taproot
|
||||
// contexts.
|
||||
PreTweakedKey *btcec.PublicKey
|
||||
}
|
||||
|
||||
// AggregateKeys takes a list of possibly unsorted keys and returns a single
|
||||
// aggregated key as specified by the musig2 key aggregation algorithm. A nil
|
||||
// value can be passed for keyHash, which causes this function to re-derive it.
|
||||
// In addition to the combined public key, the parity accumulator and the tweak
|
||||
// accumulator are returned as well.
|
||||
func AggregateKeys(
|
||||
keys []*btcec.PublicKey, sort bool,
|
||||
keyOpts ...KeyAggOption,
|
||||
) (
|
||||
*AggregateKey, *btcec.ModNScalar, *btcec.ModNScalar, error,
|
||||
) {
|
||||
// First, parse the set of optional signing options.
|
||||
opts := defaultKeyAggOptions()
|
||||
for _, option := range keyOpts {
|
||||
option(opts)
|
||||
}
|
||||
// Sort the set of public keys so we know we're working with them in
|
||||
// sorted order for all the routines below.
|
||||
if sort {
|
||||
keys = sortKeys(keys)
|
||||
}
|
||||
// The caller may provide the hash of all the keys as an optimization
|
||||
// during signing, as it already needs to be computed.
|
||||
if opts.keyHash == nil {
|
||||
opts.keyHash = keyHashFingerprint(keys, sort)
|
||||
}
|
||||
// A caller may also specify the unique key index themselves so we
|
||||
// don't need to re-compute it.
|
||||
if opts.uniqueKeyIndex == nil {
|
||||
idx := secondUniqueKeyIndex(keys, sort)
|
||||
opts.uniqueKeyIndex = &idx
|
||||
}
|
||||
// For each key, we'll compute the intermediate blinded key: a_i*P_i,
|
||||
// where a_i is the aggregation coefficient for that key, and P_i is
|
||||
// the key itself, then accumulate that (addition) into the main final
|
||||
// key: P = a_1*P_1 + a_2*P_2 + ... + a_n*P_n.
|
||||
var finalKeyJ btcec.JacobianPoint
|
||||
for _, key := range keys {
|
||||
// Port the key over to Jacobian coordinates as we need it in
|
||||
// this format for the routines below.
|
||||
var keyJ btcec.JacobianPoint
|
||||
key.AsJacobian(&keyJ)
|
||||
// Compute the aggregation coefficient for the key, then
|
||||
// multiply it by the key itself: P_i' = a_i*P_i.
|
||||
var tweakedKeyJ btcec.JacobianPoint
|
||||
a := aggregationCoefficient(
|
||||
keys, key, opts.keyHash, *opts.uniqueKeyIndex,
|
||||
)
|
||||
btcec.ScalarMultNonConst(a, &keyJ, &tweakedKeyJ)
|
||||
// Finally accumulate this into the final key in an incremental
|
||||
// fashion.
|
||||
btcec.AddNonConst(&finalKeyJ, &tweakedKeyJ, &finalKeyJ)
|
||||
}
|
||||
|
||||
// We'll copy over the key at this point, since this represents the
|
||||
// aggregated key before any tweaks have been applied. This'll be used
|
||||
// as the internal key for script path proofs.
|
||||
finalKeyJ.ToAffine()
|
||||
combinedKey := btcec.NewPublicKey(&finalKeyJ.X, &finalKeyJ.Y)
|
||||
// At this point, if this is a taproot tweak, then we'll modify the
|
||||
// base tweak value to use the BIP 341 tweak value.
|
||||
if opts.taprootTweak {
|
||||
// Emulate the same behavior as txscript.ComputeTaprootOutputKey
|
||||
// which only operates on the x-only public key.
|
||||
key, _ := schnorr.ParsePubKey(
|
||||
schnorr.SerializePubKey(
|
||||
combinedKey,
|
||||
),
|
||||
)
|
||||
// We only use the actual tweak bytes if we're not committing
|
||||
// to a BIP-0086 key only spend output. Otherwise, we just
|
||||
// commit to the internal key and an empty byte slice as the
|
||||
// root hash.
|
||||
tweakBytes := []byte{}
|
||||
if !opts.bip86Tweak {
|
||||
tweakBytes = opts.tweaks[0].Tweak[:]
|
||||
}
|
||||
// Compute the taproot key tagged hash of:
|
||||
// h_tapTweak(internalKey || scriptRoot). We only do this for
|
||||
// the first one, as you can only specify a single tweak when
|
||||
// using the taproot mode with this API.
|
||||
tapTweakHash := chainhash.TaggedHash(
|
||||
chainhash.TagTapTweak, schnorr.SerializePubKey(key),
|
||||
tweakBytes,
|
||||
)
|
||||
opts.tweaks[0].Tweak = *tapTweakHash
|
||||
}
|
||||
|
||||
var (
|
||||
err error
|
||||
tweakAcc btcec.ModNScalar
|
||||
parityAcc btcec.ModNScalar
|
||||
)
|
||||
parityAcc.SetInt(1)
|
||||
// In this case we have a set of tweaks, so we'll incrementally apply
|
||||
// each one, until we have our final tweaked key, and the related
|
||||
// accumulators.
|
||||
for i := 1; i <= len(opts.tweaks); i++ {
|
||||
finalKeyJ, parityAcc, tweakAcc, err = tweakKey(
|
||||
finalKeyJ, parityAcc, opts.tweaks[i-1].Tweak, tweakAcc,
|
||||
opts.tweaks[i-1].IsXOnly,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
}
|
||||
finalKeyJ.ToAffine()
|
||||
finalKey := btcec.NewPublicKey(&finalKeyJ.X, &finalKeyJ.Y)
|
||||
return &AggregateKey{
|
||||
PreTweakedKey: combinedKey,
|
||||
FinalKey: finalKey,
|
||||
}, &parityAcc, &tweakAcc, nil
|
||||
}
|
||||
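To tie the pieces of this file together, here is a minimal sketch of aggregating two keys into a taproot output key. The import paths mirror the ones used by this commit's tests and are otherwise an assumption; the script root is a placeholder and error handling is shortened for brevity.

```go
package main

import (
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/musig2"
	"next.orly.dev/pkg/crypto/ec/schnorr"
)

func main() {
	// Two throwaway signers; a real caller would already hold the
	// participants' public keys.
	sk1, _ := btcec.NewSecretKey()
	sk2, _ := btcec.NewSecretKey()
	keys := []*btcec.PublicKey{sk1.PubKey(), sk2.PubKey()}

	// Placeholder for a real taproot script merkle root.
	scriptRoot := make([]byte, 32)

	// Aggregate with sorting enabled and a BIP 341 style tweak: FinalKey is
	// the taproot output key, PreTweakedKey the internal key.
	aggKey, _, _, err := musig2.AggregateKeys(
		keys, true, musig2.WithTaprootKeyTweak(scriptRoot),
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("output key:   %x\n", schnorr.SerializePubKey(aggKey.FinalKey))
	fmt.Printf("internal key: %x\n", schnorr.SerializePubKey(aggKey.PreTweakedKey))
}
```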
332
pkg/crypto/ec/musig2/keys_test.go
Normal file
@@ -0,0 +1,332 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/schnorr"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
const (
|
||||
keySortTestVectorFileName = "key_sort_vectors.json"
|
||||
keyAggTestVectorFileName = "key_agg_vectors.json"
|
||||
keyTweakTestVectorFileName = "tweak_vectors.json"
|
||||
)
|
||||
|
||||
type keySortTestVector struct {
|
||||
PubKeys []string `json:"pubkeys"`
|
||||
SortedKeys []string `json:"sorted_pubkeys"`
|
||||
}
|
||||
|
||||
// TestMusig2KeySort tests that keys are properly sorted according to the
|
||||
// musig2 test vectors.
|
||||
func TestMusig2KeySort(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, keySortTestVectorFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCase keySortTestVector
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCase))
|
||||
keys := make([]*btcec.PublicKey, len(testCase.PubKeys))
|
||||
for i, keyStr := range testCase.PubKeys {
|
||||
pubKey, err := btcec.ParsePubKey(mustParseHex(keyStr))
|
||||
require.NoError(t, err)
|
||||
keys[i] = pubKey
|
||||
}
|
||||
sortedKeys := sortKeys(keys)
|
||||
expectedKeys := make([]*btcec.PublicKey, len(testCase.PubKeys))
|
||||
for i, keyStr := range testCase.SortedKeys {
|
||||
pubKey, err := btcec.ParsePubKey(mustParseHex(keyStr))
|
||||
require.NoError(t, err)
|
||||
expectedKeys[i] = pubKey
|
||||
}
|
||||
require.Equal(t, sortedKeys, expectedKeys)
|
||||
}
|
||||
|
||||
type keyAggValidTest struct {
|
||||
Indices []int `json:"key_indices"`
|
||||
Expected string `json:"expected"`
|
||||
}
|
||||
|
||||
type keyAggError struct {
|
||||
Type string `json:"type"`
|
||||
Signer int `json:"signer"`
|
||||
Contrib string `json:"contrib"`
|
||||
}
|
||||
|
||||
type keyAggInvalidTest struct {
|
||||
Indices []int `json:"key_indices"`
|
||||
TweakIndices []int `json:"tweak_indices"`
|
||||
IsXOnly []bool `json:"is_xonly"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type keyAggTestVectors struct {
|
||||
PubKeys []string `json:"pubkeys"`
|
||||
Tweaks []string `json:"tweaks"`
|
||||
ValidCases []keyAggValidTest `json:"valid_test_cases"`
|
||||
InvalidCases []keyAggInvalidTest `json:"error_test_cases"`
|
||||
}
|
||||
|
||||
func keysFromIndices(
|
||||
t *testing.T, indices []int,
|
||||
pubKeys []string,
|
||||
) ([]*btcec.PublicKey, error) {
|
||||
t.Helper()
|
||||
inputKeys := make([]*btcec.PublicKey, len(indices))
|
||||
for i, keyIdx := range indices {
|
||||
var err error
|
||||
inputKeys[i], err = btcec.ParsePubKey(
|
||||
mustParseHex(pubKeys[keyIdx]),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return inputKeys, nil
|
||||
}
|
||||
|
||||
func tweaksFromIndices(
|
||||
t *testing.T, indices []int,
|
||||
tweaks []string, isXonly []bool,
|
||||
) []KeyTweakDesc {
|
||||
|
||||
t.Helper()
|
||||
testTweaks := make([]KeyTweakDesc, len(indices))
|
||||
for i, idx := range indices {
|
||||
var rawTweak [32]byte
|
||||
copy(rawTweak[:], mustParseHex(tweaks[idx]))
|
||||
testTweaks[i] = KeyTweakDesc{
|
||||
Tweak: rawTweak,
|
||||
IsXOnly: isXonly[i],
|
||||
}
|
||||
}
|
||||
return testTweaks
|
||||
}
|
||||
|
||||
// TestMuSig2KeyAggTestVectors tests that this implementation of musig2 key
|
||||
// aggregation lines up with the secp256k1-zkp test vectors.
|
||||
func TestMuSig2KeyAggTestVectors(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, keyAggTestVectorFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCases keyAggTestVectors
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||
tweaks := make([][]byte, len(testCases.Tweaks))
|
||||
for i := range testCases.Tweaks {
|
||||
tweaks[i] = mustParseHex(testCases.Tweaks[i])
|
||||
}
|
||||
for i, testCase := range testCases.ValidCases {
|
||||
testCase := testCase
|
||||
// Assemble the set of keys we'll pass in based on their key
|
||||
// index. We don't use sorting to ensure we send the keys in
|
||||
// the exact same order as the test vectors do.
|
||||
inputKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
t.Run(
|
||||
fmt.Sprintf("test_case=%v", i), func(t *testing.T) {
|
||||
uniqueKeyIndex := secondUniqueKeyIndex(inputKeys, false)
|
||||
opts := []KeyAggOption{WithUniqueKeyIndex(uniqueKeyIndex)}
|
||||
|
||||
combinedKey, _, _, err := AggregateKeys(
|
||||
inputKeys, false, opts...,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(
|
||||
t, schnorr.SerializePubKey(combinedKey.FinalKey),
|
||||
mustParseHex(testCase.Expected),
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
for _, testCase := range testCases.InvalidCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf(
|
||||
"invalid_%v",
|
||||
strings.ToLower(testCase.Comment),
|
||||
)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
// For each test, we'll extract the set of input keys
|
||||
// as well as the tweaks since this set of cases also
|
||||
// exercises error cases related to the set of tweaks.
|
||||
inputKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
// In this set of test cases, we should only get this
|
||||
// for the very first vector.
|
||||
if err != nil {
|
||||
switch testCase.Comment {
|
||||
case "Invalid public key":
|
||||
require.ErrorIs(
|
||||
t, err,
|
||||
secp256k1.ErrPubKeyNotOnCurve,
|
||||
)
|
||||
case "Public key exceeds field size":
|
||||
require.ErrorIs(
|
||||
t, err, secp256k1.ErrPubKeyXTooBig,
|
||||
)
|
||||
case "First byte of public key is not 2 or 3":
|
||||
require.ErrorIs(
|
||||
t, err,
|
||||
secp256k1.ErrPubKeyInvalidFormat,
|
||||
)
|
||||
default:
|
||||
t.Fatalf("uncaught err: %v", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
var tweaks []KeyTweakDesc
|
||||
if len(testCase.TweakIndices) != 0 {
|
||||
tweaks = tweaksFromIndices(
|
||||
t, testCase.TweakIndices, testCases.Tweaks,
|
||||
testCase.IsXOnly,
|
||||
)
|
||||
}
|
||||
uniqueKeyIndex := secondUniqueKeyIndex(inputKeys, false)
|
||||
opts := []KeyAggOption{
|
||||
WithUniqueKeyIndex(uniqueKeyIndex),
|
||||
}
|
||||
if len(tweaks) != 0 {
|
||||
opts = append(opts, WithKeyTweaks(tweaks...))
|
||||
}
|
||||
_, _, _, err = AggregateKeys(
|
||||
inputKeys, false, opts...,
|
||||
)
|
||||
require.Error(t, err)
|
||||
switch testCase.Comment {
|
||||
case "Tweak is out of range":
|
||||
require.ErrorIs(t, err, ErrTweakedKeyOverflows)
|
||||
case "Intermediate tweaking result is point at infinity":
|
||||
require.ErrorIs(t, err, ErrTweakedKeyIsInfinity)
|
||||
default:
|
||||
t.Fatalf("uncaught err: %v", err)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
type keyTweakInvalidTest struct {
|
||||
Indices []int `json:"key_indices"`
|
||||
NonceIndices []int `json:"nonce_indices"`
|
||||
TweakIndices []int `json:"tweak_indices"`
|
||||
IsXOnly []bool `json:"is_xonly"`
|
||||
SignerIndex int `json:"signer_index"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type keyTweakValidTest struct {
|
||||
Indices []int `json:"key_indices"`
|
||||
NonceIndices []int `json:"nonce_indices"`
|
||||
TweakIndices []int `json:"tweak_indices"`
|
||||
IsXOnly []bool `json:"is_xonly"`
|
||||
SignerIndex int `json:"signer_index"`
|
||||
Expected string `json:"expected"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type keyTweakVector struct {
|
||||
SecKey string `json:"sk"`
|
||||
PubKeys []string `json:"pubkeys"`
|
||||
PrivNonce string `json:"secnonce"`
|
||||
PubNonces []string `json:"pnonces"`
|
||||
AggNonce string `json:"aggnonce"`
|
||||
Tweaks []string `json:"tweaks"`
|
||||
Msg string `json:"msg"`
|
||||
ValidCases []keyTweakValidTest `json:"valid_test_cases"`
|
||||
InvalidCases []keyTweakInvalidTest `json:"error_test_cases"`
|
||||
}
|
||||
|
||||
func pubNoncesFromIndices(
|
||||
t *testing.T, nonceIndices []int,
|
||||
pubNonces []string,
|
||||
) [][PubNonceSize]byte {
|
||||
|
||||
nonces := make([][PubNonceSize]byte, len(nonceIndices))
|
||||
for i, idx := range nonceIndices {
|
||||
var pubNonce [PubNonceSize]byte
|
||||
copy(pubNonce[:], mustParseHex(pubNonces[idx]))
|
||||
nonces[i] = pubNonce
|
||||
}
|
||||
return nonces
|
||||
}
|
||||
|
||||
// TestMuSig2TweakTestVectors tests that we properly handle the various edge
|
||||
// cases related to tweaking public keys.
|
||||
func TestMuSig2TweakTestVectors(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, keyTweakTestVectorFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCases keyTweakVector
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||
privKey, _ := btcec.SecKeyFromBytes(mustParseHex(testCases.SecKey))
|
||||
var msg [32]byte
|
||||
copy(msg[:], mustParseHex(testCases.Msg))
|
||||
var secNonce [SecNonceSize]byte
|
||||
copy(secNonce[:], mustParseHex(testCases.PrivNonce))
|
||||
for _, testCase := range testCases.ValidCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf(
|
||||
"valid_%v",
|
||||
strings.ToLower(testCase.Comment),
|
||||
)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
pubKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
var tweaks []KeyTweakDesc
|
||||
if len(testCase.TweakIndices) != 0 {
|
||||
tweaks = tweaksFromIndices(
|
||||
t, testCase.TweakIndices,
|
||||
testCases.Tweaks, testCase.IsXOnly,
|
||||
)
|
||||
}
|
||||
pubNonces := pubNoncesFromIndices(
|
||||
t, testCase.NonceIndices, testCases.PubNonces,
|
||||
)
|
||||
combinedNonce, err := AggregateNonces(pubNonces)
|
||||
require.NoError(t, err)
|
||||
var opts []SignOption
|
||||
if len(tweaks) != 0 {
|
||||
opts = append(opts, WithTweaks(tweaks...))
|
||||
}
|
||||
partialSig, err := Sign(
|
||||
secNonce, privKey, combinedNonce, pubKeys,
|
||||
msg, opts...,
|
||||
)
|
||||
var partialSigBytes [32]byte
|
||||
partialSig.S.PutBytesUnchecked(partialSigBytes[:])
|
||||
require.Equal(
|
||||
t, hex.Enc(partialSigBytes[:]),
|
||||
hex.Enc(mustParseHex(testCase.Expected)),
|
||||
)
|
||||
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
409
pkg/crypto/ec/musig2/musig2_test.go
Normal file
@@ -0,0 +1,409 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/sha256"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
const (
|
||||
testVectorBaseDir = "data"
|
||||
)
|
||||
|
||||
func mustParseHex(str string) []byte {
|
||||
b, err := hex.Dec(str)
|
||||
if err != nil {
|
||||
panic(fmt.Errorf("unable to parse hex: %v", err))
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
type signer struct {
|
||||
privKey *btcec.SecretKey
|
||||
pubKey *btcec.PublicKey
|
||||
nonces *Nonces
|
||||
partialSig *PartialSignature
|
||||
}
|
||||
|
||||
type signerSet []signer
|
||||
|
||||
func (s signerSet) keys() []*btcec.PublicKey {
|
||||
keys := make([]*btcec.PublicKey, len(s))
|
||||
for i := 0; i < len(s); i++ {
|
||||
keys[i] = s[i].pubKey
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func (s signerSet) partialSigs() []*PartialSignature {
|
||||
sigs := make([]*PartialSignature, len(s))
|
||||
for i := 0; i < len(s); i++ {
|
||||
sigs[i] = s[i].partialSig
|
||||
}
|
||||
return sigs
|
||||
}
|
||||
|
||||
func (s signerSet) pubNonces() [][PubNonceSize]byte {
|
||||
nonces := make([][PubNonceSize]byte, len(s))
|
||||
for i := 0; i < len(s); i++ {
|
||||
nonces[i] = s[i].nonces.PubNonce
|
||||
}
|
||||
return nonces
|
||||
}
|
||||
|
||||
func (s signerSet) combinedKey() *btcec.PublicKey {
|
||||
uniqueKeyIndex := secondUniqueKeyIndex(s.keys(), false)
|
||||
key, _, _, _ := AggregateKeys(
|
||||
s.keys(), false, WithUniqueKeyIndex(uniqueKeyIndex),
|
||||
)
|
||||
return key.FinalKey
|
||||
}
|
||||
|
||||
// testMultiPartySign executes a multi-party signing context w/ 100 signers.
|
||||
func testMultiPartySign(
|
||||
t *testing.T, taprootTweak []byte,
|
||||
tweaks ...KeyTweakDesc,
|
||||
) {
|
||||
|
||||
const numSigners = 100
|
||||
// First generate the set of signers along with their public keys.
|
||||
signerKeys := make([]*btcec.SecretKey, numSigners)
|
||||
signSet := make([]*btcec.PublicKey, numSigners)
|
||||
for i := 0; i < numSigners; i++ {
|
||||
privKey, err := btcec.NewSecretKey()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen priv key: %v", err)
|
||||
}
|
||||
pubKey := privKey.PubKey()
|
||||
signerKeys[i] = privKey
|
||||
signSet[i] = pubKey
|
||||
}
|
||||
var combinedKey *btcec.PublicKey
|
||||
var ctxOpts []ContextOption
|
||||
switch {
|
||||
case len(taprootTweak) == 0 && taprootTweak != nil:
|
||||
ctxOpts = append(ctxOpts, WithBip86TweakCtx())
|
||||
case taprootTweak != nil:
|
||||
ctxOpts = append(ctxOpts, WithTaprootTweakCtx(taprootTweak))
|
||||
case len(tweaks) != 0:
|
||||
ctxOpts = append(ctxOpts, WithTweakedContext(tweaks...))
|
||||
}
|
||||
ctxOpts = append(ctxOpts, WithKnownSigners(signSet))
|
||||
// Now that we have all the signers, we'll make a new context, then
|
||||
// generate a new session for each of them (which handles nonce
|
||||
// generation).
|
||||
signers := make([]*Session, numSigners)
|
||||
for i, signerKey := range signerKeys {
|
||||
signCtx, err := NewContext(
|
||||
signerKey, false, ctxOpts...,
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to generate context: %v", err)
|
||||
}
|
||||
if combinedKey == nil {
|
||||
combinedKey, err = signCtx.CombinedKey()
|
||||
if err != nil {
|
||||
t.Fatalf("combined key not available: %v", err)
|
||||
}
|
||||
}
|
||||
session, err := signCtx.NewSession()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to generate new session: %v", err)
|
||||
}
|
||||
signers[i] = session
|
||||
}
|
||||
// Next, in the pre-signing phase, we'll send all the nonces to each
|
||||
// signer.
|
||||
var wg sync.WaitGroup
|
||||
for i, signCtx := range signers {
|
||||
signCtx := signCtx
|
||||
wg.Add(1)
|
||||
go func(idx int, signer *Session) {
|
||||
defer wg.Done()
|
||||
for j, otherCtx := range signers {
|
||||
if idx == j {
|
||||
continue
|
||||
}
|
||||
nonce := otherCtx.PublicNonce()
|
||||
haveAll, err := signer.RegisterPubNonce(nonce)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to add public nonce")
|
||||
}
|
||||
if j == len(signers)-1 && !haveAll {
|
||||
t.Fatalf("all public nonces should have been detected")
|
||||
}
|
||||
}
|
||||
}(i, signCtx)
|
||||
}
|
||||
wg.Wait()
|
||||
msg := sha256.Sum256([]byte("let's get taprooty"))
|
||||
// In the final step, we'll use the first signer as our combiner, and
|
||||
// generate a signature for each signer, and then accumulate that with
|
||||
// the combiner.
|
||||
combiner := signers[0]
|
||||
for i := range signers {
|
||||
signer := signers[i]
|
||||
partialSig, err := signer.Sign(msg)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to generate partial sig: %v", err)
|
||||
}
|
||||
// We don't need to combine the signature for the very first
|
||||
// signer, as it already has that partial signature.
|
||||
if i != 0 {
|
||||
haveAll, err := combiner.CombineSig(partialSig)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to combine sigs: %v", err)
|
||||
}
|
||||
|
||||
if i == len(signers)-1 && !haveAll {
|
||||
t.Fatalf("final sig wasn't reconstructed")
|
||||
}
|
||||
}
|
||||
}
|
||||
// Finally, we'll combine all the partial signatures and ensure the result validates
|
||||
// as a single schnorr signature.
|
||||
finalSig := combiner.FinalSig()
|
||||
if !finalSig.Verify(msg[:], combinedKey) {
|
||||
t.Fatalf("final sig is invalid!")
|
||||
}
|
||||
// Verify that if we try to sign again with any of the existing
|
||||
// signers, then we'll get an error as the nonces have already been
|
||||
// used.
|
||||
for _, signer := range signers {
|
||||
_, err := signer.Sign(msg)
|
||||
if err != ErrSigningContextReuse {
|
||||
t.Fatalf("expected to get signing context reuse")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestMuSigMultiParty tests that for a given set of 100 signers, we're able to
|
||||
// properly generate valid sub signatures, which ultimately can be combined
|
||||
// into a single valid signature.
|
||||
func TestMuSigMultiParty(t *testing.T) {
|
||||
t.Parallel()
|
||||
testTweak := [32]byte{
|
||||
0xE8, 0xF7, 0x91, 0xFF, 0x92, 0x25, 0xA2, 0xAF,
|
||||
0x01, 0x02, 0xAF, 0xFF, 0x4A, 0x9A, 0x72, 0x3D,
|
||||
0x96, 0x12, 0xA6, 0x82, 0xA2, 0x5E, 0xBE, 0x79,
|
||||
0x80, 0x2B, 0x26, 0x3C, 0xDF, 0xCD, 0x83, 0xBB,
|
||||
}
|
||||
t.Run(
|
||||
"no_tweak", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testMultiPartySign(t, nil)
|
||||
},
|
||||
)
|
||||
t.Run(
|
||||
"tweaked", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testMultiPartySign(
|
||||
t, nil, KeyTweakDesc{
|
||||
Tweak: testTweak,
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
t.Run(
|
||||
"tweaked_x_only", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testMultiPartySign(
|
||||
t, nil, KeyTweakDesc{
|
||||
Tweak: testTweak,
|
||||
IsXOnly: true,
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
t.Run(
|
||||
"taproot_tweaked_x_only", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testMultiPartySign(t, testTweak[:])
|
||||
},
|
||||
)
|
||||
t.Run(
|
||||
"taproot_bip_86", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testMultiPartySign(t, []byte{})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// TestMuSigEarlyNonce tests that for protocols where nonces need to be
|
||||
// exchanged before all signers are known, the context API works as expected.
|
||||
func TestMuSigEarlyNonce(t *testing.T) {
|
||||
t.Parallel()
|
||||
privKey1, err := btcec.NewSecretKey()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen priv key: %v", err)
|
||||
}
|
||||
privKey2, err := btcec.NewSecretKey()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen priv key: %v", err)
|
||||
}
|
||||
// If we try to make a context, with just the secret key and sorting
|
||||
// value, we should get an error.
|
||||
_, err = NewContext(privKey1, true)
|
||||
if !errors.Is(err, ErrSignersNotSpecified) {
|
||||
t.Fatalf("unexpected ctx error: %v", err)
|
||||
}
|
||||
signers := []*btcec.PublicKey{privKey1.PubKey(), privKey2.PubKey()}
|
||||
numSigners := len(signers)
|
||||
ctx1, err := NewContext(
|
||||
privKey1, true, WithNumSigners(numSigners), WithEarlyNonceGen(),
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to make ctx: %v", err)
|
||||
}
|
||||
pubKey1 := ctx1.PubKey()
|
||||
ctx2, err := NewContext(
|
||||
privKey2, true, WithKnownSigners(signers), WithEarlyNonceGen(),
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to make ctx: %v", err)
|
||||
}
|
||||
pubKey2 := ctx2.PubKey()
|
||||
// At this point, the combined key shouldn't be available for signer 1,
|
||||
// but should be for signer 2, as they know about all signers.
|
||||
if _, err := ctx1.CombinedKey(); !errors.Is(err, ErrNotEnoughSigners) {
|
||||
t.Fatalf("unepxected error: %v", err)
|
||||
}
|
||||
_, err = ctx2.CombinedKey()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to get combined key: %v", err)
|
||||
}
|
||||
// The early nonces _should_ be available at this point.
|
||||
nonce1, err := ctx1.EarlySessionNonce()
|
||||
if err != nil {
|
||||
t.Fatalf("session nonce not available: %v", err)
|
||||
}
|
||||
nonce2, err := ctx2.EarlySessionNonce()
|
||||
if err != nil {
|
||||
t.Fatalf("session nonce not available: %v", err)
|
||||
}
|
||||
// At this point signer 1 only knows about itself, while signer 2 already has both signers registered.
|
||||
if ctx1.NumRegisteredSigners() != 1 {
|
||||
t.Fatalf(
|
||||
"expected 1 signer, instead have: %v",
|
||||
ctx1.NumRegisteredSigners(),
|
||||
)
|
||||
}
|
||||
if ctx2.NumRegisteredSigners() != 2 {
|
||||
t.Fatalf(
|
||||
"expected 2 signers, instead have: %v",
|
||||
ctx2.NumRegisteredSigners(),
|
||||
)
|
||||
}
|
||||
// If we try to make a session, we should get an error since we don't
|
||||
// have all the signers yet.
|
||||
if _, err := ctx1.NewSession(); !errors.Is(err, ErrNotEnoughSigners) {
|
||||
t.Fatalf("unexpected session key error: %v", err)
|
||||
}
|
||||
// The combined key should be unavailable as well.
|
||||
if _, err := ctx1.CombinedKey(); !errors.Is(err, ErrNotEnoughSigners) {
|
||||
t.Fatalf("unexpected combined key error: %v", err)
|
||||
}
|
||||
// We'll now register the other signer for party 1.
|
||||
done, err := ctx1.RegisterSigner(&pubKey2)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to register signer: %v", err)
|
||||
}
|
||||
if !done {
|
||||
t.Fatalf("signer 1 doesn't have all keys")
|
||||
}
|
||||
// If we try to register the signer again, we should get an error.
|
||||
_, err = ctx2.RegisterSigner(&pubKey1)
|
||||
if !errors.Is(err, ErrAlreadyHaveAllSigners) {
|
||||
t.Fatalf("should not be able to register too many signers")
|
||||
}
|
||||
// We should be able to create the session at this point.
|
||||
session1, err := ctx1.NewSession()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to create new session: %v", err)
|
||||
}
|
||||
session2, err := ctx2.NewSession()
|
||||
if err != nil {
|
||||
t.Fatalf("unable to create new session: %v", err)
|
||||
}
|
||||
msg := sha256.Sum256([]byte("let's get taprooty, LN style"))
|
||||
// If we try to sign before we have the combined nonce, we should get
|
||||
// an error.
|
||||
_, err = session1.Sign(msg)
|
||||
if !errors.Is(err, ErrCombinedNonceUnavailable) {
|
||||
t.Fatalf("unable to gen sig: %v", err)
|
||||
}
|
||||
// Now we can exchange nonces to continue with the rest of the signing
|
||||
// process as normal.
|
||||
done, err = session1.RegisterPubNonce(nonce2.PubNonce)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to register nonce: %v", err)
|
||||
}
|
||||
if !done {
|
||||
t.Fatalf("signer 1 doesn't have all nonces")
|
||||
}
|
||||
done, err = session2.RegisterPubNonce(nonce1.PubNonce)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to register nonce: %v", err)
|
||||
}
|
||||
if !done {
|
||||
t.Fatalf("signer 2 doesn't have all nonces")
|
||||
}
|
||||
// Registering the nonce again should error out.
|
||||
_, err = session2.RegisterPubNonce(nonce1.PubNonce)
|
||||
if !errors.Is(err, ErrAlreadyHaveAllNonces) {
|
||||
t.Fatalf("shouldn't be able to register nonces twice")
|
||||
}
|
||||
// Sign the message and combine the two partial sigs into one.
|
||||
_, err = session1.Sign(msg)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen sig: %v", err)
|
||||
}
|
||||
sig2, err := session2.Sign(msg)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to gen sig: %v", err)
|
||||
}
|
||||
done, err = session1.CombineSig(sig2)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to combine sig: %v", err)
|
||||
}
|
||||
if !done {
|
||||
t.Fatalf("all sigs should be known now: %v", err)
|
||||
}
|
||||
// If we try to combine another sig, then we should get an error.
|
||||
_, err = session1.CombineSig(sig2)
|
||||
if !errors.Is(err, ErrAlredyHaveAllSigs) {
|
||||
t.Fatalf("shouldn't be able to combine again")
|
||||
}
|
||||
// Finally, verify that the final signature is valid.
|
||||
combinedKey, err := ctx1.CombinedKey()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected combined key error: %v", err)
|
||||
}
|
||||
finalSig := session1.FinalSig()
|
||||
if !finalSig.Verify(msg[:], combinedKey) {
|
||||
t.Fatalf("final sig is invalid!")
|
||||
}
|
||||
}
|
||||
|
||||
type memsetRandReader struct {
|
||||
i int
|
||||
}
|
||||
|
||||
func (mr *memsetRandReader) Read(buf []byte) (n int, err error) {
|
||||
for i := range buf {
|
||||
buf[i] = byte(mr.i)
|
||||
}
|
||||
return len(buf), nil
|
||||
}
|
||||
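The tests above exercise a 100-signer flow and the early-nonce context API; a condensed two-party version of the same flow looks roughly like the sketch below. It only reuses API names that appear in this test file (NewContext, WithKnownSigners, NewSession, RegisterPubNonce, Sign, CombineSig, FinalSig); import paths follow the test imports, and errors are elided for brevity, which a real caller must not do.

```go
package main

import (
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/musig2"
	"next.orly.dev/pkg/crypto/sha256"
)

func main() {
	// Two local signers; in practice each party runs on its own machine.
	sk1, _ := btcec.NewSecretKey()
	sk2, _ := btcec.NewSecretKey()
	signers := []*btcec.PublicKey{sk1.PubKey(), sk2.PubKey()}

	ctx1, _ := musig2.NewContext(sk1, true, musig2.WithKnownSigners(signers))
	ctx2, _ := musig2.NewContext(sk2, true, musig2.WithKnownSigners(signers))
	s1, _ := ctx1.NewSession()
	s2, _ := ctx2.NewSession()

	// Exchange public nonces before signing.
	s1.RegisterPubNonce(s2.PublicNonce())
	s2.RegisterPubNonce(s1.PublicNonce())

	msg := sha256.Sum256([]byte("example message"))

	// Each party produces a partial signature; party 1 acts as combiner.
	_, _ = s1.Sign(msg)
	psig2, _ := s2.Sign(msg)
	s1.CombineSig(psig2)

	combinedKey, _ := ctx1.CombinedKey()
	fmt.Println("valid:", s1.FinalSig().Verify(msg[:], combinedKey))
}
```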
407
pkg/crypto/ec/musig2/nonces.go
Normal file
@@ -0,0 +1,407 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/chainhash"
|
||||
"next.orly.dev/pkg/crypto/ec/schnorr"
|
||||
)
|
||||
|
||||
const (
|
||||
// PubNonceSize is the size of the public nonces. Each public nonce is
|
||||
// serialized using the full compressed encoding, which uses 33 bytes for each
|
||||
// nonce.
|
||||
PubNonceSize = 66
|
||||
// SecNonceSize is the size of the secret nonces for musig2. The secret
|
||||
// nonces are the corresponding secret keys to the public nonce points.
|
||||
SecNonceSize = 97
|
||||
)
|
||||
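For orientation, the two sizes above decompose as follows (simple arithmetic, not package code): a public nonce is two 33-byte compressed points, and a secret nonce is two 32-byte scalars plus the signer's 33-byte compressed public key.

```go
package example

// Size breakdown implied by the constants above.
const (
	pubNonceSizeCheck = 33 + 33      // two compressed nonce points  = 66
	secNonceSizeCheck = 32 + 32 + 33 // two scalars + compressed key = 97
)
```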
|
||||
var (
|
||||
// NonceAuxTag is the tag used to optionally mix in the secret key with
|
||||
// the set of aux randomness.
|
||||
NonceAuxTag = []byte("MuSig/aux")
|
||||
// NonceGenTag is used to generate the value (from a set of required and
|
||||
// optional field) that will be used as the part of the secret nonce.
|
||||
NonceGenTag = []byte("MuSig/nonce")
|
||||
byteOrder = binary.BigEndian
|
||||
// ErrPubkeyInvalid is returned when the pubkey of the WithPublicKey
|
||||
// option is not passed or of invalid length.
|
||||
ErrPubkeyInvalid = errors.New("nonce generation requires a valid pubkey")
|
||||
)
|
||||
|
||||
// zeroSecNonce is a secret nonce that's all zeroes. This is used to check that
|
||||
// we're not attempting to re-use a nonce, and also protect callers from it.
|
||||
var zeroSecNonce [SecNonceSize]byte
|
||||
|
||||
// Nonces holds the public and secret nonces required for musig2.
|
||||
//
|
||||
// TODO(roasbeef): methods on this to help w/ parsing, etc?
|
||||
type Nonces struct {
|
||||
// PubNonce holds the two 33-byte compressed encoded points that serve
|
||||
// as the public set of nonces.
|
||||
PubNonce [PubNonceSize]byte
|
||||
// SecNonce holds the two 32-byte scalar values that are the secret
|
||||
// keys to the two public nonces, plus the signer's compressed public key.
|
||||
SecNonce [SecNonceSize]byte
|
||||
}
|
||||
|
||||
// secNonceToPubNonce takes our two secret nonces, and produces their two
|
||||
// corresponding EC points, serialized in compressed format.
|
||||
func secNonceToPubNonce(secNonce [SecNonceSize]byte) [PubNonceSize]byte {
|
||||
var k1Mod, k2Mod btcec.ModNScalar
|
||||
k1Mod.SetByteSlice(secNonce[:btcec.SecKeyBytesLen])
|
||||
k2Mod.SetByteSlice(secNonce[btcec.SecKeyBytesLen:])
|
||||
var r1, r2 btcec.JacobianPoint
|
||||
btcec.ScalarBaseMultNonConst(&k1Mod, &r1)
|
||||
btcec.ScalarBaseMultNonConst(&k2Mod, &r2)
|
||||
// Next, we'll convert the key in jacobian format to a normal public
|
||||
// key expressed in affine coordinates.
|
||||
r1.ToAffine()
|
||||
r2.ToAffine()
|
||||
r1Pub := btcec.NewPublicKey(&r1.X, &r1.Y)
|
||||
r2Pub := btcec.NewPublicKey(&r2.X, &r2.Y)
|
||||
var pubNonce [PubNonceSize]byte
|
||||
// The public nonces are serialized as: R1 || R2, where both keys are
|
||||
// serialized in compressed format.
|
||||
copy(pubNonce[:], r1Pub.SerializeCompressed())
|
||||
copy(
|
||||
pubNonce[btcec.PubKeyBytesLenCompressed:],
|
||||
r2Pub.SerializeCompressed(),
|
||||
)
|
||||
return pubNonce
|
||||
}
|
||||
|
||||
// NonceGenOption is a function option that allows callers to modify how nonce
|
||||
// generation happens.
|
||||
type NonceGenOption func(*nonceGenOpts)
|
||||
|
||||
// nonceGenOpts is the set of options that control how nonce generation
|
||||
// happens.
|
||||
type nonceGenOpts struct {
|
||||
// randReader is what we'll use to generate a set of random bytes. If
|
||||
// unspecified, then the normal crypto/rand rand.Read method will be
|
||||
// used in its place.
|
||||
randReader io.Reader
|
||||
// publicKey is the mandatory public key that will be mixed into the nonce
|
||||
// generation.
|
||||
publicKey []byte
|
||||
// secretKey is an optional argument that's used to further augment the
|
||||
// generated nonce by xor'ing it with this secret key.
|
||||
secretKey []byte
|
||||
// combinedKey is an optional argument that if specified, will be
|
||||
// combined along with the nonce generation.
|
||||
combinedKey []byte
|
||||
// msg is an optional argument that will be mixed into the nonce
|
||||
// derivation algorithm.
|
||||
msg []byte
|
||||
// auxInput is an optional argument that will be mixed into the nonce
|
||||
// derivation algorithm.
|
||||
auxInput []byte
|
||||
}
|
||||
|
||||
// cryptoRandAdapter is an adapter struct that allows us to pass in the package
|
||||
// level Read function from crypto/rand into a context that accepts an
|
||||
// io.Reader.
|
||||
type cryptoRandAdapter struct{}
|
||||
|
||||
// Read implements the io.Reader interface for the crypto/rand package. By
|
||||
// default, we always use the crypto/rand reader, but the caller is able to
|
||||
// specify their own generation, which can be useful for deterministic tests.
|
||||
func (c *cryptoRandAdapter) Read(p []byte) (n int, err error) {
|
||||
return rand.Read(p)
|
||||
}
|
||||
|
||||
// defaultNonceGenOpts returns the default set of nonce generation options.
|
||||
func defaultNonceGenOpts() *nonceGenOpts {
|
||||
return &nonceGenOpts{randReader: &cryptoRandAdapter{}}
|
||||
}
|
||||
|
||||
// WithCustomRand allows a caller to use a custom random number generator in
|
||||
// place of crypto/rand. This should only really be used to generate
|
||||
// deterministic tests.
|
||||
func WithCustomRand(r io.Reader) NonceGenOption {
|
||||
return func(o *nonceGenOpts) { o.randReader = r }
|
||||
}
|
||||
|
||||
// WithPublicKey is the mandatory public key that will be mixed into the nonce
|
||||
// generation.
|
||||
func WithPublicKey(pubKey *btcec.PublicKey) NonceGenOption {
|
||||
return func(o *nonceGenOpts) {
|
||||
o.publicKey = pubKey.SerializeCompressed()
|
||||
}
|
||||
}
|
||||
|
||||
// WithNonceSecretKeyAux allows a caller to optionally specify a secret key
|
||||
// that should be used to augment the randomness used to generate the nonces.
|
||||
func WithNonceSecretKeyAux(secKey *btcec.SecretKey) NonceGenOption {
|
||||
return func(o *nonceGenOpts) { o.secretKey = secKey.Serialize() }
|
||||
}
|
||||
|
||||
// WithNoncePrivateKeyAux is an alias of WithNonceSecretKeyAux.
var WithNoncePrivateKeyAux = WithNonceSecretKeyAux
|
||||
|
||||
// WithNonceCombinedKeyAux allows a caller to optionally specify the combined
|
||||
// key used in this signing session to further augment the randomness used to
|
||||
// generate nonces.
|
||||
func WithNonceCombinedKeyAux(combinedKey *btcec.PublicKey) NonceGenOption {
|
||||
return func(o *nonceGenOpts) {
|
||||
o.combinedKey = schnorr.SerializePubKey(combinedKey)
|
||||
}
|
||||
}
|
||||
|
||||
// WithNonceMessageAux allows a caller to optionally specify a message to be
|
||||
// mixed into the randomness generated to create the nonce.
|
||||
func WithNonceMessageAux(msg [32]byte) NonceGenOption {
|
||||
return func(o *nonceGenOpts) { o.msg = msg[:] }
|
||||
}
|
||||
|
||||
// WithNonceAuxInput is a set of auxiliary randomness, similar to BIP 340 that
|
||||
// can be used to further augment the nonce generation process.
|
||||
func WithNonceAuxInput(aux []byte) NonceGenOption {
|
||||
return func(o *nonceGenOpts) { o.auxInput = aux }
|
||||
}
|
||||
|
||||
// withCustomOptions allows a caller to pass a complete set of custom
|
||||
// nonceGenOpts, without needing to create custom and checked structs such as
|
||||
// *btcec.SecretKey. This is mainly used to match the testcases provided by
|
||||
// the MuSig2 BIP.
|
||||
func withCustomOptions(customOpts nonceGenOpts) NonceGenOption {
|
||||
return func(o *nonceGenOpts) {
|
||||
o.randReader = customOpts.randReader
|
||||
o.secretKey = customOpts.secretKey
|
||||
o.combinedKey = customOpts.combinedKey
|
||||
o.auxInput = customOpts.auxInput
|
||||
o.msg = customOpts.msg
|
||||
o.publicKey = customOpts.publicKey
|
||||
}
|
||||
}
|
||||
|
||||
// lengthWriter is a function closure that allows a caller to control how the
|
||||
// length prefix of a byte slice is written.
|
||||
//
|
||||
// TODO(roasbeef): use type params once we bump repo version
|
||||
type lengthWriter func(w io.Writer, b []byte) error
|
||||
|
||||
// uint8Writer is an implementation of lengthWriter that writes the length of
|
||||
// the byte slice using 1 byte.
|
||||
func uint8Writer(w io.Writer, b []byte) error {
|
||||
return binary.Write(w, byteOrder, uint8(len(b)))
|
||||
}
|
||||
|
||||
// uint32Writer is an implementation of lengthWriter that writes the length of
|
||||
// the byte slice using 4 bytes.
|
||||
func uint32Writer(w io.Writer, b []byte) error {
|
||||
return binary.Write(w, byteOrder, uint32(len(b)))
|
||||
}
|
||||
|
||||
// uint64Writer is an implementation of lengthWriter that writes the length of
|
||||
// the byte slice using 8 bytes.
|
||||
func uint64Writer(w io.Writer, b []byte) error {
|
||||
return binary.Write(w, byteOrder, uint64(len(b)))
|
||||
}
|
||||
|
||||
// writeBytesPrefix is used to write out: len(b) || b, to the passed io.Writer.
|
||||
// The lengthWriter function closure is used to allow the caller to specify the
|
||||
// precise byte packing of the length.
|
||||
func writeBytesPrefix(w io.Writer, b []byte, lenWriter lengthWriter) error {
|
||||
// Write out the length of the byte first, followed by the set of bytes
|
||||
// itself.
|
||||
if err := lenWriter(w, b); chk.T(err) {
|
||||
return err
|
||||
}
|
||||
if _, err := w.Write(b); chk.T(err) {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// genNonceAuxBytes writes out the full byte string used to derive a secret
|
||||
// nonce based on some initial randomness as well as the series of optional
|
||||
// fields. The byte string used for derivation is:
|
||||
// - tagged_hash("MuSig/nonce", rand || len(pk) || pk ||
|
||||
// len(aggpk) || aggpk || m_prefixed || len(in) || in || i).
|
||||
//
|
||||
// where i is the ith secret nonce being generated and m_prefixed is:
|
||||
// - bytes(1, 0) if the message is blank
|
||||
// - bytes(1, 1) || bytes(8, len(m)) || m if the message is present.
|
||||
func genNonceAuxBytes(
|
||||
rand []byte, pubkey []byte, i int,
|
||||
opts *nonceGenOpts,
|
||||
) (*chainhash.Hash, error) {
|
||||
|
||||
var w bytes.Buffer
|
||||
// First, write out the randomness generated in the prior step.
|
||||
if _, err := w.Write(rand); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
// Next, we'll write out: len(pk) || pk
|
||||
err := writeBytesPrefix(&w, pubkey, uint8Writer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Next, we'll write out: len(aggpk) || aggpk.
|
||||
err = writeBytesPrefix(&w, opts.combinedKey, uint8Writer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch {
|
||||
// If the message isn't present, then we'll just write out a single
|
||||
// uint8 of a zero byte: m_prefixed = bytes(1, 0).
|
||||
case opts.msg == nil:
|
||||
if _, err := w.Write([]byte{0x00}); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
// Otherwise, we'll write a single byte of 0x01 with a 1 byte length
|
||||
// prefix, followed by the message itself with an 8 byte length prefix:
|
||||
// m_prefixed = bytes(1, 1) || bytes(8, len(m)) || m.
|
||||
case len(opts.msg) == 0:
|
||||
fallthrough
|
||||
default:
|
||||
if _, err := w.Write([]byte{0x01}); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
err = writeBytesPrefix(&w, opts.msg, uint64Writer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
// Finally we'll write out the auxiliary input.
|
||||
err = writeBytesPrefix(&w, opts.auxInput, uint32Writer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Next we'll write out the interaction/index number which will
|
||||
// uniquely generate two nonces given the rest of the possibly static
|
||||
// parameters.
|
||||
if err := binary.Write(&w, byteOrder, uint8(i)); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
// With the message buffer complete, we'll now derive the tagged hash
|
||||
// using our set of params.
|
||||
return chainhash.TaggedHash(NonceGenTag, w.Bytes()), nil
|
||||
}
|
||||
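The nil-versus-empty message distinction handled in the switch above is easy to miss: a nil message contributes a single 0x00 byte, while a present (even zero-length) message contributes 0x01, an 8-byte big-endian length, and the message itself. A hypothetical stand-alone helper showing just that framing:

```go
package example

import "encoding/binary"

// encodeMsgPrefix mirrors the m_prefixed framing described above: a nil
// message becomes bytes(1, 0), while a present (possibly empty) message
// becomes bytes(1, 1) || bytes(8, len(m)) || m. Illustrative only, not part
// of this package.
func encodeMsgPrefix(msg []byte) []byte {
	if msg == nil {
		return []byte{0x00}
	}
	out := []byte{0x01}
	var l [8]byte
	binary.BigEndian.PutUint64(l[:], uint64(len(msg)))
	out = append(out, l[:]...)
	return append(out, msg...)
}
```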
|
||||
// GenNonces generates the secret nonces, as well as the public nonces which
|
||||
// correspond to an EC point generated using the secret nonce as a secret key.
|
||||
func GenNonces(options ...NonceGenOption) (*Nonces, error) {
|
||||
opts := defaultNonceGenOpts()
|
||||
for _, opt := range options {
|
||||
opt(opts)
|
||||
}
|
||||
// We require the pubkey option.
|
||||
if opts.publicKey == nil || len(opts.publicKey) != 33 {
|
||||
return nil, ErrPubkeyInvalid
|
||||
}
|
||||
// First, we'll start out by generating 32 random bytes drawn from our
|
||||
// CSPRNG.
|
||||
var randBytes [32]byte
|
||||
if _, err := opts.randReader.Read(randBytes[:]); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
// If the options contain a secret key, we XOR it with the tagged
|
||||
// random bytes.
|
||||
if len(opts.secretKey) == 32 {
|
||||
taggedHash := chainhash.TaggedHash(NonceAuxTag, randBytes[:])
|
||||
|
||||
for i := 0; i < chainhash.HashSize; i++ {
|
||||
randBytes[i] = opts.secretKey[i] ^ taggedHash[i]
|
||||
}
|
||||
}
|
||||
// Using our randomness, pubkey and the set of optional params, generate our
|
||||
// two secret nonces: k1 and k2.
|
||||
k1, err := genNonceAuxBytes(randBytes[:], opts.publicKey, 0, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
k2, err := genNonceAuxBytes(randBytes[:], opts.publicKey, 1, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var k1Mod, k2Mod btcec.ModNScalar
|
||||
k1Mod.SetBytes((*[32]byte)(k1))
|
||||
k2Mod.SetBytes((*[32]byte)(k2))
|
||||
// The secret nonces are serialized as the concatenation of the two 32
|
||||
// byte secret nonce values and the pubkey.
|
||||
var nonces Nonces
|
||||
k1Mod.PutBytesUnchecked(nonces.SecNonce[:])
|
||||
k2Mod.PutBytesUnchecked(nonces.SecNonce[btcec.SecKeyBytesLen:])
|
||||
copy(nonces.SecNonce[btcec.SecKeyBytesLen*2:], opts.publicKey)
|
||||
// Next, we'll generate R_1 = k_1*G and R_2 = k_2*G. Along the way we
|
||||
// need to map our nonce values into mod n scalars so we can work with
|
||||
// the btcec API.
|
||||
nonces.PubNonce = secNonceToPubNonce(nonces.SecNonce)
|
||||
return &nonces, nil
|
||||
}
|
||||
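A minimal sketch of calling GenNonces as a signer would: the public key option is mandatory, while the secret-key option just mixes extra randomness into the derivation. Import paths are assumed from this commit's tests.

```go
package main

import (
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/musig2"
)

func main() {
	sk, _ := btcec.NewSecretKey()

	nonces, err := musig2.GenNonces(
		musig2.WithPublicKey(sk.PubKey()),
		musig2.WithNonceSecretKeyAux(sk),
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("public nonce: %x\n", nonces.PubNonce[:])
	// nonces.SecNonce must be kept private and must never be reused.
}
```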
|
||||
// AggregateNonces aggregates the set of a pair of public nonces for each party
|
||||
// into a single aggregated nonces to be used for multi-signing.
|
||||
func AggregateNonces(pubNonces [][PubNonceSize]byte) (
|
||||
[PubNonceSize]byte,
|
||||
error,
|
||||
) {
|
||||
|
||||
// combineNonces is a helper function that aggregates (adds) up a
|
||||
// series of nonces encoded in compressed format. It uses a slicing
|
||||
// function to extract 33 bytes at a time from the packed 2x public
|
||||
// nonces.
|
||||
type nonceSlicer func([PubNonceSize]byte) []byte
|
||||
combineNonces := func(slicer nonceSlicer) (btcec.JacobianPoint, error) {
|
||||
// Convert the set of nonces into jacobian coordinates we can
|
||||
// use to accumulate them all into each other.
|
||||
pubNonceJs := make([]*btcec.JacobianPoint, len(pubNonces))
|
||||
for i, pubNonceBytes := range pubNonces {
|
||||
// Using the slicer, extract just the bytes we need to
|
||||
// decode.
|
||||
var nonceJ btcec.JacobianPoint
|
||||
nonceJ, err := btcec.ParseJacobian(slicer(pubNonceBytes))
|
||||
if err != nil {
|
||||
return btcec.JacobianPoint{}, err
|
||||
}
|
||||
pubNonceJs[i] = &nonceJ
|
||||
}
|
||||
// Now that we have the set of complete nonces, we'll aggregate
|
||||
// them: R = R_i + R_i+1 + ... + R_i+n.
|
||||
var aggregateNonce btcec.JacobianPoint
|
||||
for _, pubNonceJ := range pubNonceJs {
|
||||
btcec.AddNonConst(
|
||||
&aggregateNonce, pubNonceJ, &aggregateNonce,
|
||||
)
|
||||
}
|
||||
aggregateNonce.ToAffine()
|
||||
return aggregateNonce, nil
|
||||
}
|
||||
// The final public nonce is actually two nonces, one that
|
||||
// aggregates the first nonce of all the parties, and the other that
|
||||
// aggregates the second nonce of all the parties.
|
||||
var finalNonce [PubNonceSize]byte
|
||||
combinedNonce1, err := combineNonces(
|
||||
func(n [PubNonceSize]byte) []byte {
|
||||
return n[:btcec.PubKeyBytesLenCompressed]
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return finalNonce, err
|
||||
}
|
||||
combinedNonce2, err := combineNonces(
|
||||
func(n [PubNonceSize]byte) []byte {
|
||||
return n[btcec.PubKeyBytesLenCompressed:]
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return finalNonce, err
|
||||
}
|
||||
copy(finalNonce[:], btcec.JacobianToByteSlice(combinedNonce1))
|
||||
copy(
|
||||
finalNonce[btcec.PubKeyBytesLenCompressed:],
|
||||
btcec.JacobianToByteSlice(combinedNonce2),
|
||||
)
|
||||
return finalNonce, nil
|
||||
}
|
||||
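Putting GenNonces and AggregateNonces together, a coordinator-side sketch for two parties might look like the following; only the 66-byte public halves are ever shared. Import paths follow this commit's tests and error handling is abbreviated.

```go
package main

import (
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/musig2"
)

func main() {
	// Each party generates its own nonce pair locally...
	sk1, _ := btcec.NewSecretKey()
	sk2, _ := btcec.NewSecretKey()
	n1, _ := musig2.GenNonces(musig2.WithPublicKey(sk1.PubKey()))
	n2, _ := musig2.GenNonces(musig2.WithPublicKey(sk2.PubKey()))

	// ...and only the public halves are exchanged and aggregated.
	aggNonce, err := musig2.AggregateNonces(
		[][musig2.PubNonceSize]byte{n1.PubNonce, n2.PubNonce},
	)
	if err != nil {
		panic(err)
	}
	fmt.Printf("aggregated nonce: %x\n", aggNonce[:])
}
```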
151
pkg/crypto/ec/musig2/nonces_test.go
Normal file
@@ -0,0 +1,151 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
type nonceGenTestCase struct {
|
||||
Rand string `json:"rand_"`
|
||||
Sk string `json:"sk"`
|
||||
AggPk string `json:"aggpk"`
|
||||
Msg *string `json:"msg"`
|
||||
ExtraIn string `json:"extra_in"`
|
||||
Pk string `json:"pk"`
|
||||
Expected string `json:"expected"`
|
||||
}
|
||||
|
||||
type nonceGenTestCases struct {
|
||||
TestCases []nonceGenTestCase `json:"test_cases"`
|
||||
}
|
||||
|
||||
const (
|
||||
nonceGenTestVectorsFileName = "nonce_gen_vectors.json"
|
||||
nonceAggTestVectorsFileName = "nonce_agg_vectors.json"
|
||||
)
|
||||
|
||||
// TestMusig2NonceGenTestVectors tests the nonce generation function with the
|
||||
// test vectors defined in the Musig2 BIP.
|
||||
func TestMusig2NonceGenTestVectors(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, nonceGenTestVectorsFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCases nonceGenTestCases
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||
for i, testCase := range testCases.TestCases {
|
||||
testCase := testCase
|
||||
customOpts := nonceGenOpts{
|
||||
randReader: &memsetRandReader{i: 0},
|
||||
secretKey: mustParseHex(testCase.Sk),
|
||||
combinedKey: mustParseHex(testCase.AggPk),
|
||||
auxInput: mustParseHex(testCase.ExtraIn),
|
||||
publicKey: mustParseHex(testCase.Pk),
|
||||
}
|
||||
if testCase.Msg != nil {
|
||||
customOpts.msg = mustParseHex(*testCase.Msg)
|
||||
}
|
||||
t.Run(
|
||||
fmt.Sprintf("test_case=%v", i), func(t *testing.T) {
|
||||
nonce, err := GenNonces(withCustomOptions(customOpts))
|
||||
if err != nil {
|
||||
t.Fatalf("err gen nonce aux bytes %v", err)
|
||||
}
|
||||
expectedBytes, _ := hex.Dec(testCase.Expected)
|
||||
if !utils.FastEqual(nonce.SecNonce[:], expectedBytes) {
|
||||
t.Fatalf(
|
||||
"nonces don't match: expected %x, got %x",
|
||||
expectedBytes, nonce.SecNonce[:],
|
||||
)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
type nonceAggError struct {
|
||||
Type string `json:"type"`
|
||||
Signer int `json:"signer"`
|
||||
Contrib string `json:"contrib"`
|
||||
}
|
||||
|
||||
type nonceAggValidCase struct {
|
||||
Indices []int `json:"pnonce_indices"`
|
||||
Expected string `json:"expected"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type nonceAggInvalidCase struct {
|
||||
Indices []int `json:"pnonce_indices"`
|
||||
Error nonceAggError `json:"error"`
|
||||
Comment string `json:"comment"`
|
||||
ExpectedErr string `json:"btcec_err"`
|
||||
}
|
||||
|
||||
type nonceAggTestCases struct {
|
||||
Nonces []string `json:"pnonces"`
|
||||
ValidCases []nonceAggValidCase `json:"valid_test_cases"`
|
||||
InvalidCases []nonceAggInvalidCase `json:"error_test_cases"`
|
||||
}
|
||||
|
||||
// TestMusig2AggregateNoncesTestVectors tests that the musig2 implementation
|
||||
// passes the nonce aggregation test vectors for musig2 1.0.
|
||||
func TestMusig2AggregateNoncesTestVectors(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, nonceAggTestVectorsFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCases nonceAggTestCases
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||
nonces := make([][PubNonceSize]byte, len(testCases.Nonces))
|
||||
for i := range testCases.Nonces {
|
||||
var nonce [PubNonceSize]byte
|
||||
copy(nonce[:], mustParseHex(testCases.Nonces[i]))
|
||||
nonces[i] = nonce
|
||||
}
|
||||
for i, testCase := range testCases.ValidCases {
|
||||
testCase := testCase
|
||||
var testNonces [][PubNonceSize]byte
|
||||
for _, idx := range testCase.Indices {
|
||||
testNonces = append(testNonces, nonces[idx])
|
||||
}
|
||||
t.Run(
|
||||
fmt.Sprintf("valid_case=%v", i), func(t *testing.T) {
|
||||
aggregatedNonce, err := AggregateNonces(testNonces)
|
||||
require.NoError(t, err)
|
||||
var expectedNonce [PubNonceSize]byte
|
||||
copy(expectedNonce[:], mustParseHex(testCase.Expected))
|
||||
require.Equal(t, aggregatedNonce[:], expectedNonce[:])
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
for i, testCase := range testCases.InvalidCases {
|
||||
var testNonces [][PubNonceSize]byte
|
||||
for _, idx := range testCase.Indices {
|
||||
testNonces = append(testNonces, nonces[idx])
|
||||
}
|
||||
t.Run(
|
||||
fmt.Sprintf("invalid_case=%v", i), func(t *testing.T) {
|
||||
_, err := AggregateNonces(testNonces)
|
||||
require.True(t, err != nil)
|
||||
require.Equal(t, testCase.ExpectedErr, err.Error())
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
768
pkg/crypto/ec/musig2/sign.go
Normal file
@@ -0,0 +1,768 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/chainhash"
|
||||
"next.orly.dev/pkg/crypto/ec/schnorr"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
var (
|
||||
// NonceBlindTag is the tag used to construct the value b, which
|
||||
// blinds the second public nonce of each party.
|
||||
NonceBlindTag = []byte("MuSig/noncecoef")
|
||||
|
||||
// ChallengeHashTag is the tag used to construct the challenge hash
|
||||
ChallengeHashTag = []byte("BIP0340/challenge")
|
||||
|
||||
// ErrNoncePointAtInfinity is returned if during signing, the fully
|
||||
// combined public nonce is the point at infinity.
|
||||
ErrNoncePointAtInfinity = fmt.Errorf(
|
||||
"signing nonce is the infinity " +
|
||||
"point",
|
||||
)
|
||||
|
||||
// ErrSecKeyZero is returned when the secret key for signing is
|
||||
// actually zero.
|
||||
ErrSecKeyZero = fmt.Errorf("priv key is zero")
|
||||
|
||||
// ErrPartialSigInvalid is returned when a partial signature is found to be
|
||||
// invalid.
|
||||
ErrPartialSigInvalid = fmt.Errorf("partial signature is invalid")
|
||||
|
||||
// ErrSecretNonceZero is returned when a secret nonce passed in is all
|
||||
// zero.
|
||||
ErrSecretNonceZero = fmt.Errorf("secret nonce is blank")
|
||||
|
||||
// ErrSecNoncePubkey is returned when the signing key does not match the
|
||||
// sec nonce pubkey
|
||||
ErrSecNoncePubkey = fmt.Errorf("public key does not match secnonce")
|
||||
|
||||
// ErrPubkeyNotIncluded is returned when the signer's pubkey is not included
|
||||
// in the list of pubkeys.
|
||||
ErrPubkeyNotIncluded = fmt.Errorf(
|
||||
"signer's pubkey must be included" +
|
||||
" in the list of pubkeys",
|
||||
)
|
||||
)
|
||||
|
||||
// infinityPoint is the jacobian representation of the point at infinity.
|
||||
var infinityPoint btcec.JacobianPoint
|
||||
|
||||
// PartialSignature represents a partial (s-only) musig2 multi-signature. This
|
||||
// isn't a valid schnorr signature by itself, as it needs to be aggregated
|
||||
// along with the other partial signatures to be completed.
|
||||
type PartialSignature struct {
|
||||
S *btcec.ModNScalar
|
||||
|
||||
R *btcec.PublicKey
|
||||
}
|
||||
|
||||
// NewPartialSignature returns a new instance of the partial sig struct.
|
||||
func NewPartialSignature(
|
||||
s *btcec.ModNScalar,
|
||||
r *btcec.PublicKey,
|
||||
) PartialSignature {
|
||||
|
||||
return PartialSignature{
|
||||
S: s,
|
||||
R: r,
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes a serialized version of the partial signature to the passed
|
||||
// io.Writer
|
||||
func (p *PartialSignature) Encode(w io.Writer) error {
|
||||
var sBytes [32]byte
|
||||
p.S.PutBytes(&sBytes)
|
||||
|
||||
if _, err := w.Write(sBytes[:]); chk.T(err) {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Decode attempts to parse a serialized PartialSignature stored in the io reader.
|
||||
func (p *PartialSignature) Decode(r io.Reader) error {
|
||||
p.S = new(btcec.ModNScalar)
|
||||
|
||||
var sBytes [32]byte
|
||||
if _, err := io.ReadFull(r, sBytes[:]); chk.T(err) {
|
||||
return nil
|
||||
}
|
||||
|
||||
overflows := p.S.SetBytes(&sBytes)
|
||||
if overflows == 1 {
|
||||
return ErrPartialSigInvalid
|
||||
}
|
||||
|
||||
return nil
|
||||
}
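
// encodeDecodeRoundTrip is an illustrative sketch added for documentation
// purposes (it is not part of the upstream btcsuite code): it shows how the
// Encode and Decode methods above can be used to serialize a partial
// signature and parse it back.
func encodeDecodeRoundTrip(sig *PartialSignature) (*PartialSignature, error) {
	var buf bytes.Buffer

	// Serialize the 32-byte s value of the partial signature.
	if err := sig.Encode(&buf); err != nil {
		return nil, err
	}

	// Parse the serialized bytes back. Note that only the s value
	// round-trips; the R component is not part of the encoding.
	var decoded PartialSignature
	if err := decoded.Decode(&buf); err != nil {
		return nil, err
	}
	return &decoded, nil
}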
|
||||
|
||||
// SignOption is a functional option argument that allows callers to modify the
|
||||
// way we generate musig2 schnorr signatures.
|
||||
type SignOption func(*signOptions)
|
||||
|
||||
// signOptions houses the set of functional options that can be used to modify
|
||||
// the method used to generate the musig2 partial signature.
|
||||
type signOptions struct {
|
||||
// fastSign determines if we'll skip the check at the end of the
|
||||
// routine where we attempt to verify the produced signature.
|
||||
fastSign bool
|
||||
|
||||
// sortKeys determines if the set of keys should be sorted before doing
|
||||
// key aggregation.
|
||||
sortKeys bool
|
||||
|
||||
// tweaks specifies a series of tweaks to be applied to the aggregated
|
||||
// public key, which also partially carries over into the signing
|
||||
// process.
|
||||
tweaks []KeyTweakDesc
|
||||
|
||||
// taprootTweak specifies a taproot specific tweak applied on top of the tweaks
|
||||
// specified above. Normally we'd just apply the raw 32 byte tweak, but
|
||||
// for taproot, we first need to compute the aggregated key before
|
||||
// tweaking, and then use it as the internal key. This is required as
|
||||
// the taproot tweak also commits to the public key, which in this case
|
||||
// is the aggregated key before the tweak.
|
||||
taprootTweak []byte
|
||||
|
||||
// bip86Tweak specifies that the taproot tweak should be done in a BIP
|
||||
// 86 style, where we don't expect an actual tweak and instead just
|
||||
// commit to the public key itself.
|
||||
bip86Tweak bool
|
||||
}
|
||||
|
||||
// defaultSignOptions returns the default set of signing operations.
|
||||
func defaultSignOptions() *signOptions {
|
||||
return &signOptions{}
|
||||
}
|
||||
|
||||
// WithFastSign forces signing to skip the extra verification step at the end.
|
||||
// Performance sensitive applications may opt to use this option to speed up
|
||||
// the signing operation.
|
||||
func WithFastSign() SignOption {
|
||||
return func(o *signOptions) {
|
||||
o.fastSign = true
|
||||
}
|
||||
}
|
||||
|
||||
// WithSortedKeys determines if the set of signing public keys are to be sorted
|
||||
// or not before doing key aggregation.
|
||||
func WithSortedKeys() SignOption {
|
||||
return func(o *signOptions) {
|
||||
o.sortKeys = true
|
||||
}
|
||||
}
|
||||
|
||||
// WithTweaks determines if the aggregated public key used should apply a
|
||||
// series of tweaks before key aggregation.
|
||||
func WithTweaks(tweaks ...KeyTweakDesc) SignOption {
|
||||
return func(o *signOptions) {
|
||||
o.tweaks = tweaks
|
||||
}
|
||||
}
|
||||
|
||||
// WithTaprootSignTweak allows a caller to specify a tweak that should be used
|
||||
// in a bip 340 manner when signing. This differs from WithTweaks as the tweak
|
||||
// will be assumed to always be x-only and the intermediate aggregate key
|
||||
// before tweaking will be used to generate part of the tweak (as the taproot
|
||||
// tweak also commits to the internal key).
|
||||
//
|
||||
// This option should be used in the taproot context to create a valid
|
||||
// signature for the keypath spend for taproot, when the output key is actually
|
||||
// committing to a script path, or some other data.
|
||||
func WithTaprootSignTweak(scriptRoot []byte) SignOption {
|
||||
return func(o *signOptions) {
|
||||
o.taprootTweak = scriptRoot
|
||||
}
|
||||
}
|
||||
|
||||
// WithBip86SignTweak allows a caller to specify a tweak that should be used in
|
||||
// a bip 340 manner when signing, factoring in BIP 86 as well. This differs
|
||||
// from WithTaprootSignTweak as no true script root will be committed to,
|
||||
// instead we just commit to the internal key.
|
||||
//
|
||||
// This option should be used in the taproot context to create a valid
|
||||
// signature for the keypath spend for taproot, when the output key was
|
||||
// generated using BIP 86.
|
||||
func WithBip86SignTweak() SignOption {
|
||||
return func(o *signOptions) {
|
||||
o.bip86Tweak = true
|
||||
}
|
||||
}
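
// gatherSignOptions is an illustrative sketch (not part of the upstream
// btcsuite code) showing how the functional options above are folded into a
// single signOptions value, mirroring what Sign and verifyPartialSig do
// internally before aggregating keys.
func gatherSignOptions(signOpts ...SignOption) *signOptions {
	opts := defaultSignOptions()
	for _, option := range signOpts {
		option(opts)
	}
	return opts
}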
|
||||
|
||||
// computeSigningNonce calculates the final nonce used for signing. This will
|
||||
// be the R value used in the final signature.
|
||||
func computeSigningNonce(
|
||||
combinedNonce [PubNonceSize]byte,
|
||||
combinedKey *btcec.PublicKey, msg [32]byte,
|
||||
) (
|
||||
*btcec.JacobianPoint, *btcec.ModNScalar, error,
|
||||
) {
|
||||
|
||||
// Next we'll compute the value b, that blinds our second public
|
||||
// nonce:
|
||||
// * b = h(tag=NonceBlindTag, combinedNonce || combinedKey || m).
|
||||
var (
|
||||
nonceMsgBuf bytes.Buffer
|
||||
nonceBlinder btcec.ModNScalar
|
||||
)
|
||||
nonceMsgBuf.Write(combinedNonce[:])
|
||||
nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey))
|
||||
nonceMsgBuf.Write(msg[:])
|
||||
nonceBlindHash := chainhash.TaggedHash(
|
||||
NonceBlindTag, nonceMsgBuf.Bytes(),
|
||||
)
|
||||
nonceBlinder.SetByteSlice(nonceBlindHash[:])
|
||||
|
||||
// Next, we'll parse the public nonces into R1 and R2.
|
||||
r1J, err := btcec.ParseJacobian(
|
||||
combinedNonce[:btcec.PubKeyBytesLenCompressed],
|
||||
)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
r2J, err := btcec.ParseJacobian(
|
||||
combinedNonce[btcec.PubKeyBytesLenCompressed:],
|
||||
)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// With our nonce blinding value, we'll now combine both the public
|
||||
// nonces, using the blinding factor to tweak the second nonce:
|
||||
// * R = R_1 + b*R_2
|
||||
var nonce btcec.JacobianPoint
|
||||
btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J)
|
||||
btcec.AddNonConst(&r1J, &r2J, &nonce)
|
||||
|
||||
// If the combined nonce is the point at infinity, we'll use the
|
||||
// generator point instead.
|
||||
if nonce == infinityPoint {
|
||||
G := btcec.Generator()
|
||||
G.AsJacobian(&nonce)
|
||||
}
|
||||
|
||||
return &nonce, &nonceBlinder, nil
|
||||
}
|
||||
|
||||
// Sign generates a musig2 partial signature given the passed key set, secret
|
||||
// nonce, public nonce, and secret keys. This method returns an error if the
|
||||
// generated nonces are either too large, or end up mapping to the point at
|
||||
// infinity.
|
||||
func Sign(
|
||||
secNonce [SecNonceSize]byte, privKey *btcec.SecretKey,
|
||||
combinedNonce [PubNonceSize]byte, pubKeys []*btcec.PublicKey,
|
||||
msg [32]byte, signOpts ...SignOption,
|
||||
) (*PartialSignature, error) {
|
||||
|
||||
// First, parse the set of optional signing options.
|
||||
opts := defaultSignOptions()
|
||||
for _, option := range signOpts {
|
||||
option(opts)
|
||||
}
|
||||
|
||||
// Check that our signing key belongs to the secNonce
|
||||
if !utils.FastEqual(
|
||||
secNonce[btcec.SecKeyBytesLen*2:],
|
||||
privKey.PubKey().SerializeCompressed(),
|
||||
) {
|
||||
|
||||
return nil, ErrSecNoncePubkey
|
||||
}
|
||||
|
||||
// Check that the key set contains the public key for our secret key.
|
||||
var containsSecKey bool
|
||||
for _, pk := range pubKeys {
|
||||
if privKey.PubKey().IsEqual(pk) {
|
||||
containsSecKey = true
|
||||
}
|
||||
}
|
||||
|
||||
if !containsSecKey {
|
||||
return nil, ErrPubkeyNotIncluded
|
||||
}
|
||||
|
||||
// Compute the hash of all the keys here as we'll need it to aggregate
|
||||
// the keys and also at the final step of signing.
|
||||
keysHash := keyHashFingerprint(pubKeys, opts.sortKeys)
|
||||
uniqueKeyIndex := secondUniqueKeyIndex(pubKeys, opts.sortKeys)
|
||||
|
||||
keyAggOpts := []KeyAggOption{
|
||||
WithKeysHash(keysHash), WithUniqueKeyIndex(uniqueKeyIndex),
|
||||
}
|
||||
switch {
|
||||
case opts.bip86Tweak:
|
||||
keyAggOpts = append(
|
||||
keyAggOpts, WithBIP86KeyTweak(),
|
||||
)
|
||||
case opts.taprootTweak != nil:
|
||||
keyAggOpts = append(
|
||||
keyAggOpts, WithTaprootKeyTweak(opts.taprootTweak),
|
||||
)
|
||||
case len(opts.tweaks) != 0:
|
||||
keyAggOpts = append(keyAggOpts, WithKeyTweaks(opts.tweaks...))
|
||||
}
|
||||
|
||||
// Next we'll construct the aggregated public key based on the set of
|
||||
// signers.
|
||||
combinedKey, parityAcc, _, err := AggregateKeys(
|
||||
pubKeys, opts.sortKeys, keyAggOpts...,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// We'll now combine both the public nonces, using the blinding factor
|
||||
// to tweak the second nonce:
|
||||
// * R = R_1 + b*R_2
|
||||
nonce, nonceBlinder, err := computeSigningNonce(
|
||||
combinedNonce, combinedKey.FinalKey, msg,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Next we'll parse out our two secret nonces, which we'll be using in
|
||||
// the core signing process below.
|
||||
var k1, k2 btcec.ModNScalar
|
||||
k1.SetByteSlice(secNonce[:btcec.SecKeyBytesLen])
|
||||
k2.SetByteSlice(secNonce[btcec.SecKeyBytesLen:])
|
||||
|
||||
if k1.IsZero() || k2.IsZero() {
|
||||
return nil, ErrSecretNonceZero
|
||||
}
|
||||
|
||||
nonce.ToAffine()
|
||||
|
||||
nonceKey := btcec.NewPublicKey(&nonce.X, &nonce.Y)
|
||||
|
||||
// If the nonce R has an odd y coordinate, then we'll negate both our
|
||||
// secret nonces.
|
||||
if nonce.Y.IsOdd() {
|
||||
k1.Negate()
|
||||
k2.Negate()
|
||||
}
|
||||
|
||||
privKeyScalar := privKey.Key
|
||||
if privKeyScalar.IsZero() {
|
||||
return nil, ErrSecKeyZero
|
||||
}
|
||||
|
||||
pubKey := privKey.PubKey()
|
||||
combinedKeyYIsOdd := func() bool {
|
||||
combinedKeyBytes := combinedKey.FinalKey.SerializeCompressed()
|
||||
return combinedKeyBytes[0] == secp256k1.PubKeyFormatCompressedOdd
|
||||
}()
|
||||
|
||||
// Next we'll compute the two parity factors for Q, the combined key.
|
||||
// If the key is odd, then we'll negate it.
|
||||
parityCombinedKey := new(btcec.ModNScalar).SetInt(1)
|
||||
if combinedKeyYIsOdd {
|
||||
parityCombinedKey.Negate()
|
||||
}
|
||||
|
||||
// Before we sign below, we'll multiply by our various parity factors
|
||||
// to ensure that the signing key is properly negated (if necessary):
|
||||
// * d = g⋅gacc⋅d'
|
||||
privKeyScalar.Mul(parityCombinedKey).Mul(parityAcc)
|
||||
|
||||
// Next we'll create the challenge hash that commits to the combined
|
||||
// nonce, combined public key and also the message:
|
||||
// * e = H(tag=ChallengeHashTag, R || Q || m) mod n
|
||||
var challengeMsg bytes.Buffer
|
||||
challengeMsg.Write(schnorr.SerializePubKey(nonceKey))
|
||||
challengeMsg.Write(schnorr.SerializePubKey(combinedKey.FinalKey))
|
||||
challengeMsg.Write(msg[:])
|
||||
challengeBytes := chainhash.TaggedHash(
|
||||
ChallengeHashTag, challengeMsg.Bytes(),
|
||||
)
|
||||
var e btcec.ModNScalar
|
||||
e.SetByteSlice(challengeBytes[:])
|
||||
|
||||
// Next, we'll compute a, our aggregation coefficient for the key that
|
||||
// we're signing with.
|
||||
a := aggregationCoefficient(pubKeys, pubKey, keysHash, uniqueKeyIndex)
|
||||
|
||||
// With the aggregation coefficient constructed, we can finally generate our
|
||||
// partial signature as: s = (k_1 + b*k_2 + e*a*d) mod n.
|
||||
s := new(btcec.ModNScalar)
|
||||
s.Add(&k1).Add(k2.Mul(nonceBlinder)).Add(e.Mul(a).Mul(&privKeyScalar))
|
||||
|
||||
sig := NewPartialSignature(s, nonceKey)
|
||||
|
||||
// If we're not in fast sign mode, then we'll also validate our partial
|
||||
// signature.
|
||||
if !opts.fastSign {
|
||||
pubNonce := secNonceToPubNonce(secNonce)
|
||||
sigValid := sig.Verify(
|
||||
pubNonce, combinedNonce, pubKeys, pubKey, msg,
|
||||
signOpts...,
|
||||
)
|
||||
if !sigValid {
|
||||
return nil, fmt.Errorf("sig is invalid!")
|
||||
}
|
||||
}
|
||||
|
||||
return &sig, nil
|
||||
}
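
// signPartialSketch is an illustrative sketch (not part of the upstream
// btcsuite code) of a single signer's call to Sign. It assumes the caller has
// already generated its secret nonce, collected every signer's public nonce
// and aggregated them with AggregateNonces, and that signerKeys includes the
// signer's own public key.
func signPartialSketch(
	secNonce [SecNonceSize]byte, signerKey *btcec.SecretKey,
	combinedNonce [PubNonceSize]byte, signerKeys []*btcec.PublicKey,
	msg [32]byte,
) (*PartialSignature, error) {

	// Sort the key set so every participant derives the same aggregated
	// key; the default (non-fast) mode verifies the produced partial
	// signature before returning it.
	return Sign(
		secNonce, signerKey, combinedNonce, signerKeys, msg,
		WithSortedKeys(),
	)
}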
|
||||
|
||||
// Verify implements partial signature verification given the public nonce for
|
||||
// the signer, aggregate nonce, signer set and finally the message being
|
||||
// signed.
|
||||
func (p *PartialSignature) Verify(
|
||||
pubNonce [PubNonceSize]byte,
|
||||
combinedNonce [PubNonceSize]byte, keySet []*btcec.PublicKey,
|
||||
signingKey *btcec.PublicKey, msg [32]byte, signOpts ...SignOption,
|
||||
) bool {
|
||||
|
||||
pubKey := signingKey.SerializeCompressed()
|
||||
|
||||
return verifyPartialSig(
|
||||
p, pubNonce, combinedNonce, keySet, pubKey, msg, signOpts...,
|
||||
) == nil
|
||||
}
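
// verifyPartialSketch is an illustrative sketch (not part of the upstream
// btcsuite code) showing how a participant checks a partial signature
// received from another signer using the Verify method above. pubNonce is
// the sending signer's own public nonce, while combinedNonce is the
// aggregate of all signers' nonces.
func verifyPartialSketch(
	psig *PartialSignature, pubNonce, combinedNonce [PubNonceSize]byte,
	signerKeys []*btcec.PublicKey, signerPub *btcec.PublicKey,
	msg [32]byte,
) bool {

	// The same sign options used when producing the partial signature must
	// be supplied here, otherwise the reconstructed challenge won't match.
	return psig.Verify(
		pubNonce, combinedNonce, signerKeys, signerPub, msg,
		WithSortedKeys(),
	)
}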
|
||||
|
||||
// verifyPartialSig attempts to verify a partial schnorr signature given the
|
||||
// necessary parameters. This is the internal version of Verify that returns
|
||||
// detailed errors.
|
||||
func verifyPartialSig(
|
||||
partialSig *PartialSignature, pubNonce [PubNonceSize]byte,
|
||||
combinedNonce [PubNonceSize]byte, keySet []*btcec.PublicKey,
|
||||
pubKey []byte, msg [32]byte, signOpts ...SignOption,
|
||||
) error {
|
||||
|
||||
opts := defaultSignOptions()
|
||||
for _, option := range signOpts {
|
||||
option(opts)
|
||||
}
|
||||
|
||||
// First we'll map the internal partial signature back into something
|
||||
// we can manipulate.
|
||||
s := partialSig.S
|
||||
|
||||
// Next we'll parse out the two public nonces into something we can
|
||||
// use.
|
||||
//
|
||||
// Compute the hash of all the keys here as we'll need it to aggregate
|
||||
// the keys and also at the final step of verification.
|
||||
keysHash := keyHashFingerprint(keySet, opts.sortKeys)
|
||||
uniqueKeyIndex := secondUniqueKeyIndex(keySet, opts.sortKeys)
|
||||
|
||||
keyAggOpts := []KeyAggOption{
|
||||
WithKeysHash(keysHash), WithUniqueKeyIndex(uniqueKeyIndex),
|
||||
}
|
||||
switch {
|
||||
case opts.bip86Tweak:
|
||||
keyAggOpts = append(
|
||||
keyAggOpts, WithBIP86KeyTweak(),
|
||||
)
|
||||
case opts.taprootTweak != nil:
|
||||
keyAggOpts = append(
|
||||
keyAggOpts, WithTaprootKeyTweak(opts.taprootTweak),
|
||||
)
|
||||
case len(opts.tweaks) != 0:
|
||||
keyAggOpts = append(keyAggOpts, WithKeyTweaks(opts.tweaks...))
|
||||
}
|
||||
|
||||
// Next we'll construct the aggregated public key based on the set of
|
||||
// signers.
|
||||
combinedKey, parityAcc, _, err := AggregateKeys(
|
||||
keySet, opts.sortKeys, keyAggOpts...,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Next we'll compute the value b, that blinds our second public
|
||||
// nonce:
|
||||
// * b = h(tag=NonceBlindTag, combinedNonce || combinedKey || m).
|
||||
var (
|
||||
nonceMsgBuf bytes.Buffer
|
||||
nonceBlinder btcec.ModNScalar
|
||||
)
|
||||
nonceMsgBuf.Write(combinedNonce[:])
|
||||
nonceMsgBuf.Write(schnorr.SerializePubKey(combinedKey.FinalKey))
|
||||
nonceMsgBuf.Write(msg[:])
|
||||
nonceBlindHash := chainhash.TaggedHash(NonceBlindTag, nonceMsgBuf.Bytes())
|
||||
nonceBlinder.SetByteSlice(nonceBlindHash[:])
|
||||
|
||||
r1J, err := btcec.ParseJacobian(
|
||||
combinedNonce[:btcec.PubKeyBytesLenCompressed],
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
r2J, err := btcec.ParseJacobian(
|
||||
combinedNonce[btcec.PubKeyBytesLenCompressed:],
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// With our nonce blinding value, we'll now combine both the public
|
||||
// nonces, using the blinding factor to tweak the second nonce:
|
||||
// * R = R_1 + b*R_2
|
||||
var nonce btcec.JacobianPoint
|
||||
btcec.ScalarMultNonConst(&nonceBlinder, &r2J, &r2J)
|
||||
btcec.AddNonConst(&r1J, &r2J, &nonce)
|
||||
|
||||
// Next, we'll parse out the set of public nonces this signer used to
|
||||
// generate the signature.
|
||||
pubNonce1J, err := btcec.ParseJacobian(
|
||||
pubNonce[:btcec.PubKeyBytesLenCompressed],
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pubNonce2J, err := btcec.ParseJacobian(
|
||||
pubNonce[btcec.PubKeyBytesLenCompressed:],
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// If the nonce is the infinity point we set it to the Generator.
|
||||
if nonce == infinityPoint {
|
||||
btcec.GeneratorJacobian(&nonce)
|
||||
} else {
|
||||
nonce.ToAffine()
|
||||
}
|
||||
|
||||
// We'll perform a similar aggregation and blinding operation as we did
|
||||
// above for the combined nonces: R' = R_1' + b*R_2'.
|
||||
var pubNonceJ btcec.JacobianPoint
|
||||
|
||||
btcec.ScalarMultNonConst(&nonceBlinder, &pubNonce2J, &pubNonce2J)
|
||||
btcec.AddNonConst(&pubNonce1J, &pubNonce2J, &pubNonceJ)
|
||||
|
||||
pubNonceJ.ToAffine()
|
||||
|
||||
// If the combined nonce used in the challenge hash has an odd y
|
||||
// coordinate, then we'll negate our final public nonce.
|
||||
if nonce.Y.IsOdd() {
|
||||
pubNonceJ.Y.Negate(1)
|
||||
pubNonceJ.Y.Normalize()
|
||||
}
|
||||
|
||||
// Next we'll create the challenge hash that commits to the combined
|
||||
// nonce, combined public key and also the message:
|
||||
// * e = H(tag=ChallengeHashTag, R || Q || m) mod n
|
||||
var challengeMsg bytes.Buffer
|
||||
challengeMsg.Write(
|
||||
schnorr.SerializePubKey(
|
||||
btcec.NewPublicKey(
|
||||
&nonce.X, &nonce.Y,
|
||||
),
|
||||
),
|
||||
)
|
||||
challengeMsg.Write(schnorr.SerializePubKey(combinedKey.FinalKey))
|
||||
challengeMsg.Write(msg[:])
|
||||
challengeBytes := chainhash.TaggedHash(
|
||||
ChallengeHashTag, challengeMsg.Bytes(),
|
||||
)
|
||||
var e btcec.ModNScalar
|
||||
e.SetByteSlice(challengeBytes[:])
|
||||
|
||||
signingKey, err := btcec.ParsePubKey(pubKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Next, we'll compute a, our aggregation coefficient for the key that
|
||||
// we're signing with.
|
||||
a := aggregationCoefficient(keySet, signingKey, keysHash, uniqueKeyIndex)
|
||||
|
||||
// If the combined key has an odd y coordinate, then we'll negate
|
||||
// the parity factor for the signing key.
|
||||
parityCombinedKey := new(btcec.ModNScalar).SetInt(1)
|
||||
combinedKeyBytes := combinedKey.FinalKey.SerializeCompressed()
|
||||
if combinedKeyBytes[0] == secp256k1.PubKeyFormatCompressedOdd {
|
||||
parityCombinedKey.Negate()
|
||||
}
|
||||
|
||||
// Next, we'll construct the final parity factor by multiplying the
|
||||
// sign key parity factor with the accumulated parity factor for all
|
||||
// the keys.
|
||||
finalParityFactor := parityCombinedKey.Mul(parityAcc)
|
||||
|
||||
var signKeyJ btcec.JacobianPoint
|
||||
signingKey.AsJacobian(&signKeyJ)
|
||||
|
||||
// In the final step, we'll check that: s*G == R' + e*a*g*P.
|
||||
var sG, rP btcec.JacobianPoint
|
||||
btcec.ScalarBaseMultNonConst(s, &sG)
|
||||
btcec.ScalarMultNonConst(e.Mul(a).Mul(finalParityFactor), &signKeyJ, &rP)
|
||||
btcec.AddNonConst(&rP, &pubNonceJ, &rP)
|
||||
|
||||
sG.ToAffine()
|
||||
rP.ToAffine()
|
||||
|
||||
if sG != rP {
|
||||
return ErrPartialSigInvalid
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CombineOption is a functional option argument that allows callers to modify the
|
||||
// way we combine musig2 schnorr signatures.
|
||||
type CombineOption func(*combineOptions)
|
||||
|
||||
// combineOptions houses the set of functional options that can be used to
|
||||
// modify the method used to combine the musig2 partial signatures.
|
||||
type combineOptions struct {
|
||||
msg [32]byte
|
||||
|
||||
combinedKey *btcec.PublicKey
|
||||
|
||||
tweakAcc *btcec.ModNScalar
|
||||
}
|
||||
|
||||
// defaultCombineOptions returns the default set of combine options.
|
||||
func defaultCombineOptions() *combineOptions {
|
||||
return &combineOptions{}
|
||||
}
|
||||
|
||||
// WithTweakedCombine is a functional option that allows callers to specify
|
||||
// that the signature was produced using a tweaked aggregated public key. In
|
||||
// order to properly aggregate the partial signatures, the caller must specify
|
||||
// enough information to reconstruct the challenge, and also the final
|
||||
// accumulated tweak value.
|
||||
func WithTweakedCombine(
|
||||
msg [32]byte, keys []*btcec.PublicKey,
|
||||
tweaks []KeyTweakDesc, sort bool,
|
||||
) CombineOption {
|
||||
|
||||
return func(o *combineOptions) {
|
||||
combinedKey, _, tweakAcc, _ := AggregateKeys(
|
||||
keys, sort, WithKeyTweaks(tweaks...),
|
||||
)
|
||||
|
||||
o.msg = msg
|
||||
o.combinedKey = combinedKey.FinalKey
|
||||
o.tweakAcc = tweakAcc
|
||||
}
|
||||
}
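
// tweakedCombineOptsSketch is an illustrative sketch (not part of the
// upstream btcsuite code) showing how WithTweakedCombine is used to build the
// combine options when the partial signatures were produced under a tweaked
// aggregated key. The message, key set, tweaks and sort flag must match the
// values used during signing so the challenge can be reconstructed.
func tweakedCombineOptsSketch(
	msg [32]byte, keys []*btcec.PublicKey, tweaks []KeyTweakDesc,
) []CombineOption {
	return []CombineOption{
		WithTweakedCombine(msg, keys, tweaks, true),
	}
}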
|
||||
|
||||
// WithTaprootTweakedCombine is similar to the WithTweakedCombine option, but
|
||||
// assumes a BIP 341 context where the final tweaked key is to be used as the
|
||||
// output key, where the internal key is the aggregated key pre-tweak.
|
||||
//
|
||||
// This option should be used over WithTweakedCombine when attempting to
|
||||
// aggregate signatures for a top-level taproot keyspend, where the output key
|
||||
// commits to a script root.
|
||||
func WithTaprootTweakedCombine(
|
||||
msg [32]byte, keys []*btcec.PublicKey,
|
||||
scriptRoot []byte, sort bool,
|
||||
) CombineOption {
|
||||
|
||||
return func(o *combineOptions) {
|
||||
combinedKey, _, tweakAcc, _ := AggregateKeys(
|
||||
keys, sort, WithTaprootKeyTweak(scriptRoot),
|
||||
)
|
||||
|
||||
o.msg = msg
|
||||
o.combinedKey = combinedKey.FinalKey
|
||||
o.tweakAcc = tweakAcc
|
||||
}
|
||||
}
|
||||
|
||||
// WithBip86TweakedCombine is similar to the WithTaprootTweakedCombine option,
|
||||
// but assumes a BIP 341 + BIP 86 context where the final tweaked key is to be
|
||||
// used as the output key, where the internal key is the aggregated key
|
||||
// pre-tweak.
|
||||
//
|
||||
// This option should be used over WithTaprootTweakedCombine when attempting to
|
||||
// aggregate signatures for a top-level taproot keyspend, where the output key
|
||||
// was generated using BIP 86.
|
||||
func WithBip86TweakedCombine(
|
||||
msg [32]byte, keys []*btcec.PublicKey,
|
||||
sort bool,
|
||||
) CombineOption {
|
||||
|
||||
return func(o *combineOptions) {
|
||||
combinedKey, _, tweakAcc, _ := AggregateKeys(
|
||||
keys, sort, WithBIP86KeyTweak(),
|
||||
)
|
||||
|
||||
o.msg = msg
|
||||
o.combinedKey = combinedKey.FinalKey
|
||||
o.tweakAcc = tweakAcc
|
||||
}
|
||||
}
|
||||
|
||||
// CombineSigs combines the set of public keys given the final aggregated
|
||||
// nonce, and the series of partial signatures for each nonce.
|
||||
func CombineSigs(
|
||||
combinedNonce *btcec.PublicKey,
|
||||
partialSigs []*PartialSignature,
|
||||
combineOpts ...CombineOption,
|
||||
) *schnorr.Signature {
|
||||
|
||||
// First, parse the set of optional combine options.
|
||||
opts := defaultCombineOptions()
|
||||
for _, option := range combineOpts {
|
||||
option(opts)
|
||||
}
|
||||
|
||||
// If signer keys and tweaks are specified, then we need to carry out
|
||||
// some intermediate steps before we can combine the signature.
|
||||
var tweakProduct *btcec.ModNScalar
|
||||
if opts.combinedKey != nil && opts.tweakAcc != nil {
|
||||
// Next, we'll construct the parity factor of the combined key,
|
||||
// negating it if the combined key has an odd y coordinate.
|
||||
parityFactor := new(btcec.ModNScalar).SetInt(1)
|
||||
combinedKeyBytes := opts.combinedKey.SerializeCompressed()
|
||||
if combinedKeyBytes[0] == secp256k1.PubKeyFormatCompressedOdd {
|
||||
parityFactor.Negate()
|
||||
}
|
||||
|
||||
// Next we'll reconstruct e, the challenge hash, based on the
|
||||
// nonce and combined public key.
|
||||
// * e = H(tag=ChallengeHashTag, R || Q || m) mod n
|
||||
var challengeMsg bytes.Buffer
|
||||
challengeMsg.Write(schnorr.SerializePubKey(combinedNonce))
|
||||
challengeMsg.Write(schnorr.SerializePubKey(opts.combinedKey))
|
||||
challengeMsg.Write(opts.msg[:])
|
||||
challengeBytes := chainhash.TaggedHash(
|
||||
ChallengeHashTag, challengeMsg.Bytes(),
|
||||
)
|
||||
var e btcec.ModNScalar
|
||||
e.SetByteSlice(challengeBytes[:])
|
||||
|
||||
tweakProduct = new(btcec.ModNScalar).Set(&e)
|
||||
tweakProduct.Mul(opts.tweakAcc).Mul(parityFactor)
|
||||
}
|
||||
|
||||
// Next, we'll sum all of the partial signatures to produce the combined s value.
|
||||
var combinedSig btcec.ModNScalar
|
||||
for _, partialSig := range partialSigs {
|
||||
combinedSig.Add(partialSig.S)
|
||||
}
|
||||
|
||||
// If the tweak product was set above, then we'll need to add the value
|
||||
// at the very end in order to produce a valid signature under the
|
||||
// final tweaked key.
|
||||
if tweakProduct != nil {
|
||||
combinedSig.Add(tweakProduct)
|
||||
}
|
||||
|
||||
// TODO(roasbeef): less verbose way to get the x coord...
|
||||
var nonceJ btcec.JacobianPoint
|
||||
combinedNonce.AsJacobian(&nonceJ)
|
||||
nonceJ.ToAffine()
|
||||
|
||||
return schnorr.NewSignature(&nonceJ.X, &combinedSig)
|
||||
}
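
// combineFlowSketch is an illustrative sketch (not part of the upstream
// btcsuite code) of the final aggregation flow: derive the final nonce R from
// the aggregated public nonces, then fold every partial signature into a
// complete BIP 340 Schnorr signature. It assumes pubNonces holds each
// signer's public nonce and that no tweaks were applied.
func combineFlowSketch(
	signerKeys []*btcec.PublicKey, pubNonces [][PubNonceSize]byte,
	partialSigs []*PartialSignature, msg [32]byte,
) (*schnorr.Signature, error) {

	// Aggregate the individual public nonces into the combined nonce.
	combinedNonce, err := AggregateNonces(pubNonces)
	if err != nil {
		return nil, err
	}

	// Aggregate the signer keys; the sort flag here must match the one the
	// signers used.
	combinedKey, _, _, err := AggregateKeys(signerKeys, false)
	if err != nil {
		return nil, err
	}

	// Recompute the final signing nonce R = R_1 + b*R_2.
	finalNonceJ, _, err := computeSigningNonce(
		combinedNonce, combinedKey.FinalKey, msg,
	)
	if err != nil {
		return nil, err
	}
	finalNonceJ.ToAffine()
	finalNonce := btcec.NewPublicKey(&finalNonceJ.X, &finalNonceJ.Y)

	// Sum the partial signatures into the final (R, s) signature.
	return CombineSigs(finalNonce, partialSigs), nil
}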
|
||||
330
pkg/crypto/ec/musig2/sign_test.go
Normal file
@@ -0,0 +1,330 @@
|
||||
// Copyright 2013-2022 The btcsuite developers
|
||||
|
||||
package musig2
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
const (
|
||||
signVerifyTestVectorFileName = "sign_verify_vectors.json"
|
||||
sigCombineTestVectorFileName = "sig_agg_vectors.json"
|
||||
)
|
||||
|
||||
type signVerifyValidCase struct {
|
||||
Indices []int `json:"key_indices"`
|
||||
NonceIndices []int `json:"nonce_indices"`
|
||||
AggNonceIndex int `json:"aggnonce_index"`
|
||||
MsgIndex int `json:"msg_index"`
|
||||
SignerIndex int `json:"signer_index"`
|
||||
Expected string `json:"expected"`
|
||||
}
|
||||
|
||||
type signErrorCase struct {
|
||||
Indices []int `json:"key_indices"`
|
||||
AggNonceIndex int `json:"aggnonce_index"`
|
||||
MsgIndex int `json:"msg_index"`
|
||||
SecNonceIndex int `json:"secnonce_index"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type verifyFailCase struct {
|
||||
Sig string `json:"sig"`
|
||||
Indices []int `json:"key_indices"`
|
||||
NonceIndices []int `json:"nonce_indices"`
|
||||
MsgIndex int `json:"msg_index"`
|
||||
SignerIndex int `json:"signer_index"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type verifyErrorCase struct {
|
||||
Sig string `json:"sig"`
|
||||
Indices []int `json:"key_indices"`
|
||||
NonceIndices []int `json:"nonce_indices"`
|
||||
MsgIndex int `json:"msg_index"`
|
||||
SignerIndex int `json:"signer_index"`
|
||||
Comment string `json:"comment"`
|
||||
}
|
||||
|
||||
type signVerifyTestVectors struct {
|
||||
SecKey string `json:"sk"`
|
||||
PubKeys []string `json:"pubkeys"`
|
||||
PrivNonces []string `json:"secnonces"`
|
||||
PubNonces []string `json:"pnonces"`
|
||||
AggNonces []string `json:"aggnonces"`
|
||||
Msgs []string `json:"msgs"`
|
||||
ValidCases []signVerifyValidCase `json:"valid_test_cases"`
|
||||
SignErrorCases []signErrorCase `json:"sign_error_test_cases"`
|
||||
VerifyFailCases []verifyFailCase `json:"verify_fail_test_cases"`
|
||||
VerifyErrorCases []verifyErrorCase `json:"verify_error_test_cases"`
|
||||
}
|
||||
|
||||
// TestMusig2SignVerify tests that we pass the musig2 verification tests.
|
||||
func TestMusig2SignVerify(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, signVerifyTestVectorFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCases signVerifyTestVectors
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||
privKey, _ := btcec.SecKeyFromBytes(mustParseHex(testCases.SecKey))
|
||||
for i, testCase := range testCases.ValidCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf("valid_case_%v", i)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
pubKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
pubNonces := pubNoncesFromIndices(
|
||||
t, testCase.NonceIndices, testCases.PubNonces,
|
||||
)
|
||||
combinedNonce, err := AggregateNonces(pubNonces)
|
||||
require.NoError(t, err)
|
||||
var msg [32]byte
|
||||
copy(msg[:], mustParseHex(testCases.Msgs[testCase.MsgIndex]))
|
||||
var secNonce [SecNonceSize]byte
|
||||
copy(secNonce[:], mustParseHex(testCases.PrivNonces[0]))
|
||||
partialSig, err := Sign(
|
||||
secNonce, privKey, combinedNonce, pubKeys,
|
||||
msg,
|
||||
)
|
||||
var partialSigBytes [32]byte
|
||||
partialSig.S.PutBytesUnchecked(partialSigBytes[:])
|
||||
require.Equal(
|
||||
t, hex.Enc(partialSigBytes[:]),
|
||||
hex.Enc(mustParseHex(testCase.Expected)),
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
for _, testCase := range testCases.SignErrorCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf(
|
||||
"invalid_case_%v",
|
||||
strings.ToLower(testCase.Comment),
|
||||
)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
pubKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
if err != nil {
|
||||
require.ErrorIs(t, err, secp256k1.ErrPubKeyNotOnCurve)
|
||||
return
|
||||
}
|
||||
var aggNonce [PubNonceSize]byte
|
||||
copy(
|
||||
aggNonce[:],
|
||||
mustParseHex(
|
||||
testCases.AggNonces[testCase.AggNonceIndex],
|
||||
),
|
||||
)
|
||||
var msg [32]byte
|
||||
copy(msg[:], mustParseHex(testCases.Msgs[testCase.MsgIndex]))
|
||||
var secNonce [SecNonceSize]byte
|
||||
copy(
|
||||
secNonce[:],
|
||||
mustParseHex(
|
||||
testCases.PrivNonces[testCase.SecNonceIndex],
|
||||
),
|
||||
)
|
||||
_, err = Sign(
|
||||
secNonce, privKey, aggNonce, pubKeys,
|
||||
msg,
|
||||
)
|
||||
require.Error(t, err)
|
||||
},
|
||||
)
|
||||
}
|
||||
for _, testCase := range testCases.VerifyFailCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf(
|
||||
"verify_fail_%v",
|
||||
strings.ToLower(testCase.Comment),
|
||||
)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
pubKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
pubNonces := pubNoncesFromIndices(
|
||||
t, testCase.NonceIndices, testCases.PubNonces,
|
||||
)
|
||||
combinedNonce, err := AggregateNonces(pubNonces)
|
||||
require.NoError(t, err)
|
||||
var msg [32]byte
|
||||
copy(
|
||||
msg[:],
|
||||
mustParseHex(testCases.Msgs[testCase.MsgIndex]),
|
||||
)
|
||||
var secNonce [SecNonceSize]byte
|
||||
copy(secNonce[:], mustParseHex(testCases.PrivNonces[0]))
|
||||
signerNonce := secNonceToPubNonce(secNonce)
|
||||
var partialSig PartialSignature
|
||||
err = partialSig.Decode(
|
||||
bytes.NewReader(mustParseHex(testCase.Sig)),
|
||||
)
|
||||
if err != nil && strings.Contains(
|
||||
testCase.Comment, "group size",
|
||||
) {
|
||||
require.ErrorIs(t, err, ErrPartialSigInvalid)
|
||||
}
|
||||
err = verifyPartialSig(
|
||||
&partialSig, signerNonce, combinedNonce,
|
||||
pubKeys, privKey.PubKey().SerializeCompressed(),
|
||||
msg,
|
||||
)
|
||||
require.Error(t, err)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
for _, testCase := range testCases.VerifyErrorCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf(
|
||||
"verify_error_%v",
|
||||
strings.ToLower(testCase.Comment),
|
||||
)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
switch testCase.Comment {
|
||||
case "Invalid pubnonce":
|
||||
pubNonces := pubNoncesFromIndices(
|
||||
t, testCase.NonceIndices, testCases.PubNonces,
|
||||
)
|
||||
_, err := AggregateNonces(pubNonces)
|
||||
require.ErrorIs(t, err, secp256k1.ErrPubKeyNotOnCurve)
|
||||
|
||||
case "Invalid pubkey":
|
||||
_, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
require.ErrorIs(t, err, secp256k1.ErrPubKeyNotOnCurve)
|
||||
|
||||
default:
|
||||
t.Fatalf("unhandled case: %v", testCase.Comment)
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
type sigCombineValidCase struct {
|
||||
AggNonce string `json:"aggnonce"`
|
||||
NonceIndices []int `json:"nonce_indices"`
|
||||
Indices []int `json:"key_indices"`
|
||||
TweakIndices []int `json:"tweak_indices"`
|
||||
IsXOnly []bool `json:"is_xonly"`
|
||||
PSigIndices []int `json:"psig_indices"`
|
||||
Expected string `json:"expected"`
|
||||
}
|
||||
|
||||
type sigCombineTestVectors struct {
|
||||
PubKeys []string `json:"pubkeys"`
|
||||
PubNonces []string `json:"pnonces"`
|
||||
Tweaks []string `json:"tweaks"`
|
||||
Psigs []string `json:"psigs"`
|
||||
Msg string `json:"msg"`
|
||||
ValidCases []sigCombineValidCase `json:"valid_test_cases"`
|
||||
}
|
||||
|
||||
func pSigsFromIndicies(
|
||||
t *testing.T, sigs []string,
|
||||
indices []int,
|
||||
) []*PartialSignature {
|
||||
pSigs := make([]*PartialSignature, len(indices))
|
||||
for i, idx := range indices {
|
||||
var pSig PartialSignature
|
||||
err := pSig.Decode(bytes.NewReader(mustParseHex(sigs[idx])))
|
||||
require.NoError(t, err)
|
||||
pSigs[i] = &pSig
|
||||
}
|
||||
return pSigs
|
||||
}
|
||||
|
||||
// TestMusig2SignCombine tests that we pass the musig2 sig combination tests.
|
||||
func TestMusig2SignCombine(t *testing.T) {
|
||||
t.Parallel()
|
||||
testVectorPath := path.Join(
|
||||
testVectorBaseDir, sigCombineTestVectorFileName,
|
||||
)
|
||||
testVectorBytes, err := os.ReadFile(testVectorPath)
|
||||
require.NoError(t, err)
|
||||
var testCases sigCombineTestVectors
|
||||
require.NoError(t, json.Unmarshal(testVectorBytes, &testCases))
|
||||
var msg [32]byte
|
||||
copy(msg[:], mustParseHex(testCases.Msg))
|
||||
for i, testCase := range testCases.ValidCases {
|
||||
testCase := testCase
|
||||
testName := fmt.Sprintf("valid_case_%v", i)
|
||||
t.Run(
|
||||
testName, func(t *testing.T) {
|
||||
pubKeys, err := keysFromIndices(
|
||||
t, testCase.Indices, testCases.PubKeys,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
pubNonces := pubNoncesFromIndices(
|
||||
t, testCase.NonceIndices, testCases.PubNonces,
|
||||
)
|
||||
partialSigs := pSigsFromIndicies(
|
||||
t, testCases.Psigs, testCase.PSigIndices,
|
||||
)
|
||||
var (
|
||||
combineOpts []CombineOption
|
||||
keyOpts []KeyAggOption
|
||||
)
|
||||
if len(testCase.TweakIndices) > 0 {
|
||||
tweaks := tweaksFromIndices(
|
||||
t, testCase.TweakIndices,
|
||||
testCases.Tweaks, testCase.IsXOnly,
|
||||
)
|
||||
combineOpts = append(
|
||||
combineOpts, WithTweakedCombine(
|
||||
msg, pubKeys, tweaks, false,
|
||||
),
|
||||
)
|
||||
keyOpts = append(keyOpts, WithKeyTweaks(tweaks...))
|
||||
}
|
||||
combinedKey, _, _, err := AggregateKeys(
|
||||
pubKeys, false, keyOpts...,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
combinedNonce, err := AggregateNonces(pubNonces)
|
||||
require.NoError(t, err)
|
||||
finalNonceJ, _, err := computeSigningNonce(
|
||||
combinedNonce, combinedKey.FinalKey, msg,
|
||||
)
|
||||
finalNonceJ.ToAffine()
|
||||
finalNonce := btcec.NewPublicKey(
|
||||
&finalNonceJ.X, &finalNonceJ.Y,
|
||||
)
|
||||
combinedSig := CombineSigs(
|
||||
finalNonce, partialSigs, combineOpts...,
|
||||
)
|
||||
require.Equal(
|
||||
t,
|
||||
strings.ToLower(testCase.Expected),
|
||||
hex.Enc(combinedSig.Serialize()),
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
52
pkg/crypto/ec/pubkey.go
Normal file
@@ -0,0 +1,52 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// These constants define the lengths of serialized public keys.
|
||||
|
||||
const (
|
||||
PubKeyBytesLenCompressed = 33
|
||||
)
|
||||
|
||||
const (
|
||||
pubkeyCompressed byte = 0x2 // y_bit + x coord
|
||||
pubkeyUncompressed byte = 0x4 // x coord + y coord
|
||||
pubkeyHybrid byte = 0x6 // y_bit + x coord + y coord
|
||||
)
|
||||
|
||||
// IsCompressedPubKey returns true if the passed serialized public key has
|
||||
// been encoded in compressed format, and false otherwise.
|
||||
func IsCompressedPubKey(pubKey []byte) bool {
|
||||
// The public key is only compressed if it is the correct length and
|
||||
// the format (first byte) is one of the compressed pubkey values.
|
||||
return len(pubKey) == PubKeyBytesLenCompressed &&
|
||||
(pubKey[0]&^byte(0x1) == pubkeyCompressed)
|
||||
}
|
||||
|
||||
// ParsePubKey parses a public key for a koblitz curve from a bytestring into an
|
||||
// ecdsa.PublicKey, verifying that it is valid. It supports compressed,
|
||||
// uncompressed and hybrid formats.
|
||||
func ParsePubKey(pubKeyStr []byte) (*PublicKey, error) {
|
||||
return secp256k1.ParsePubKey(pubKeyStr)
|
||||
}
|
||||
|
||||
// PublicKey is an ecdsa.PublicKey with additional functions to
|
||||
// serialize in uncompressed, compressed, and hybrid formats.
|
||||
type PublicKey = secp256k1.PublicKey
|
||||
|
||||
// NewPublicKey instantiates a new public key with the given x and y
|
||||
// coordinates.
|
||||
//
|
||||
// It should be noted that, unlike ParsePubKey, since this accepts arbitrary x
|
||||
// and y coordinates, it allows creation of public keys that are not valid
|
||||
// points on the secp256k1 curve. The IsOnCurve method of the returned instance
|
||||
// can be used to determine validity.
|
||||
func NewPublicKey(x, y *FieldVal) *PublicKey {
|
||||
return secp256k1.NewPublicKey(x, y)
|
||||
}
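
// parsePubKeySketch is an illustrative sketch (not part of the upstream
// btcsuite code) contrasting the two checks exposed by this file: the cheap
// structural check performed by IsCompressedPubKey and the full validation
// performed by ParsePubKey.
func parsePubKeySketch(serialized []byte) (*PublicKey, error) {
	// IsCompressedPubKey only inspects the length and the format byte; it
	// does not prove the point lies on the curve.
	_ = IsCompressedPubKey(serialized)

	// ParsePubKey performs the full validation and rejects coordinates
	// that are not on the secp256k1 curve.
	return ParsePubKey(serialized)
}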
|
||||
321
pkg/crypto/ec/pubkey_test.go
Normal file
@@ -0,0 +1,321 @@
|
||||
// Copyright (c) 2013-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
type pubKeyTest struct {
|
||||
name string
|
||||
key []byte
|
||||
format byte
|
||||
isValid bool
|
||||
}
|
||||
|
||||
var pubKeyTests = []pubKeyTest{
|
||||
// pubkey from bitcoin blockchain tx
|
||||
// 0437cd7f8525ceed2324359c2d0ba26006d92d85
|
||||
{
|
||||
name: "uncompressed ok",
|
||||
key: []byte{
|
||||
0x04, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: true,
|
||||
format: pubkeyUncompressed,
|
||||
},
|
||||
{
|
||||
name: "uncompressed x changed",
|
||||
key: []byte{
|
||||
0x04, 0x15, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "uncompressed y changed",
|
||||
key: []byte{
|
||||
0x04, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa4,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "uncompressed claims compressed",
|
||||
key: []byte{
|
||||
0x03, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "uncompressed as hybrid ok",
|
||||
key: []byte{
|
||||
0x07, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: true,
|
||||
format: pubkeyHybrid,
|
||||
},
|
||||
{
|
||||
name: "uncompressed as hybrid wrong",
|
||||
key: []byte{
|
||||
0x06, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
// from tx 0b09c51c51ff762f00fb26217269d2a18e77a4fa87d69b3c363ab4df16543f20
|
||||
{
|
||||
name: "compressed ok (ybit = 0)",
|
||||
key: []byte{
|
||||
0x02, 0xce, 0x0b, 0x14, 0xfb, 0x84, 0x2b, 0x1b,
|
||||
0xa5, 0x49, 0xfd, 0xd6, 0x75, 0xc9, 0x80, 0x75, 0xf1,
|
||||
0x2e, 0x9c, 0x51, 0x0f, 0x8e, 0xf5, 0x2b, 0xd0, 0x21,
|
||||
0xa9, 0xa1, 0xf4, 0x80, 0x9d, 0x3b, 0x4d,
|
||||
},
|
||||
isValid: true,
|
||||
format: pubkeyCompressed,
|
||||
},
|
||||
// from tx fdeb8e72524e8dab0da507ddbaf5f88fe4a933eb10a66bc4745bb0aa11ea393c
|
||||
{
|
||||
name: "compressed ok (ybit = 1)",
|
||||
key: []byte{
|
||||
0x03, 0x26, 0x89, 0xc7, 0xc2, 0xda, 0xb1, 0x33,
|
||||
0x09, 0xfb, 0x14, 0x3e, 0x0e, 0x8f, 0xe3, 0x96, 0x34,
|
||||
0x25, 0x21, 0x88, 0x7e, 0x97, 0x66, 0x90, 0xb6, 0xb4,
|
||||
0x7f, 0x5b, 0x2a, 0x4b, 0x7d, 0x44, 0x8e,
|
||||
},
|
||||
isValid: true,
|
||||
format: pubkeyCompressed,
|
||||
},
|
||||
{
|
||||
name: "compressed claims uncompressed (ybit = 0)",
|
||||
key: []byte{
|
||||
0x04, 0xce, 0x0b, 0x14, 0xfb, 0x84, 0x2b, 0x1b,
|
||||
0xa5, 0x49, 0xfd, 0xd6, 0x75, 0xc9, 0x80, 0x75, 0xf1,
|
||||
0x2e, 0x9c, 0x51, 0x0f, 0x8e, 0xf5, 0x2b, 0xd0, 0x21,
|
||||
0xa9, 0xa1, 0xf4, 0x80, 0x9d, 0x3b, 0x4d,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "compressed claims uncompressed (ybit = 1)",
|
||||
key: []byte{
|
||||
0x05, 0x26, 0x89, 0xc7, 0xc2, 0xda, 0xb1, 0x33,
|
||||
0x09, 0xfb, 0x14, 0x3e, 0x0e, 0x8f, 0xe3, 0x96, 0x34,
|
||||
0x25, 0x21, 0x88, 0x7e, 0x97, 0x66, 0x90, 0xb6, 0xb4,
|
||||
0x7f, 0x5b, 0x2a, 0x4b, 0x7d, 0x44, 0x8e,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "wrong length",
|
||||
key: []byte{0x05},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "X == P",
|
||||
key: []byte{
|
||||
0x04, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFC, 0x2F, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "X > P",
|
||||
key: []byte{
|
||||
0x04, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFE, 0xFF, 0xFF, 0xFD, 0x2F, 0xb2, 0xe0,
|
||||
0xea, 0xdd, 0xfb, 0x84, 0xcc, 0xf9, 0x74, 0x44, 0x64,
|
||||
0xf8, 0x2e, 0x16, 0x0b, 0xfa, 0x9b, 0x8b, 0x64, 0xf9,
|
||||
0xd4, 0xc0, 0x3f, 0x99, 0x9b, 0x86, 0x43, 0xf6, 0x56,
|
||||
0xb4, 0x12, 0xa3,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "Y == P",
|
||||
key: []byte{
|
||||
0x04, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF,
|
||||
0xFF, 0xFC, 0x2F,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "Y > P",
|
||||
key: []byte{
|
||||
0x04, 0x11, 0xdb, 0x93, 0xe1, 0xdc, 0xdb, 0x8a,
|
||||
0x01, 0x6b, 0x49, 0x84, 0x0f, 0x8c, 0x53, 0xbc, 0x1e,
|
||||
0xb6, 0x8a, 0x38, 0x2e, 0x97, 0xb1, 0x48, 0x2e, 0xca,
|
||||
0xd7, 0xb1, 0x48, 0xa6, 0x90, 0x9a, 0x5c, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
|
||||
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF,
|
||||
0xFF, 0xFD, 0x2F,
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
name: "hybrid",
|
||||
key: []byte{
|
||||
0x06, 0x79, 0xbe, 0x66, 0x7e, 0xf9, 0xdc, 0xbb,
|
||||
0xac, 0x55, 0xa0, 0x62, 0x95, 0xce, 0x87, 0x0b, 0x07,
|
||||
0x02, 0x9b, 0xfc, 0xdb, 0x2d, 0xce, 0x28, 0xd9, 0x59,
|
||||
0xf2, 0x81, 0x5b, 0x16, 0xf8, 0x17, 0x98, 0x48, 0x3a,
|
||||
0xda, 0x77, 0x26, 0xa3, 0xc4, 0x65, 0x5d, 0xa4, 0xfb,
|
||||
0xfc, 0x0e, 0x11, 0x08, 0xa8, 0xfd, 0x17, 0xb4, 0x48,
|
||||
0xa6, 0x85, 0x54, 0x19, 0x9c, 0x47, 0xd0, 0x8f, 0xfb,
|
||||
0x10, 0xd4, 0xb8,
|
||||
},
|
||||
format: pubkeyHybrid,
|
||||
isValid: true,
|
||||
},
|
||||
}
|
||||
|
||||
func TestPubKeys(t *testing.T) {
|
||||
for _, test := range pubKeyTests {
|
||||
pk, err := ParsePubKey(test.key)
|
||||
if err != nil {
|
||||
if test.isValid {
|
||||
t.Errorf(
|
||||
"%s pubkey failed when shouldn't %v",
|
||||
test.name, err,
|
||||
)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if !test.isValid {
|
||||
t.Errorf(
|
||||
"%s counted as valid when it should fail",
|
||||
test.name,
|
||||
)
|
||||
continue
|
||||
}
|
||||
var pkStr []byte
|
||||
switch test.format {
|
||||
case pubkeyUncompressed:
|
||||
pkStr = pk.SerializeUncompressed()
|
||||
case pubkeyCompressed:
|
||||
pkStr = pk.SerializeCompressed()
|
||||
case pubkeyHybrid:
|
||||
pkStr = test.key
|
||||
}
|
||||
if !utils.FastEqual(test.key, pkStr) {
|
||||
t.Errorf(
|
||||
"%s pubkey: serialized keys do not match.",
|
||||
test.name,
|
||||
)
|
||||
spew.Dump(test.key)
|
||||
spew.Dump(pkStr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestPublicKeyIsEqual(t *testing.T) {
|
||||
pubKey1, err := ParsePubKey(
|
||||
[]byte{
|
||||
0x03, 0x26, 0x89, 0xc7, 0xc2, 0xda, 0xb1, 0x33,
|
||||
0x09, 0xfb, 0x14, 0x3e, 0x0e, 0x8f, 0xe3, 0x96, 0x34,
|
||||
0x25, 0x21, 0x88, 0x7e, 0x97, 0x66, 0x90, 0xb6, 0xb4,
|
||||
0x7f, 0x5b, 0x2a, 0x4b, 0x7d, 0x44, 0x8e,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parse raw bytes for pubKey1: %v", err)
|
||||
}
|
||||
pubKey2, err := ParsePubKey(
|
||||
[]byte{
|
||||
0x02, 0xce, 0x0b, 0x14, 0xfb, 0x84, 0x2b, 0x1b,
|
||||
0xa5, 0x49, 0xfd, 0xd6, 0x75, 0xc9, 0x80, 0x75, 0xf1,
|
||||
0x2e, 0x9c, 0x51, 0x0f, 0x8e, 0xf5, 0x2b, 0xd0, 0x21,
|
||||
0xa9, 0xa1, 0xf4, 0x80, 0x9d, 0x3b, 0x4d,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parse raw bytes for pubKey2: %v", err)
|
||||
}
|
||||
if !pubKey1.IsEqual(pubKey1) {
|
||||
t.Fatalf(
|
||||
"value of IsEqual is incorrect, %v is "+
|
||||
"equal to %v", pubKey1, pubKey1,
|
||||
)
|
||||
}
|
||||
if pubKey1.IsEqual(pubKey2) {
|
||||
t.Fatalf(
|
||||
"value of IsEqual is incorrect, %v is not "+
|
||||
"equal to %v", pubKey1, pubKey2,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsCompressed(t *testing.T) {
|
||||
for _, test := range pubKeyTests {
|
||||
isCompressed := IsCompressedPubKey(test.key)
|
||||
wantCompressed := (test.format == pubkeyCompressed)
|
||||
if isCompressed != wantCompressed {
|
||||
t.Fatalf(
|
||||
"%s (%x) pubkey: unexpected compressed result, "+
|
||||
"got %v, want %v", test.name, test.key,
|
||||
isCompressed, wantCompressed,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
173
pkg/crypto/ec/schnorr/bench_test.go
Normal file
@@ -0,0 +1,173 @@
|
||||
// Copyright 2013-2016 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package schnorr
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
"next.orly.dev/pkg/crypto/sha256"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
)
|
||||
|
||||
// hexToBytes converts the passed hex string into bytes and will panic if there
|
||||
// is an error. This is only provided for the hard-coded constants, so errors in
|
||||
// the source code can be detected. It will only (and must only) be called with
|
||||
// hard-coded values.
|
||||
func hexToBytes(s string) []byte {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// hexToModNScalar converts the passed hex string into a ModNScalar and will
|
||||
// panic if there is an error. This is only provided for the hard-coded
|
||||
|
||||
// constants, so errors in the source code can be detected. It will only (and
|
||||
|
||||
// must only) be called with hard-coded values.
|
||||
func hexToModNScalar(s string) *btcec.ModNScalar {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var scalar btcec.ModNScalar
|
||||
if overflow := scalar.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod N scalar: " + s)
|
||||
}
|
||||
return &scalar
|
||||
}
|
||||
|
||||
// hexToFieldVal converts the passed hex string into a FieldVal and will panic
|
||||
// if there is an error. This is only provided for the hard-coded constants, so
|
||||
// errors in the source code can be detected. It will only (and must only) be
|
||||
// called with hard-coded values.
|
||||
func hexToFieldVal(s string) *btcec.FieldVal {
|
||||
b, err := hex.Dec(s)
|
||||
if err != nil {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
var f btcec.FieldVal
|
||||
if overflow := f.SetByteSlice(b); overflow {
|
||||
panic("hex in source file overflows mod P: " + s)
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
// fromHex converts the passed hex string into a big integer pointer and will
|
||||
// panic if there is an error. This is only provided for the hard-coded
|
||||
// constants, so errors in the source code can be detected. It will only (and
|
||||
// must only) be called for initialization purposes.
|
||||
func fromHex(s string) *big.Int {
|
||||
if s == "" {
|
||||
return big.NewInt(0)
|
||||
}
|
||||
r, ok := new(big.Int).SetString(s, 16)
|
||||
if !ok {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
var testOk bool
|
||||
|
||||
// BenchmarkSign benchmarks how long it takes to sign a message.
|
||||
func BenchmarkSign(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
d := hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
privKey := secp256k1.NewSecretKey(d)
|
||||
// blake256 hash of the bytes {0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
var auxBytes [32]byte
|
||||
copy(auxBytes[:], msgHash)
|
||||
auxBytes[0] ^= 1
|
||||
var (
|
||||
sig *Signature
|
||||
err error
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
sig, err = Sign(
|
||||
privKey, msgHash, CustomNonce(auxBytes), FastSign(),
|
||||
)
|
||||
}
|
||||
testSig = sig
|
||||
testErr = err
|
||||
}
|
||||
|
||||
// BenchmarkSigVerify benchmarks how long it takes the secp256k1 curve to
|
||||
// verify signatures.
|
||||
func BenchmarkSigVerify(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
d := hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
privKey := secp256k1.NewSecretKey(d)
|
||||
pubKey := privKey.PubKey()
|
||||
// sha256 of the message "benchmark".
|
||||
msgHash := sha256.Sum256([]byte("benchmark"))
|
||||
sig, err := Sign(privKey, msgHash[:])
|
||||
if err != nil {
|
||||
b.Fatalf("unable to sign: %v", err)
|
||||
}
|
||||
if !sig.Verify(msgHash[:], pubKey) {
|
||||
b.Errorf("Signature failed to verify")
|
||||
return
|
||||
}
|
||||
var ok bool
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
ok = sig.Verify(msgHash[:], pubKey)
|
||||
}
|
||||
testOk = ok
|
||||
}
|
||||
|
||||
// Used to ensure the compiler doesn't optimize away the benchmark.
|
||||
var (
|
||||
testSig *Signature
|
||||
testErr error
|
||||
)
|
||||
|
||||
// BenchmarkSignRfc6979 benchmarks how long it takes to sign a message.
|
||||
func BenchmarkSignRfc6979(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
d := hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
privKey := secp256k1.NewSecretKey(d)
|
||||
// blake256 hash of the bytes {0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
var (
|
||||
sig *Signature
|
||||
err error
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
sig, err = Sign(privKey, msgHash, FastSign())
|
||||
}
|
||||
testSig = sig
|
||||
testErr = err
|
||||
}
|
||||
|
||||
// BenchmarkSigSerialize benchmarks how long it takes to serialize Schnorr
|
||||
// signatures.
|
||||
func BenchmarkSigSerialize(b *testing.B) {
|
||||
// From randomly generated keypair.
|
||||
d := hexToModNScalar("9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d")
|
||||
secKey := secp256k1.NewSecretKey(d)
|
||||
// blake256 hash of the bytes {0x01, 0x02, 0x03, 0x04}.
|
||||
msgHash := hexToBytes("c301ba9de5d6053caad9f5eb46523f007702add2c62fa39de03146a36b8026b7")
|
||||
// Generate the signature.
|
||||
sig, _ := Sign(secKey, msgHash)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
sig.Serialize()
|
||||
}
|
||||
}
|
||||
303
pkg/crypto/ec/schnorr/bip/bip-0340.mediawiki
Normal file
@@ -0,0 +1,303 @@
|
||||
<pre>
|
||||
BIP: 340
|
||||
Title: Schnorr Signatures for secp256k1
|
||||
Author: Pieter Wuille <pieter.wuille@gmail.com>
|
||||
Jonas Nick <jonasd.nick@gmail.com>
|
||||
Tim Ruffing <crypto@timruffing.de>
|
||||
Comments-Summary: No comments yet.
|
||||
Comments-URI: https://github.com/bitcoin/bips/wiki/Comments:BIP-0340
|
||||
Status: Final
|
||||
Type: Standards Track
|
||||
License: BSD-2-Clause
|
||||
Created: 2020-01-19
|
||||
Post-History: 2018-07-06: https://lists.linuxfoundation.org/pipermail/bitcoin-dev/2018-July/016203.html [bitcoin-dev] Schnorr signatures BIP
|
||||
</pre>
|
||||
|
||||
== Introduction ==
|
||||
|
||||
=== Abstract ===
|
||||
|
||||
This document proposes a standard for 64-byte Schnorr signatures over the elliptic curve ''secp256k1''.
|
||||
|
||||
=== Copyright ===
|
||||
|
||||
This document is licensed under the 2-clause BSD license.
|
||||
|
||||
=== Motivation ===
|
||||
|
||||
Bitcoin has traditionally used
|
||||
[https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm ECDSA] signatures over the [https://www.secg.org/sec2-v2.pdf secp256k1 curve] with [https://en.wikipedia.org/wiki/SHA-2 SHA256] hashes for authenticating
|
||||
transactions. These are [https://www.secg.org/sec1-v2.pdf standardized], but have a number of downsides
|
||||
compared to [http://publikationen.ub.uni-frankfurt.de/opus4/files/4280/schnorr.pdf Schnorr signatures] over the same curve:
|
||||
|
||||
* '''Provable security''': Schnorr signatures are provably secure. In more detail, they are ''strongly unforgeable under chosen message attack (SUF-CMA)''<ref>Informally, this means that without knowledge of the secret key but given valid signatures of arbitrary messages, it is not possible to come up with further valid signatures.</ref> [https://www.di.ens.fr/~pointche/Documents/Papers/2000_joc.pdf in the random oracle model assuming the hardness of the elliptic curve discrete logarithm problem (ECDLP)] and [http://www.neven.org/papers/schnorr.pdf in the generic group model assuming variants of preimage and second preimage resistance of the used hash function]<ref>A detailed security proof in the random oracle model, which essentially restates [https://www.di.ens.fr/~pointche/Documents/Papers/2000_joc.pdf the original security proof by Pointcheval and Stern] more explicitly, can be found in [https://eprint.iacr.org/2016/191 a paper by Kiltz, Masny and Pan]. All these security proofs assume a variant of Schnorr signatures that use ''(e,s)'' instead of ''(R,s)'' (see Design above). Since we use a unique encoding of ''R'', there is an efficiently computable bijection that maps ''(R,s)'' to ''(e,s)'', which allows to convert a successful SUF-CMA attacker for the ''(e,s)'' variant to a successful SUF-CMA attacker for the ''(R,s)'' variant (and vice-versa). Furthermore, the proofs consider a variant of Schnorr signatures without key prefixing (see Design above), but it can be verified that the proofs are also correct for the variant with key prefixing. As a result, all the aforementioned security proofs apply to the variant of Schnorr signatures proposed in this document.</ref>. In contrast, the [https://nbn-resolving.de/urn:nbn:de:hbz:294-60803 best known results for the provable security of ECDSA] rely on stronger assumptions.
|
||||
* '''Non-malleability''': The SUF-CMA security of Schnorr signatures implies that they are non-malleable. On the other hand, ECDSA signatures are inherently malleable<ref>If ''(r,s)'' is a valid ECDSA signature for a given message and key, then ''(r,n-s)'' is also valid for the same message and key. If ECDSA is restricted to only permit one of the two variants (as Bitcoin does through a policy rule on the network), it can be [https://nbn-resolving.de/urn:nbn:de:hbz:294-60803 proven] non-malleable under stronger than usual assumptions.</ref>; a third party without access to the secret key can alter an existing valid signature for a given public key and message into another signature that is valid for the same key and message. This issue is discussed in [[bip-0062.mediawiki|BIP62]] and [[bip-0146.mediawiki|BIP146]].
|
||||
* '''Linearity''': Schnorr signatures provide a simple and efficient method that enables multiple collaborating parties to produce a signature that is valid for the sum of their public keys. This is the building block for various higher-level constructions that improve efficiency and privacy, such as multisignatures and others (see Applications below).
|
||||
|
||||
For all these advantages, there are virtually no disadvantages, apart
|
||||
from not being standardized. This document seeks to change that. As we
|
||||
propose a new standard, a number of improvements not specific to Schnorr signatures can be
|
||||
made:
|
||||
|
||||
* '''Signature encoding''': Instead of using [https://en.wikipedia.org/wiki/X.690#DER_encoding DER]-encoding for signatures (which are variable size, and up to 72 bytes), we can use a simple fixed 64-byte format.
|
||||
* '''Public key encoding''': Instead of using [https://www.secg.org/sec1-v2.pdf ''compressed''] 33-byte encodings of elliptic curve points which are common in Bitcoin today, public keys in this proposal are encoded as 32 bytes.
|
||||
* '''Batch verification''': The specific formulation of ECDSA signatures that is standardized cannot be verified more efficiently in batch compared to individually, unless additional witness data is added. Changing the signature scheme offers an opportunity to address this.
|
||||
* '''Completely specified''': To be safe for usage in consensus systems, the verification algorithm must be completely specified at the byte level. This guarantees that nobody can construct a signature that is valid to some verifiers but not all. This is traditionally not a requirement for digital signature schemes, and the lack of exact specification for the DER parsing of ECDSA signatures has caused problems for Bitcoin [https://lists.linuxfoundation.org/pipermail/bitcoin-dev/2015-July/009697.html in the past], needing [[bip-0066.mediawiki|BIP66]] to address it. In this document we aim to meet this property by design. For batch verification, which is inherently non-deterministic as the verifier can choose their batches, this property implies that the outcome of verification may only differ from individual verifications with negligible probability, even to an attacker who intentionally tries to make batch- and non-batch verification differ.
|
||||
|
||||
By reusing the same curve and hash function as Bitcoin uses for ECDSA, we are able to retain existing mechanisms for choosing secret and public keys, and we avoid introducing new assumptions about the security of elliptic curves and hash functions.
|
||||
|
||||
== Description ==
|
||||
|
||||
We first build up the algebraic formulation of the signature scheme by
|
||||
going through the design choices. Afterwards, we specify the exact
|
||||
encodings and operations.
|
||||
|
||||
=== Design ===
|
||||
|
||||
'''Schnorr signature variant''' Elliptic Curve Schnorr signatures for message ''m'' and public key ''P'' generally involve a point ''R'', integers ''e'' and ''s'' picked by the signer, and the base point ''G'' which satisfy ''e = hash(R || m)'' and ''s⋅G = R + e⋅P''. Two formulations exist, depending on whether the signer reveals ''e'' or ''R'':
|
||||
# Signatures are pairs ''(e, s)'' that satisfy ''e = hash(s⋅G - e⋅P || m)''. This variant avoids minor complexity introduced by the encoding of the point ''R'' in the signature (see paragraphs "Encoding R and public key point P" and "Implicit Y coordinates" further below in this subsection). Moreover, revealing ''e'' instead of ''R'' allows for potentially shorter signatures: Whereas an encoding of ''R'' inherently needs about 32 bytes, the hash ''e'' can be tuned to be shorter than 32 bytes, and [http://www.neven.org/papers/schnorr.pdf a short hash of only 16 bytes suffices to provide SUF-CMA security at the target security level of 128 bits]. However, a major drawback of this optimization is that finding collisions in a short hash function is easy. This complicates the implementation of secure signing protocols in scenarios in which a group of mutually distrusting signers work together to produce a single joint signature (see Applications below). In these scenarios, which are not captured by the SUF-CMA model due its assumption of a single honest signer, a promising attack strategy for malicious co-signers is to find a collision in the hash function in order to obtain a valid signature on a message that an honest co-signer did not intend to sign.
|
||||
# Signatures are pairs ''(R, s)'' that satisfy ''s⋅G = R + hash(R || m)⋅P''. This supports batch verification, as there are no elliptic curve operations inside the hashes. Batch verification enables significant speedups.<ref>The speedup that results from batch verification can be demonstrated with the cryptography library [https://github.com/jonasnick/secp256k1/blob/schnorrsig-batch-verify/doc/speedup-batch.md libsecp256k1].</ref>
|
||||
|
||||
Since we would like to avoid the fragility that comes with short hashes, the ''e'' variant does not provide significant advantages. We choose the ''R''-option, which supports batch verification.
|
||||
|
||||
'''Key prefixing''' Using the verification rule above directly makes Schnorr signatures vulnerable to "related-key attacks" in which a third party can convert a signature ''(R, s)'' for public key ''P'' into a signature ''(R, s + a⋅hash(R || m))'' for public key ''P + a⋅G'' and the same message ''m'', for any given additive tweak ''a'' to the signing key. This would render signatures insecure when keys are generated using [[bip-0032.mediawiki#public-parent-key--public-child-key|BIP32's unhardened derivation]] and other methods that rely on additive tweaks to existing keys such as Taproot.
|
||||
|
||||
To protect against these attacks, we choose ''key prefixed''<ref>A limitation of committing to the public key (rather than to a short hash of it, or not at all) is that it removes the ability for public key recovery or verifying signatures against a short public key hash. These constructions are generally incompatible with batch verification.</ref> Schnorr signatures which means that the public key is prefixed to the message in the challenge hash input. This changes the equation to ''s⋅G = R + hash(R || P || m)⋅P''. [https://eprint.iacr.org/2015/1135.pdf It can be shown] that key prefixing protects against related-key attacks with additive tweaks. In general, key prefixing increases robustness in multi-user settings, e.g., it seems to be a requirement for proving the MuSig multisignature scheme secure (see Applications below).
|
||||
|
||||
We note that key prefixing is not strictly necessary for transaction signatures as used in Bitcoin currently, because signed transactions indirectly commit to the public keys already, i.e., ''m'' contains a commitment to ''pk''. However, this indirect commitment should not be relied upon because it may change with proposals such as SIGHASH_NOINPUT ([[bip-0118.mediawiki|BIP118]]), and would render the signature scheme unsuitable for other purposes than signing transactions, e.g., [https://bitcoin.org/en/developer-reference#signmessage signing ordinary messages].
|
||||
|
||||
'''Encoding R and public key point P''' There exist several possibilities for encoding elliptic curve points:
|
||||
# Encoding the full X and Y coordinates of ''P'' and ''R'', resulting in a 64-byte public key and a 96-byte signature.
|
||||
# Encoding the full X coordinate and one bit of the Y coordinate to determine one of the two possible Y coordinates. This would result in 33-byte public keys and 65-byte signatures.
|
||||
# Encoding only the X coordinate, resulting in 32-byte public keys and 64-byte signatures.
|
||||
|
||||
Using the first option would be slightly more efficient for verification (around 10%), but we prioritize compactness, and therefore choose option 3.
|
||||
|
||||
'''Implicit Y coordinates''' In order to support efficient verification and batch verification, the Y coordinate of ''P'' and of ''R'' cannot be ambiguous (every valid X coordinate has two possible Y coordinates). We have a choice between several options for symmetry breaking:
|
||||
# Implicitly choosing the Y coordinate that is in the lower half.
|
||||
# Implicitly choosing the Y coordinate that is even<ref>Since ''p'' is odd, negation modulo ''p'' will map even numbers to odd numbers and the other way around. This means that for a valid X coordinate, one of the corresponding Y coordinates will be even, and the other will be odd.</ref>.
|
||||
# Implicitly choosing the Y coordinate that is a quadratic residue (i.e. has a square root modulo ''p'').
|
||||
|
||||
The second option offers the greatest compatibility with existing key generation systems, where the standard 33-byte compressed public key format consists of a byte indicating the oddness of the Y coordinate, plus the full X coordinate. To avoid gratuitous incompatibilities, we pick that option for ''P'', and thus our X-only public keys become equivalent to a compressed public key that is the X-only key prefixed by the byte 0x02. For consistency, the same is done for ''R''<ref>An earlier version of this draft used the third option instead, based on a belief that this would in general trade signing efficiency for verification efficiency. When using Jacobian coordinates, a common optimization in ECC implementations, it is possible to determine if a Y coordinate is a quadratic residue by computing the Legendre symbol, without converting to affine coordinates first (which needs a modular inversion). As modular inverses and Legendre symbols have similar [https://lists.linuxfoundation.org/pipermail/bitcoin-dev/2020-August/018081.html performance] in practice, this trade-off is not worth it.</ref>.
|
||||
|
||||
Despite halving the size of the set of valid public keys, implicit Y coordinates are not a reduction in security. Informally, if a fast algorithm existed to compute the discrete logarithm of an X-only public key, then it could also be used to compute the discrete logarithm of a full public key: apply it to the X coordinate, and then optionally negate the result. This shows that breaking an X-only public key can be at most a small constant term faster than breaking a full one.<ref>This can be formalized by a simple reduction that reduces an attack on Schnorr signatures with implicit Y coordinates to an attack to Schnorr signatures with explicit Y coordinates. The reduction works by reencoding public keys and negating the result of the hash function, which is modeled as random oracle, whenever the challenge public key has an explicit Y coordinate that is odd. A proof sketch can be found [https://medium.com/blockstream/reducing-bitcoin-transaction-sizes-with-x-only-pubkeys-f86476af05d7 here].</ref>.
|
||||
|
||||
'''Tagged Hashes''' Cryptographic hash functions are used for multiple purposes in the specification below and in Bitcoin in general. To make sure hashes used in one context can't be reinterpreted in another one, hash functions can be tweaked with a context-dependent tag name, in such a way that collisions across contexts can be assumed to be infeasible. Such collisions obviously can not be ruled out completely, but only for schemes using tagging with a unique name. As for other schemes collisions are at least less likely with tagging than without.
|
||||
|
||||
For example, without tagged hashing a BIP340 signature could also be valid for a signature scheme where the only difference is that the arguments to the hash function are reordered. Worse, if the BIP340 nonce derivation function was copied or independently created, then the nonce could be accidentally reused in the other scheme leaking the secret key.
|
||||
|
||||
This proposal suggests to include the tag by prefixing the hashed data with ''SHA256(tag) || SHA256(tag)''. Because this is a 64-byte long context-specific constant and the ''SHA256'' block size is also 64 bytes, optimized implementations are possible (identical to SHA256 itself, but with a modified initial state). Using SHA256 of the tag name itself is reasonably simple and efficient for implementations that don't choose to use the optimization. In general, tags can be arbitrary byte arrays, but are suggested to be textual descriptions in UTF-8 encoding.
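As a concrete illustration, the tagged-hash construction above is only a few lines of Go. The following is a minimal sketch without the midstate optimization; the helper name ''taggedHash'' is ours and not part of any existing API:

```go
package main

import "crypto/sha256"

// taggedHash returns SHA256(SHA256(tag) || SHA256(tag) || msg), i.e. the
// tagged hash described above, computed naively (no cached midstate).
func taggedHash(tag string, msg []byte) [32]byte {
	tagHash := sha256.Sum256([]byte(tag))
	h := sha256.New()
	h.Write(tagHash[:])
	h.Write(tagHash[:])
	h.Write(msg)
	var out [32]byte
	copy(out[:], h.Sum(nil))
	return out
}
```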
|
||||
|
||||
'''Final scheme''' As a result, our final scheme ends up using public key ''pk'' which is the X coordinate of a point ''P'' on the curve whose Y coordinate is even and signatures ''(r,s)'' where ''r'' is the X coordinate of a point ''R'' whose Y coordinate is even. The signature satisfies ''s⋅G = R + tagged_hash(r || pk || m)⋅P''.
|
||||
|
||||
=== Specification ===
|
||||
|
||||
The following conventions are used, with constants as defined for [https://www.secg.org/sec2-v2.pdf secp256k1]. We note that adapting this specification to other elliptic curves is not straightforward and can result in an insecure scheme<ref>Among other pitfalls, using the specification with a curve whose order is not close to the size of the range of the nonce derivation function is insecure.</ref>.
|
||||
* Lowercase variables represent integers or byte arrays.
|
||||
** The constant ''p'' refers to the field size, ''0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F''.
|
||||
** The constant ''n'' refers to the curve order, ''0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141''.
|
||||
* Uppercase variables refer to points on the curve with equation ''y<sup>2</sup> = x<sup>3</sup> + 7'' over the integers modulo ''p''.
|
||||
** ''is_infinite(P)'' returns whether or not ''P'' is the point at infinity.
|
||||
** ''x(P)'' and ''y(P)'' are integers in the range ''0..p-1'' and refer to the X and Y coordinates of a point ''P'' (assuming it is not infinity).
|
||||
** The constant ''G'' refers to the base point, for which ''x(G) = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798'' and ''y(G) = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8''.
|
||||
** Addition of points refers to the usual [https://en.wikipedia.org/wiki/Elliptic_curve#The_group_law elliptic curve group operation].
|
||||
** [https://en.wikipedia.org/wiki/Elliptic_curve_point_multiplication Multiplication (⋅) of an integer and a point] refers to the repeated application of the group operation.
|
||||
* Functions and operations:
|
||||
** ''||'' refers to byte array concatenation.
|
||||
** The function ''x[i:j]'', where ''x'' is a byte array and ''i, j ≥ 0'', returns a ''(j - i)''-byte array with a copy of the ''i''-th byte (inclusive) to the ''j''-th byte (exclusive) of ''x''.
|
||||
** The function ''bytes(x)'', where ''x'' is an integer, returns the 32-byte encoding of ''x'', most significant byte first.
|
||||
** The function ''bytes(P)'', where ''P'' is a point, returns ''bytes(x(P))''.
|
||||
** The function ''int(x)'', where ''x'' is a 32-byte array, returns the 256-bit unsigned integer whose most significant byte first encoding is ''x''.
|
||||
** The function ''has_even_y(P)'', where ''P'' is a point for which ''not is_infinite(P)'', returns ''y(P) mod 2 = 0''.
|
||||
** The function ''lift_x(x)'', where ''x'' is a 256-bit unsigned integer, returns the point ''P'' for which ''x(P) = x''<ref>
|
||||
Given a candidate X coordinate ''x'' in the range ''0..p-1'', there exist either exactly two or exactly zero valid Y coordinates. If no valid Y coordinate exists, then ''x'' is not a valid X coordinate either, i.e., no point ''P'' exists for which ''x(P) = x''. The valid Y coordinates for a given candidate ''x'' are the square roots of ''c = x<sup>3</sup> + 7 mod p'' and they can be computed as ''y = ±c<sup>(p+1)/4</sup> mod p'' (see [https://en.wikipedia.org/wiki/Quadratic_residue#Prime_or_prime_power_modulus Quadratic residue]) if they exist, which can be checked by squaring and comparing with ''c''.</ref> and ''has_even_y(P)'', or fails if ''x'' is greater than ''p-1'' or no such point exists. The function ''lift_x(x)'' is equivalent to the following pseudocode:
|
||||
*** Fail if ''x ≥ p''.
|
||||
*** Let ''c = x<sup>3</sup> + 7 mod p''.
|
||||
*** Let ''y = c<sup>(p+1)/4</sup> mod p''.
|
||||
*** Fail if ''c ≠ y<sup>2</sup> mod p''.
|
||||
*** Return the unique point ''P'' such that ''x(P) = x'' and ''y(P) = y'' if ''y mod 2 = 0'' or ''y(P) = p-y'' otherwise.
|
||||
** The function ''hash<sub>name</sub>(x)'' where ''x'' is a byte array returns the 32-byte hash ''SHA256(SHA256(tag) || SHA256(tag) || x)'', where ''tag'' is the UTF-8 encoding of ''name''.
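For reference, the ''lift_x'' pseudocode above translates directly to Go with math/big. The following is a sketch only (the function name and error messages are ours), not the constant-time field arithmetic a production implementation would use:

```go
package main

import (
	"errors"
	"math/big"
)

// liftX returns the secp256k1 point with X coordinate x and even Y, following
// the lift_x pseudocode above, or an error if x >= p or x is not on the curve.
func liftX(x *big.Int) (*big.Int, *big.Int, error) {
	p, _ := new(big.Int).SetString(
		"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F", 16)
	if x.Sign() < 0 || x.Cmp(p) >= 0 {
		return nil, nil, errors.New("x is out of range")
	}
	// c = x^3 + 7 mod p
	c := new(big.Int).Exp(x, big.NewInt(3), p)
	c.Add(c, big.NewInt(7)).Mod(c, p)
	// y = c^((p+1)/4) mod p is a square root candidate because p = 3 (mod 4).
	e := new(big.Int).Add(p, big.NewInt(1))
	e.Rsh(e, 2)
	y := new(big.Int).Exp(c, e, p)
	// Fail if c != y^2 mod p: no point has this X coordinate.
	if new(big.Int).Exp(y, big.NewInt(2), p).Cmp(c) != 0 {
		return nil, nil, errors.New("x is not on the curve")
	}
	// Pick the solution with even Y.
	if y.Bit(0) == 1 {
		y.Sub(p, y)
	}
	return x, y, nil
}
```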
|
||||
|
||||
==== Public Key Generation ====
|
||||
|
||||
Input:
|
||||
* The secret key ''sk'': a 32-byte array, freshly generated uniformly at random
|
||||
|
||||
The algorithm ''PubKey(sk)'' is defined as:
|
||||
* Let ''d' = int(sk)''.
|
||||
* Fail if ''d' = 0'' or ''d' ≥ n''.
|
||||
* Return ''bytes(d'⋅G)''.
|
||||
|
||||
Note that we use a very different public key format (32 bytes) than the ones used by existing systems (which typically use elliptic curve points as public keys, or 33-byte or 65-byte encodings of them). A side effect is that ''PubKey(sk) = PubKey(bytes(n - int(sk)))'', so every public key has two corresponding secret keys.
|
||||
|
||||
==== Public Key Conversion ====
|
||||
|
||||
As an alternative to generating keys randomly, it is also possible and safe to repurpose existing key generation algorithms for ECDSA in a compatible way. The secret keys constructed by such an algorithm can be used as ''sk'' directly. The public keys constructed by such an algorithm (assuming they use the 33-byte compressed encoding) need to be converted by dropping the first byte. Specifically, [[bip-0032.mediawiki|BIP32]] and schemes built on top of it remain usable.
|
||||
|
||||
==== Default Signing ====
|
||||
|
||||
Input:
|
||||
* The secret key ''sk'': a 32-byte array
|
||||
* The message ''m'': a byte array
|
||||
* Auxiliary random data ''a'': a 32-byte array
|
||||
|
||||
The algorithm ''Sign(sk, m)'' is defined as:
|
||||
* Let ''d' = int(sk)''
|
||||
* Fail if ''d' = 0'' or ''d' ≥ n''
|
||||
* Let ''P = d'⋅G''
|
||||
* Let ''d = d' '' if ''has_even_y(P)'', otherwise let ''d = n - d' ''.
|
||||
* Let ''t'' be the byte-wise xor of ''bytes(d)'' and ''hash<sub>BIP0340/aux</sub>(a)''<ref>The auxiliary random data is hashed (with a unique tag) as a precaution against situations where the randomness may be correlated with the private key itself. It is xored with the private key (rather than combined with it in a hash) to reduce the number of operations exposed to the actual secret key.</ref>.
|
||||
* Let ''rand = hash<sub>BIP0340/nonce</sub>(t || bytes(P) || m)''<ref>Including the [https://moderncrypto.org/mail-archive/curves/2020/001012.html public key as input to the nonce hash] helps ensure the robustness of the signing algorithm by preventing leakage of the secret key if the calculation of the public key ''P'' is performed incorrectly or maliciously, for example if it is left to the caller for performance reasons.</ref>.
|
||||
* Let ''k' = int(rand) mod n''<ref>Note that in general, taking a uniformly random 256-bit integer modulo the curve order will produce an unacceptably biased result. However, for the secp256k1 curve, the order is sufficiently close to ''2<sup>256</sup>'' that this bias is not observable (''1 - n / 2<sup>256</sup>'' is around ''1.27 * 2<sup>-128</sup>'').</ref>.
|
||||
* Fail if ''k' = 0''.
|
||||
* Let ''R = k'⋅G''.
|
||||
* Let ''k = k' '' if ''has_even_y(R)'', otherwise let ''k = n - k' ''.
|
||||
* Let ''e = int(hash<sub>BIP0340/challenge</sub>(bytes(R) || bytes(P) || m)) mod n''.
|
||||
* Let ''sig = bytes(R) || bytes((k + ed) mod n)''.
|
||||
* If ''Verify(bytes(P), m, sig)'' (see below) returns failure, abort<ref>Verifying the signature before leaving the signer prevents random or attacker provoked computation errors. This prevents publishing invalid signatures which may leak information about the secret key. It is recommended, but can be omitted if the computation cost is prohibitive.</ref>.
|
||||
* Return the signature ''sig''.
|
||||
|
||||
The auxiliary random data should be set to fresh randomness generated at signing time, resulting in what is called a ''synthetic nonce''. Using 32 bytes of randomness is optimal. If obtaining randomness is expensive, 16 random bytes can be padded with 16 null bytes to obtain a 32-byte array. If randomness is not available at all at signing time, a simple counter wide enough to not repeat in practice (e.g., 64 bits or wider) and padded with null bytes to a 32 byte-array can be used, or even the constant array with 32 null bytes. Using any non-repeating value increases protection against [https://moderncrypto.org/mail-archive/curves/2017/000925.html fault injection attacks]. Using unpredictable randomness additionally increases protection against other side-channel attacks, and is '''recommended whenever available'''. Note that while this means the resulting nonce is not deterministic, the randomness is only supplemental to security. The normal security properties (excluding side-channel attacks) do not depend on the quality of the signing-time RNG.
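For example, a Go caller with access to a secure RNG could produce the auxiliary array as follows (a sketch; the helper name is ours):

```go
package main

import "crypto/rand"

// freshAux returns 32 bytes of fresh auxiliary randomness, yielding a
// synthetic nonce as recommended above. When no RNG is available, a
// zero-filled array or a null-padded counter may be substituted, giving up
// only the additional side-channel protection.
func freshAux() ([32]byte, error) {
	var a [32]byte
	_, err := rand.Read(a[:])
	return a, err
}
```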
|
||||
|
||||
==== Alternative Signing ====
|
||||
|
||||
It should be noted that various alternative signing algorithms can be used to produce equally valid signatures. The 32-byte ''rand'' value may be generated in other ways, producing a different but still valid signature (in other words, this is not a ''unique'' signature scheme). '''No matter which method is used to generate the ''rand'' value, the value must be a fresh uniformly random 32-byte string which is not even partially predictable for the attacker.''' For nonces without randomness this implies that the same inputs must not be presented in another context. This can be most reliably accomplished by not reusing the same private key across different signing schemes. For example, if the ''rand'' value was computed as per RFC6979 and the same secret key is used in deterministic ECDSA with RFC6979, the signatures can leak the secret key through nonce reuse.
|
||||
|
||||
'''Nonce exfiltration protection''' It is possible to strengthen the nonce generation algorithm using a second device. In this case, the second device contributes randomness which the actual signer provably incorporates into its nonce. This prevents certain attacks where the signer device is compromised and intentionally tries to leak the secret key through its nonce selection.
|
||||
|
||||
'''Multisignatures''' This signature scheme is compatible with various types of multisignature and threshold schemes such as [https://eprint.iacr.org/2018/068 MuSig], where a single public key requires holders of multiple secret keys to participate in signing (see Applications below).
|
||||
'''It is important to note that multisignature signing schemes in general are insecure with the ''rand'' generation from the default signing algorithm above (or any other deterministic method).'''
|
||||
|
||||
'''Precomputed public key data''' For many uses the compressed 33-byte encoding of the public key corresponding to the secret key may already be known, making it easy to evaluate ''has_even_y(P)'' and ''bytes(P)''. As such, having signers supply this directly may be more efficient than recalculating the public key from the secret key. However, if this optimization is used and additionally the signature verification at the end of the signing algorithm is dropped for increased efficiency, signers must ensure the public key is correctly calculated and not taken from untrusted sources.
|
||||
|
||||
==== Verification ====
|
||||
|
||||
Input:
|
||||
* The public key ''pk'': a 32-byte array
|
||||
* The message ''m'': a byte array
|
||||
* A signature ''sig'': a 64-byte array
|
||||
|
||||
The algorithm ''Verify(pk, m, sig)'' is defined as:
|
||||
* Let ''P = lift_x(int(pk))''; fail if that fails.
|
||||
* Let ''r = int(sig[0:32])''; fail if ''r ≥ p''.
|
||||
* Let ''s = int(sig[32:64])''; fail if ''s ≥ n''.
|
||||
* Let ''e = int(hash<sub>BIP0340/challenge</sub>(bytes(r) || bytes(P) || m)) mod n''.
|
||||
* Let ''R = s⋅G - e⋅P''.
|
||||
* Fail if ''is_infinite(R)''.
|
||||
* Fail if ''not has_even_y(R)''.
|
||||
* Fail if ''x(R) ≠ r''.
|
||||
* Return success iff no failure occurred before reaching this point.
|
||||
|
||||
For every valid secret key ''sk'' and message ''m'', ''Verify(PubKey(sk),m,Sign(sk,m))'' will succeed.
|
||||
|
||||
Note that the correctness of verification relies on the fact that ''lift_x'' always returns a point with an even Y coordinate. A hypothetical verification algorithm that treats points as public keys, and takes the point ''P'' directly as input would fail any time a point with odd Y is used. While it is possible to correct for this by negating points with odd Y coordinate before further processing, this would result in a scheme where every (message, signature) pair is valid for two public keys (a type of malleability that exists for ECDSA as well, but we don't wish to retain). We avoid these problems by treating just the X coordinate as public key.
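Relating this to the Go implementation added in this commit: the benchmarks earlier in the diff call ''Sign'' and ''Signature.Verify'' with a 32-byte digest, so end-to-end use looks roughly like the sketch below. The package qualifiers are assumptions based on the file layout (inside the schnorr package the calls are unqualified), and imports are omitted because the module path is not shown in this excerpt:

```go
// signAndVerify pre-hashes msg with SHA256 and round-trips the digest through
// the Sign/Verify calls exercised by the benchmarks above.
func signAndVerify(sec *secp256k1.SecretKey, msg []byte) (bool, error) {
	digest := sha256.Sum256(msg) // pre-hashing, per Usage Considerations below
	sig, err := schnorr.Sign(sec, digest[:])
	if err != nil {
		return false, err
	}
	return sig.Verify(digest[:], sec.PubKey()), nil
}
```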
|
||||
|
||||
==== Batch Verification ====
|
||||
|
||||
Input:
|
||||
* The number ''u'' of signatures
|
||||
* The public keys ''pk<sub>1..u</sub>'': ''u'' 32-byte arrays
|
||||
* The messages ''m<sub>1..u</sub>'': ''u'' byte arrays
|
||||
* The signatures ''sig<sub>1..u</sub>'': ''u'' 64-byte arrays
|
||||
|
||||
The algorithm ''BatchVerify(pk<sub>1..u</sub>, m<sub>1..u</sub>, sig<sub>1..u</sub>)'' is defined as:
|
||||
* Generate ''u-1'' random integers ''a<sub>2...u</sub>'' in the range ''1...n-1''. They are generated deterministically using a [https://en.wikipedia.org/wiki/Cryptographically_secure_pseudorandom_number_generator CSPRNG] seeded by a cryptographic hash of all inputs of the algorithm, i.e. ''seed = seed_hash(pk<sub>1</sub>..pk<sub>u</sub> || m<sub>1</sub>..m<sub>u</sub> || sig<sub>1</sub>..sig<sub>u</sub> )''. A safe choice is to instantiate ''seed_hash'' with SHA256 and use [https://tools.ietf.org/html/rfc8439 ChaCha20] with key ''seed'' as a CSPRNG to generate 256-bit integers, skipping integers not in the range ''1...n-1''.
|
||||
* For ''i = 1 .. u'':
|
||||
** Let ''P<sub>i</sub> = lift_x(int(pk<sub>i</sub>))''; fail if it fails.
|
||||
** Let ''r<sub>i</sub> = int(sig<sub>i</sub>[0:32])''; fail if ''r<sub>i</sub> ≥ p''.
|
||||
** Let ''s<sub>i</sub> = int(sig<sub>i</sub>[32:64])''; fail if ''s<sub>i</sub> ≥ n''.
|
||||
** Let ''e<sub>i</sub> = int(hash<sub>BIP0340/challenge</sub>(bytes(r<sub>i</sub>) || bytes(P<sub>i</sub>) || m<sub>i</sub>)) mod n''.
|
||||
** Let ''R<sub>i</sub> = lift_x(r<sub>i</sub>)''; fail if ''lift_x(r<sub>i</sub>)'' fails.
|
||||
* Fail if ''(s<sub>1</sub> + a<sub>2</sub>s<sub>2</sub> + ... + a<sub>u</sub>s<sub>u</sub>)⋅G ≠ R<sub>1</sub> + a<sub>2</sub>⋅R<sub>2</sub> + ... + a<sub>u</sub>⋅R<sub>u</sub> + e<sub>1</sub>⋅P<sub>1</sub> + (a<sub>2</sub>e<sub>2</sub>)⋅P<sub>2</sub> + ... + (a<sub>u</sub>e<sub>u</sub>)⋅P<sub>u</sub>''.
|
||||
* Return success iff no failure occurred before reaching this point.
|
||||
|
||||
If all individual signatures are valid (i.e., ''Verify'' would return success for them), ''BatchVerify'' will always return success. If at least one signature is invalid, ''BatchVerify'' will return success with at most a negligible probability.
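A sketch of the deterministic seeding step, instantiating ''seed_hash'' with SHA256 as suggested above (the helper name is ours):

```go
package main

import "crypto/sha256"

// batchSeed hashes all public keys, messages and signatures into the 32-byte
// seed from which the random coefficients a_2..a_u are derived by a CSPRNG.
func batchSeed(pks, msgs, sigs [][]byte) [32]byte {
	h := sha256.New()
	for _, pk := range pks {
		h.Write(pk)
	}
	for _, m := range msgs {
		h.Write(m)
	}
	for _, s := range sigs {
		h.Write(s)
	}
	var seed [32]byte
	copy(seed[:], h.Sum(nil))
	return seed
}
```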
|
||||
|
||||
=== Usage Considerations ===
|
||||
|
||||
==== Messages of Arbitrary Size ====
|
||||
|
||||
The signature scheme specified in this BIP accepts byte strings of arbitrary size as input messages.<ref>In theory, the message size is restricted due to the fact that SHA256 accepts byte strings only up to size of 2^61-1 bytes.</ref>
|
||||
It is understood that implementations may reject messages which are too large in their environment or application context,
|
||||
e.g., messages which exceed predefined buffers or would otherwise cause resource exhaustion.
|
||||
|
||||
Earlier revisions of this BIP required messages to be exactly 32 bytes.
|
||||
This restriction puts a burden on callers
|
||||
who typically need to perform pre-hashing of the actual input message by feeding it through SHA256 (or another collision-resistant cryptographic hash function)
|
||||
to create a 32-byte digest which can be passed to signing or verification
|
||||
(as for example done in [[bip-0341.mediawiki|BIP341]].)
|
||||
|
||||
Since pre-hashing may not always be desirable,
|
||||
e.g., when actual messages are shorter than 32 bytes,<ref>Another reason to omit pre-hashing is to protect against certain types of cryptanalytic advances against the hash function used for pre-hashing: If pre-hashing is used, an attacker that can find collisions in the pre-hashing function can necessarily forge signatures under chosen-message attacks. If pre-hashing is not used, an attacker that can find collisions in SHA256 (as used inside the signature scheme) may not be able to forge signatures. However, this seeming advantage is mostly irrelevant in the context of Bitcoin, which already relies on collision resistance of SHA256 in other places, e.g., for transaction hashes.</ref>
|
||||
the restriction to 32-byte messages has been lifted.
|
||||
We note that pre-hashing is recommended for performance reasons in applications that deal with large messages.
|
||||
If large messages are not pre-hashed,
|
||||
the algorithms of the signature scheme will perform more hashing internally.
|
||||
In particular, the signing algorithm needs two sequential hashing passes over the message,
|
||||
which means that the full message must necessarily be kept in memory during signing,
|
||||
and large messages entail a runtime penalty.<ref>Typically, messages of 56 bytes or longer enjoy a performance benefit from pre-hashing, assuming the speed of SHA256 inside the signing algorithm matches that of the pre-hashing done by the calling application.</ref>
|
||||
|
||||
==== Domain Separation ====
|
||||
|
||||
It is good cryptographic practice to use a key pair only for a single purpose.
|
||||
Nevertheless, there may be situations in which it may be desirable to use the same key pair in multiple contexts,
|
||||
i.e., to sign different types of messages within the same application
|
||||
or even messages in entirely different applications
|
||||
(e.g., a secret key may be used to sign Bitcoin transactions as well as plain text messages).
|
||||
|
||||
As a consequence, applications should ensure that a signed application message intended for one context is never deemed valid in a different context
|
||||
(e.g., a signed plain text message should never be misinterpreted as a signed Bitcoin transaction, because this could cause unintended loss of funds).
|
||||
This is called "domain separation" and it is typically realized by partitioning the message space.
|
||||
Even if key pairs are intended to be used only within a single context,
|
||||
domain separation is a good idea because it makes it easy to add more contexts later.
|
||||
|
||||
As a best practice, we recommend applications to use exactly one of the following methods to pre-process application messages before passing it to the signature scheme:
|
||||
* Either, pre-hash the application message using ''hash<sub>name</sub>'', where ''name'' identifies the context uniquely (e.g., "foo-app/signed-bar"),
|
||||
* or prefix the actual message with a 33-byte string that identifies the context uniquely (e.g., the UTF-8 encoding of "foo-app/signed-bar", padded with null bytes to 33 bytes).
|
||||
|
||||
As the two pre-processing methods yield different message sizes (32 bytes vs. at least 33 bytes), there is no risk of collision between them.
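The second pre-processing option above amounts to a fixed-size prefix; a minimal Go sketch (the helper name is ours):

```go
package main

// prefixMessage prepends a 33-byte, null-padded context string to the
// application message before signing or verification, implementing the second
// domain-separation option above. The context must be at most 33 bytes of
// UTF-8 and should uniquely identify the application context.
func prefixMessage(context string, msg []byte) []byte {
	prefix := make([]byte, 33)
	copy(prefix, context)
	return append(prefix, msg...)
}
```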
|
||||
|
||||
== Applications ==
|
||||
|
||||
There are several interesting applications beyond simple signatures.
|
||||
While recent academic papers claim that they are also possible with ECDSA, consensus support for Schnorr signature verification would significantly simplify the constructions.
|
||||
|
||||
=== Multisignatures and Threshold Signatures ===
|
||||
|
||||
By means of an interactive scheme such as [https://eprint.iacr.org/2018/068 MuSig], participants can aggregate their public keys into a single public key which they can jointly sign for. This allows ''n''-of-''n'' multisignatures which, from a verifier's perspective, are no different from ordinary signatures, giving improved privacy and efficiency versus ''CHECKMULTISIG'' or other means.
|
||||
|
||||
Moreover, Schnorr signatures are compatible with [https://web.archive.org/web/20031003232851/http://www.research.ibm.com/security/dkg.ps distributed key generation], which enables interactive threshold signatures schemes, e.g., the schemes described by [http://cacr.uwaterloo.ca/techreports/2001/corr2001-13.ps Stinson and Strobl (2001)] or [https://web.archive.org/web/20060911151529/http://theory.lcs.mit.edu/~stasio/Papers/gjkr03.pdf Gennaro, Jarecki and Krawczyk (2003)]. These protocols make it possible to realize ''k''-of-''n'' threshold signatures, which ensure that any subset of size ''k'' of the set of ''n'' signers can sign but no subset of size less than ''k'' can produce a valid Schnorr signature. However, the practicality of the existing schemes is limited: most schemes in the literature have been proven secure only for the case ''k-1 < n/2'', are not secure when used concurrently in multiple sessions, or require a reliable broadcast mechanism to be secure. Further research is necessary to improve this situation.
|
||||
|
||||
=== Adaptor Signatures ===
|
||||
|
||||
[https://download.wpsoftware.net/bitcoin/wizardry/mw-slides/2018-05-18-l2/slides.pdf Adaptor signatures] can be produced by a signer by offsetting his public nonce ''R'' with a known point ''T = t⋅G'', but not offsetting the signature's ''s'' value.
|
||||
A correct signature (or partial signature, as individual signers' contributions to a multisignature are called) on the same message with same nonce will then be equal to the adaptor signature offset by ''t'', meaning that learning ''t'' is equivalent to learning a correct signature.
|
||||
This can be used to enable atomic swaps or even [https://eprint.iacr.org/2018/472 general payment channels] in which the atomicity of disjoint transactions is ensured using the signatures themselves, rather than Bitcoin script support. The resulting transactions will appear to verifiers to be no different from ordinary single-signer transactions, except perhaps for the inclusion of locktime refund logic.
|
||||
|
||||
Adaptor signatures, beyond the efficiency and privacy benefits of encoding script semantics into constant-sized signatures, have additional benefits over traditional hash-based payment channels. Specifically, the secret values ''t'' may be reblinded between hops, allowing long chains of transactions to be made atomic while even the participants cannot identify which transactions are part of the chain. Also, because the secret values are chosen at signing time, rather than key generation time, existing outputs may be repurposed for different applications without recourse to the blockchain, even multiple times.
|
||||
|
||||
=== Blind Signatures ===
|
||||
|
||||
A blind signature protocol is an interactive protocol that enables a signer to sign a message at the behest of another party without learning any information about the signed message or the signature. Schnorr signatures admit a very [http://publikationen.ub.uni-frankfurt.de/files/4292/schnorr.blind_sigs_attack.2001.pdf simple blind signature scheme] which is however insecure because it's vulnerable to [https://www.iacr.org/archive/crypto2002/24420288/24420288.pdf Wagner's attack]. A known mitigation is to let the signer abort a signing session with a certain probability, and the resulting scheme can be [https://eprint.iacr.org/2019/877 proven secure under non-standard cryptographic assumptions].
|
||||
|
||||
Blind Schnorr signatures could for example be used in [https://github.com/ElementsProject/scriptless-scripts/blob/master/md/partially-blind-swap.md Partially Blind Atomic Swaps], a construction to enable transferring of coins, mediated by an untrusted escrow agent, without connecting the transactors in the public blockchain transaction graph.
|
||||
|
||||
== Test Vectors and Reference Code ==
|
||||
|
||||
For development and testing purposes, we provide a [[bip-0340/test-vectors.csv|collection of test vectors in CSV format]] and a naive, highly inefficient, and non-constant time [[bip-0340/reference.py|pure Python 3.7 reference implementation of the signing and verification algorithm]].
|
||||
The reference implementation is for demonstration purposes only and not to be used in production environments.
|
||||
|
||||
== Changelog ==
|
||||
|
||||
To help implementors understand updates to this BIP, we keep a list of substantial changes.
|
||||
|
||||
* 2022-08: Fix function signature of lift_x in reference code
|
||||
* 2023-04: Allow messages of arbitrary size
|
||||
|
||||
== Footnotes ==
|
||||
|
||||
<references />
|
||||
|
||||
== Acknowledgements ==
|
||||
|
||||
This document is the result of many discussions around Schnorr based signatures over the years, and had input from Johnson Lau, Greg Maxwell, Andrew Poelstra, Rusty Russell, and Anthony Towns. The authors further wish to thank all those who provided valuable feedback and reviews, including the participants of the [https://github.com/ajtowns/taproot-review structured reviews].
|
||||
244
pkg/crypto/ec/schnorr/bip/bip340/reference.py
Normal file
@@ -0,0 +1,244 @@
|
||||
from typing import Tuple, Optional, Any
|
||||
import hashlib
|
||||
import binascii
|
||||
|
||||
# Set DEBUG to True to get a detailed debug output including
|
||||
# intermediate values during key generation, signing, and
|
||||
# verification. This is implemented via calls to the
|
||||
# debug_print_vars() function.
|
||||
#
|
||||
# If you want to print values on an individual basis, use
|
||||
# the pretty() function, e.g., print(pretty(foo)).
|
||||
DEBUG = False
|
||||
|
||||
p = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F
|
||||
n = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
|
||||
|
||||
# Points are tuples of X and Y coordinates and the point at infinity is
|
||||
# represented by the None keyword.
|
||||
G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798,
|
||||
0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8)
|
||||
|
||||
Point = Tuple[int, int]
|
||||
|
||||
|
||||
# This implementation can be sped up by storing the midstate after hashing
|
||||
# tag_hash instead of rehashing it all the time.
|
||||
def tagged_hash(tag: str, msg: bytes) -> bytes:
|
||||
tag_hash = hashlib.sha256(tag.encode()).digest()
|
||||
return hashlib.sha256(tag_hash + tag_hash + msg).digest()
|
||||
|
||||
|
||||
def is_infinite(P: Optional[Point]) -> bool:
|
||||
return P is None
|
||||
|
||||
|
||||
def x(P: Point) -> int:
|
||||
assert not is_infinite(P)
|
||||
return P[0]
|
||||
|
||||
|
||||
def y(P: Point) -> int:
|
||||
assert not is_infinite(P)
|
||||
return P[1]
|
||||
|
||||
|
||||
def point_add(P1: Optional[Point], P2: Optional[Point]) -> Optional[Point]:
|
||||
if P1 is None:
|
||||
return P2
|
||||
if P2 is None:
|
||||
return P1
|
||||
if (x(P1) == x(P2)) and (y(P1) != y(P2)):
|
||||
return None
|
||||
if P1 == P2:
|
||||
lam = (3 * x(P1) * x(P1) * pow(2 * y(P1), p - 2, p)) % p
|
||||
else:
|
||||
lam = ((y(P2) - y(P1)) * pow(x(P2) - x(P1), p - 2, p)) % p
|
||||
x3 = (lam * lam - x(P1) - x(P2)) % p
|
||||
return (x3, (lam * (x(P1) - x3) - y(P1)) % p)
|
||||
|
||||
|
||||
def point_mul(P: Optional[Point], n: int) -> Optional[Point]:
|
||||
R = None
|
||||
for i in range(256):
|
||||
if (n >> i) & 1:
|
||||
R = point_add(R, P)
|
||||
P = point_add(P, P)
|
||||
return R
|
||||
|
||||
|
||||
def bytes_from_int(x: int) -> bytes:
|
||||
return x.to_bytes(32, byteorder="big")
|
||||
|
||||
|
||||
def bytes_from_point(P: Point) -> bytes:
|
||||
return bytes_from_int(x(P))
|
||||
|
||||
|
||||
def xor_bytes(b0: bytes, b1: bytes) -> bytes:
|
||||
return bytes(x ^ y for (x, y) in zip(b0, b1))
|
||||
|
||||
|
||||
def lift_x(x: int) -> Optional[Point]:
|
||||
if x >= p:
|
||||
return None
|
||||
y_sq = (pow(x, 3, p) + 7) % p
|
||||
y = pow(y_sq, (p + 1) // 4, p)
|
||||
if pow(y, 2, p) != y_sq:
|
||||
return None
|
||||
return (x, y if y & 1 == 0 else p - y)
|
||||
|
||||
|
||||
def int_from_bytes(b: bytes) -> int:
|
||||
return int.from_bytes(b, byteorder="big")
|
||||
|
||||
|
||||
def hash_sha256(b: bytes) -> bytes:
|
||||
return hashlib.sha256(b).digest()
|
||||
|
||||
|
||||
def has_even_y(P: Point) -> bool:
|
||||
assert not is_infinite(P)
|
||||
return y(P) % 2 == 0
|
||||
|
||||
|
||||
def pubkey_gen(seckey: bytes) -> bytes:
|
||||
d0 = int_from_bytes(seckey)
|
||||
if not (1 <= d0 <= n - 1):
|
||||
raise ValueError('The secret key must be an integer in the range 1..n-1.')
|
||||
P = point_mul(G, d0)
|
||||
assert P is not None
|
||||
return bytes_from_point(P)
|
||||
|
||||
|
||||
def schnorr_sign(msg: bytes, seckey: bytes, aux_rand: bytes) -> bytes:
|
||||
d0 = int_from_bytes(seckey)
|
||||
if not (1 <= d0 <= n - 1):
|
||||
raise ValueError('The secret key must be an integer in the range 1..n-1.')
|
||||
if len(aux_rand) != 32:
|
||||
raise ValueError('aux_rand must be 32 bytes instead of %i.' % len(aux_rand))
|
||||
P = point_mul(G, d0)
|
||||
assert P is not None
|
||||
d = d0 if has_even_y(P) else n - d0
|
||||
t = xor_bytes(bytes_from_int(d), tagged_hash("BIP0340/aux", aux_rand))
|
||||
k0 = int_from_bytes(tagged_hash("BIP0340/nonce", t + bytes_from_point(P) + msg)) % n
|
||||
if k0 == 0:
|
||||
raise RuntimeError('Failure. This happens only with negligible probability.')
|
||||
R = point_mul(G, k0)
|
||||
assert R is not None
|
||||
k = n - k0 if not has_even_y(R) else k0
|
||||
e = int_from_bytes(tagged_hash("BIP0340/challenge", bytes_from_point(R) + bytes_from_point(P) + msg)) % n
|
||||
sig = bytes_from_point(R) + bytes_from_int((k + e * d) % n)
|
||||
debug_print_vars()
|
||||
if not schnorr_verify(msg, bytes_from_point(P), sig):
|
||||
raise RuntimeError('The created signature does not pass verification.')
|
||||
return sig
|
||||
|
||||
|
||||
def schnorr_verify(msg: bytes, pubkey: bytes, sig: bytes) -> bool:
|
||||
if len(pubkey) != 32:
|
||||
raise ValueError('The public key must be a 32-byte array.')
|
||||
if len(sig) != 64:
|
||||
raise ValueError('The signature must be a 64-byte array.')
|
||||
P = lift_x(int_from_bytes(pubkey))
|
||||
r = int_from_bytes(sig[0:32])
|
||||
s = int_from_bytes(sig[32:64])
|
||||
if (P is None) or (r >= p) or (s >= n):
|
||||
debug_print_vars()
|
||||
return False
|
||||
e = int_from_bytes(tagged_hash("BIP0340/challenge", sig[0:32] + pubkey + msg)) % n
|
||||
R = point_add(point_mul(G, s), point_mul(P, n - e))
|
||||
if (R is None) or (not has_even_y(R)) or (x(R) != r):
|
||||
debug_print_vars()
|
||||
return False
|
||||
debug_print_vars()
|
||||
return True
|
||||
|
||||
|
||||
#
|
||||
# The following code is only used to verify the test vectors.
|
||||
#
|
||||
import csv
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def test_vectors() -> bool:
|
||||
all_passed = True
|
||||
with open(os.path.join(sys.path[0], 'test-vectors.csv'), newline='') as csvfile:
|
||||
reader = csv.reader(csvfile)
|
||||
reader.__next__()
|
||||
for row in reader:
|
||||
(index, seckey_hex, pubkey_hex, aux_rand_hex, msg_hex, sig_hex, result_str, comment) = row
|
||||
pubkey = bytes.fromhex(pubkey_hex)
|
||||
msg = bytes.fromhex(msg_hex)
|
||||
sig = bytes.fromhex(sig_hex)
|
||||
result = result_str == 'TRUE'
|
||||
print('\nTest vector', ('#' + index).rjust(3, ' ') + ':')
|
||||
if seckey_hex != '':
|
||||
seckey = bytes.fromhex(seckey_hex)
|
||||
pubkey_actual = pubkey_gen(seckey)
|
||||
if pubkey != pubkey_actual:
|
||||
print(' * Failed key generation.')
|
||||
print(' Expected key:', pubkey.hex().upper())
|
||||
print(' Actual key:', pubkey_actual.hex().upper())
|
||||
aux_rand = bytes.fromhex(aux_rand_hex)
|
||||
try:
|
||||
sig_actual = schnorr_sign(msg, seckey, aux_rand)
|
||||
if sig == sig_actual:
|
||||
print(' * Passed signing test.')
|
||||
else:
|
||||
print(' * Failed signing test.')
|
||||
print(' Expected signature:', sig.hex().upper())
|
||||
print(' Actual signature:', sig_actual.hex().upper())
|
||||
all_passed = False
|
||||
except RuntimeError as e:
|
||||
print(' * Signing test raised exception:', e)
|
||||
all_passed = False
|
||||
result_actual = schnorr_verify(msg, pubkey, sig)
|
||||
if result == result_actual:
|
||||
print(' * Passed verification test.')
|
||||
else:
|
||||
print(' * Failed verification test.')
|
||||
print(' Expected verification result:', result)
|
||||
print(' Actual verification result:', result_actual)
|
||||
if comment:
|
||||
print(' Comment:', comment)
|
||||
all_passed = False
|
||||
print()
|
||||
if all_passed:
|
||||
print('All test vectors passed.')
|
||||
else:
|
||||
print('Some test vectors failed.')
|
||||
return all_passed
|
||||
|
||||
|
||||
#
|
||||
# The following code is only used for debugging
|
||||
#
|
||||
import inspect
|
||||
|
||||
|
||||
def pretty(v: Any) -> Any:
|
||||
if isinstance(v, bytes):
|
||||
return '0x' + v.hex()
|
||||
if isinstance(v, int):
|
||||
return pretty(bytes_from_int(v))
|
||||
if isinstance(v, tuple):
|
||||
return tuple(map(pretty, v))
|
||||
return v
|
||||
|
||||
|
||||
def debug_print_vars() -> None:
|
||||
if DEBUG:
|
||||
current_frame = inspect.currentframe()
|
||||
assert current_frame is not None
|
||||
frame = current_frame.f_back
|
||||
assert frame is not None
|
||||
print(' Variables in function ', frame.f_code.co_name, ' at line ', frame.f_lineno, ':', sep='')
|
||||
for var_name, var_val in frame.f_locals.items():
|
||||
print(' ' + var_name.rjust(11, ' '), '==', pretty(var_val))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test_vectors()
|
||||
20
pkg/crypto/ec/schnorr/bip/bip340/test-vectors.csv
Normal file
@@ -0,0 +1,20 @@
|
||||
index,secret key,public key,aux_rand,message,signature,verification result,comment
|
||||
0,0000000000000000000000000000000000000000000000000000000000000003,F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9,0000000000000000000000000000000000000000000000000000000000000000,0000000000000000000000000000000000000000000000000000000000000000,E907831F80848D1069A5371B402410364BDF1C5F8307B0084C55F1CE2DCA821525F66A4A85EA8B71E482A74F382D2CE5EBEEE8FDB2172F477DF4900D310536C0,TRUE,
|
||||
1,B7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,0000000000000000000000000000000000000000000000000000000000000001,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6896BD60EEAE296DB48A229FF71DFE071BDE413E6D43F917DC8DCF8C78DE33418906D11AC976ABCCB20B091292BFF4EA897EFCB639EA871CFA95F6DE339E4B0A,TRUE,
|
||||
2,C90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B14E5C9,DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8,C87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906,7E2D58D8B3BCDF1ABADEC7829054F90DDA9805AAB56C77333024B9D0A508B75C,5831AAEED7B44BB74E5EAB94BA9D4294C49BCF2A60728D8B4C200F50DD313C1BAB745879A5AD954A72C45A91C3A51D3C7ADEA98D82F8481E0E1E03674A6F3FB7,TRUE,
|
||||
3,0B432B2677937381AEF05BB02A66ECD012773062CF3FA2549E44F58ED2401710,25D1DFF95105F5253C4022F628A996AD3A0D95FBF21D468A1B33F8C160D8F517,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,7EB0509757E246F19449885651611CB965ECC1A187DD51B64FDA1EDC9637D5EC97582B9CB13DB3933705B32BA982AF5AF25FD78881EBB32771FC5922EFC66EA3,TRUE,test fails if msg is reduced modulo p or n
|
||||
4,,D69C3509BB99E412E68B0FE8544E72837DFA30746D8BE2AA65975F29D22DC7B9,,4DF3C3F68FCC83B27E9D42C90431A72499F17875C81A599B566C9889B9696703,00000000000000000000003B78CE563F89A0ED9414F5AA28AD0D96D6795F9C6376AFB1548AF603B3EB45C9F8207DEE1060CB71C04E80F593060B07D28308D7F4,TRUE,
|
||||
5,,EEFDEA4CDB677750A420FEE807EACF21EB9898AE79B9768766E4FAA04A2D4A34,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,public key not on the curve
|
||||
6,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,FFF97BD5755EEEA420453A14355235D382F6472F8568A18B2F057A14602975563CC27944640AC607CD107AE10923D9EF7A73C643E166BE5EBEAFA34B1AC553E2,FALSE,has_even_y(R) is false
|
||||
7,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,1FA62E331EDBC21C394792D2AB1100A7B432B013DF3F6FF4F99FCB33E0E1515F28890B3EDB6E7189B630448B515CE4F8622A954CFE545735AAEA5134FCCDB2BD,FALSE,negated message
|
||||
8,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769961764B3AA9B2FFCB6EF947B6887A226E8D7C93E00C5ED0C1834FF0D0C2E6DA6,FALSE,negated s value
|
||||
9,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,0000000000000000000000000000000000000000000000000000000000000000123DDA8328AF9C23A94C1FEECFD123BA4FB73476F0D594DCB65C6425BD186051,FALSE,sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 0
|
||||
10,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,00000000000000000000000000000000000000000000000000000000000000017615FBAF5AE28864013C099742DEADB4DBA87F11AC6754F93780D5A1837CF197,FALSE,sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 1
|
||||
11,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,4A298DACAE57395A15D0795DDBFD1DCB564DA82B0F269BC70A74F8220429BA1D69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,sig[0:32] is not an X coordinate on the curve
|
||||
12,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,sig[0:32] is equal to field size
|
||||
13,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141,FALSE,sig[32:64] is equal to curve order
|
||||
14,,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,public key is not a valid X coordinate because it exceeds the field size
|
||||
15,0340034003400340034003400340034003400340034003400340034003400340,778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117,0000000000000000000000000000000000000000000000000000000000000000,,71535DB165ECD9FBBC046E5FFAEA61186BB6AD436732FCCC25291A55895464CF6069CE26BF03466228F19A3A62DB8A649F2D560FAC652827D1AF0574E427AB63,TRUE,message of size 0 (added 2022-12)
|
||||
16,0340034003400340034003400340034003400340034003400340034003400340,778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117,0000000000000000000000000000000000000000000000000000000000000000,11,08A20A0AFEF64124649232E0693C583AB1B9934AE63B4C3511F3AE1134C6A303EA3173BFEA6683BD101FA5AA5DBC1996FE7CACFC5A577D33EC14564CEC2BACBF,TRUE,message of size 1 (added 2022-12)
|
||||
17,0340034003400340034003400340034003400340034003400340034003400340,778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117,0000000000000000000000000000000000000000000000000000000000000000,0102030405060708090A0B0C0D0E0F1011,5130F39A4059B43BC7CAC09A19ECE52B5D8699D1A71E3C52DA9AFDB6B50AC370C4A482B77BF960F8681540E25B6771ECE1E5A37FD80E5A51897C5566A97EA5A5,TRUE,message of size 17 (added 2022-12)
|
||||
18,0340034003400340034003400340034003400340034003400340034003400340,778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117,0000000000000000000000000000000000000000000000000000000000000000,99999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999,403B12B0D8555A344175EA7EC746566303321E5DBFA8BE6F091635163ECA79A8585ED3E3170807E7C03B720FC54C7B23897FCBA0E9D0B4A06894CFD249F22367,TRUE,message of size 100 (added 2022-12)
|
||||
|
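The rows above end the generated CSV of BIP-340 test vectors. A minimal Go sketch of how such a file could be loaded for a table-driven test follows; the file name "test-vectors.csv", the helper name, and the column order (index, secret key, public key, aux_rand, message, signature, verification result, comment) are assumptions taken from the generator script's print_csv output further down, not code present in this commit.

```go
package bip340_test

import (
	"encoding/csv"
	"os"
	"testing"
)

// csvVector mirrors one row of the generated test-vectors.csv file.
type csvVector struct {
	SecKey, PubKey, AuxRand, Msg, Sig string
	ShouldVerify                      bool
	Comment                           string
}

// loadVectors reads the CSV emitted by test-vectors.py (path is hypothetical).
func loadVectors(t *testing.T, path string) []csvVector {
	t.Helper()
	f, err := os.Open(path)
	if err != nil {
		t.Fatalf("open vectors: %v", err)
	}
	defer f.Close()
	r := csv.NewReader(f)
	r.FieldsPerRecord = 8
	rows, err := r.ReadAll()
	if err != nil {
		t.Fatalf("read vectors: %v", err)
	}
	var out []csvVector
	for _, row := range rows[1:] { // skip the header row
		out = append(out, csvVector{
			SecKey: row[1], PubKey: row[2], AuxRand: row[3],
			Msg: row[4], Sig: row[5],
			ShouldVerify: row[6] == "TRUE",
			Comment:      row[7],
		})
	}
	return out
}
```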
335
pkg/crypto/ec/schnorr/bip/bip340/test-vectors.py
Normal file
335
pkg/crypto/ec/schnorr/bip/bip340/test-vectors.py
Normal file
@@ -0,0 +1,335 @@
|
||||
import csv
import sys
|
||||
from reference import *
|
||||
|
||||
|
||||
def is_square(x):
|
||||
return int(pow(x, (p - 1) // 2, p)) == 1
|
||||
|
||||
|
||||
def has_square_y(P):
|
||||
"""Determine if P has a square Y coordinate. Used in an earlier draft of BIP340."""
|
||||
assert not is_infinite(P)
|
||||
return is_square(P[1])
|
||||
|
||||
|
||||
def vector0():
|
||||
seckey = bytes_from_int(3)
|
||||
msg = bytes_from_int(0)
|
||||
aux_rand = bytes_from_int(0)
|
||||
sig = schnorr_sign(msg, seckey, aux_rand)
|
||||
pubkey = pubkey_gen(seckey)
|
||||
|
||||
# We should have at least one test vector where the seckey needs to be
|
||||
# negated and one where it doesn't. In this one the seckey doesn't need to
|
||||
# be negated.
|
||||
x = int_from_bytes(seckey)
|
||||
P = point_mul(G, x)
|
||||
assert (y(P) % 2 == 0)
|
||||
|
||||
# For historical reasons (pubkey tiebreaker was squareness and not evenness)
|
||||
# we should have at least one test vector where the point reconstructed
|
||||
# from the public key has a square and one where it has a non-square Y
|
||||
# coordinate. In this one Y is non-square.
|
||||
pubkey_point = lift_x(pubkey)
|
||||
assert (not has_square_y(pubkey_point))
|
||||
|
||||
# For historical reasons (R tiebreaker was squareness and not evenness)
|
||||
# we should have at least one test vector where the point reconstructed
|
||||
# from the R.x coordinate has a square and one where it has a non-square Y
|
||||
# coordinate. In this one Y is non-square.
|
||||
R = lift_x(sig[0:32])
|
||||
assert (not has_square_y(R))
|
||||
|
||||
return seckey, pubkey, aux_rand, msg, sig, "TRUE", None
|
||||
|
||||
|
||||
def vector1():
|
||||
seckey = bytes_from_int(0xB7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF)
|
||||
msg = bytes_from_int(0x243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89)
|
||||
aux_rand = bytes_from_int(1)
|
||||
|
||||
sig = schnorr_sign(msg, seckey, aux_rand)
|
||||
|
||||
# The point reconstructed from the R.x coordinate has a square Y coordinate.
|
||||
R = lift_x(sig[0:32])
|
||||
assert (has_square_y(R))
|
||||
|
||||
return seckey, pubkey_gen(seckey), aux_rand, msg, sig, "TRUE", None
|
||||
|
||||
|
||||
def vector2():
|
||||
seckey = bytes_from_int(0xC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B14E5C9)
|
||||
msg = bytes_from_int(0x7E2D58D8B3BCDF1ABADEC7829054F90DDA9805AAB56C77333024B9D0A508B75C)
|
||||
aux_rand = bytes_from_int(0xC87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906)
|
||||
sig = schnorr_sign(msg, seckey, aux_rand)
|
||||
|
||||
# The point reconstructed from the public key has a square Y coordinate.
|
||||
pubkey = pubkey_gen(seckey)
|
||||
pubkey_point = lift_x(pubkey)
|
||||
assert (has_square_y(pubkey_point))
|
||||
|
||||
# This signature vector would not verify if the implementer checked the
|
||||
# evenness of the X coordinate of R instead of the Y coordinate.
|
||||
R = lift_x(sig[0:32])
|
||||
assert (R[0] % 2 == 1)
|
||||
|
||||
return seckey, pubkey, aux_rand, msg, sig, "TRUE", None
|
||||
|
||||
|
||||
def vector3():
|
||||
seckey = bytes_from_int(0x0B432B2677937381AEF05BB02A66ECD012773062CF3FA2549E44F58ED2401710)
|
||||
|
||||
# Need to negate this seckey before signing
|
||||
x = int_from_bytes(seckey)
|
||||
P = point_mul(G, x)
|
||||
assert (y(P) % 2 != 0)
|
||||
|
||||
msg = bytes_from_int(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF)
|
||||
aux_rand = bytes_from_int(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF)
|
||||
|
||||
sig = schnorr_sign(msg, seckey, aux_rand)
|
||||
return seckey, pubkey_gen(seckey), aux_rand, msg, sig, "TRUE", "test fails if msg is reduced modulo p or n"
|
||||
|
||||
|
||||
# Signs with a given nonce. This can be INSECURE and is only INTENDED FOR
|
||||
# GENERATING TEST VECTORS. Results in an invalid signature if y(kG) is not
|
||||
# even.
|
||||
def insecure_schnorr_sign_fixed_nonce(msg, seckey0, k):
|
||||
if len(msg) != 32:
|
||||
raise ValueError('The message must be a 32-byte array.')
|
||||
seckey0 = int_from_bytes(seckey0)
|
||||
if not (1 <= seckey0 <= n - 1):
|
||||
raise ValueError('The secret key must be an integer in the range 1..n-1.')
|
||||
P = point_mul(G, seckey0)
|
||||
seckey = seckey0 if has_even_y(P) else n - seckey0
|
||||
R = point_mul(G, k)
|
||||
e = int_from_bytes(tagged_hash("BIP0340/challenge", bytes_from_point(R) + bytes_from_point(P) + msg)) % n
|
||||
return bytes_from_point(R) + bytes_from_int((k + e * seckey) % n)
|
||||
|
||||
|
||||
# Creates a signature with a small x(R) by using k = -1/2
|
||||
def vector4():
|
||||
one_half = n - 0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0
|
||||
seckey = bytes_from_int(0x763758E5CBEEDEE4F7D3FC86F531C36578933228998226672F13C4F0EBE855EB)
|
||||
msg = bytes_from_int(0x4DF3C3F68FCC83B27E9D42C90431A72499F17875C81A599B566C9889B9696703)
|
||||
sig = insecure_schnorr_sign_fixed_nonce(msg, seckey, one_half)
|
||||
return None, pubkey_gen(seckey), None, msg, sig, "TRUE", None
|
||||
|
||||
|
||||
default_seckey = bytes_from_int(0xB7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF)
|
||||
default_msg = bytes_from_int(0x243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89)
|
||||
default_aux_rand = bytes_from_int(0xC87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906)
|
||||
|
||||
|
||||
# Public key is not on the curve
|
||||
def vector5():
|
||||
# This creates a dummy signature that doesn't have anything to do with the
|
||||
# public key.
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
sig = schnorr_sign(msg, seckey, default_aux_rand)
|
||||
|
||||
pubkey = bytes_from_int(0xEEFDEA4CDB677750A420FEE807EACF21EB9898AE79B9768766E4FAA04A2D4A34)
|
||||
assert (lift_x(pubkey) is None)
|
||||
|
||||
return None, pubkey, None, msg, sig, "FALSE", "public key not on the curve"
|
||||
|
||||
|
||||
def vector6():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
k = 6
|
||||
sig = insecure_schnorr_sign_fixed_nonce(msg, seckey, k)
|
||||
|
||||
# Y coordinate of R is not even
|
||||
R = point_mul(G, k)
|
||||
assert (not has_even_y(R))
|
||||
|
||||
return None, pubkey_gen(seckey), None, msg, sig, "FALSE", "has_even_y(R) is false"
|
||||
|
||||
|
||||
def vector7():
|
||||
seckey = default_seckey
|
||||
msg = int_from_bytes(default_msg)
|
||||
neg_msg = bytes_from_int(n - msg)
|
||||
sig = schnorr_sign(neg_msg, seckey, default_aux_rand)
|
||||
return None, pubkey_gen(seckey), None, bytes_from_int(msg), sig, "FALSE", "negated message"
|
||||
|
||||
|
||||
def vector8():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
sig = schnorr_sign(msg, seckey, default_aux_rand)
|
||||
sig = sig[0:32] + bytes_from_int(n - int_from_bytes(sig[32:64]))
|
||||
return None, pubkey_gen(seckey), None, msg, sig, "FALSE", "negated s value"
|
||||
|
||||
|
||||
def bytes_from_point_inf0(P):
|
||||
if P is None:
|
||||
return bytes_from_int(0)
|
||||
return bytes_from_int(P[0])
|
||||
|
||||
|
||||
def vector9():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
|
||||
# Override bytes_from_point in schnorr_sign to allow creating a signature
|
||||
# with k = 0.
|
||||
k = 0
|
||||
bytes_from_point_tmp = bytes_from_point.__code__
|
||||
bytes_from_point.__code__ = bytes_from_point_inf0.__code__
|
||||
sig = insecure_schnorr_sign_fixed_nonce(msg, seckey, k)
|
||||
bytes_from_point.__code__ = bytes_from_point_tmp
|
||||
|
||||
return (None, pubkey_gen(seckey), None, msg, sig, "FALSE",
|
||||
"sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 0")
|
||||
|
||||
|
||||
def bytes_from_point_inf1(P):
|
||||
if P is None:
|
||||
return bytes_from_int(1)
|
||||
return bytes_from_int(P[0])
|
||||
|
||||
|
||||
def vector10():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
|
||||
# Override bytes_from_point in schnorr_sign to allow creating a signature
|
||||
# with k = 0.
|
||||
k = 0
|
||||
bytes_from_point_tmp = bytes_from_point.__code__
|
||||
bytes_from_point.__code__ = bytes_from_point_inf1.__code__
|
||||
sig = insecure_schnorr_sign_fixed_nonce(msg, seckey, k)
|
||||
bytes_from_point.__code__ = bytes_from_point_tmp
|
||||
|
||||
return (None, pubkey_gen(seckey), None, msg, sig, "FALSE",
|
||||
"sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 1")
|
||||
|
||||
|
||||
# It's cryptographically impossible to create a test vector that fails if run
|
||||
# in an implementation which merely misses the check that sig[0:32] is an X
|
||||
# coordinate on the curve. This test vector just increases test coverage.
|
||||
def vector11():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
sig = schnorr_sign(msg, seckey, default_aux_rand)
|
||||
|
||||
# Replace R's X coordinate with an X coordinate that's not on the curve
|
||||
x_not_on_curve = bytes_from_int(0x4A298DACAE57395A15D0795DDBFD1DCB564DA82B0F269BC70A74F8220429BA1D)
|
||||
assert (lift_x(x_not_on_curve) is None)
|
||||
sig = x_not_on_curve + sig[32:64]
|
||||
|
||||
return None, pubkey_gen(seckey), None, msg, sig, "FALSE", "sig[0:32] is not an X coordinate on the curve"
|
||||
|
||||
|
||||
# It's cryptographically impossible to create a test vector that fails if run
|
||||
# in an implementation which merely misses the check that sig[0:32] is smaller
|
||||
# than the field size. This test vector just increases test coverage.
|
||||
def vector12():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
sig = schnorr_sign(msg, seckey, default_aux_rand)
|
||||
|
||||
# Replace R's X coordinate with an X coordinate that's equal to field size
|
||||
sig = bytes_from_int(p) + sig[32:64]
|
||||
|
||||
return None, pubkey_gen(seckey), None, msg, sig, "FALSE", "sig[0:32] is equal to field size"
|
||||
|
||||
|
||||
# It's cryptographically impossible to create a test vector that fails if run
|
||||
# in an implementation which merely misses the check that sig[32:64] is smaller
|
||||
# than the curve order. This test vector just increases test coverage.
|
||||
def vector13():
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
sig = schnorr_sign(msg, seckey, default_aux_rand)
|
||||
|
||||
# Replace s with a number that's equal to the curve order
|
||||
sig = sig[0:32] + bytes_from_int(n)
|
||||
|
||||
return None, pubkey_gen(seckey), None, msg, sig, "FALSE", "sig[32:64] is equal to curve order"
|
||||
|
||||
|
||||
# Test out of range pubkey
|
||||
# It's cryptographically impossible to create a test vector that fails if run
|
||||
# in an implementation which accepts out of range pubkeys because we can't find
|
||||
# a secret key for such a public key and therefore can not create a signature.
|
||||
# This test vector just increases test coverage.
|
||||
def vector14():
|
||||
# This creates a dummy signature that doesn't have anything to do with the
|
||||
# public key.
|
||||
seckey = default_seckey
|
||||
msg = default_msg
|
||||
sig = schnorr_sign(msg, seckey, default_aux_rand)
|
||||
pubkey_int = p + 1
|
||||
pubkey = bytes_from_int(pubkey_int)
|
||||
assert (lift_x(pubkey) is None)
|
||||
# If an implementation would reduce a given public key modulo p then the
|
||||
# pubkey would be valid
|
||||
assert (lift_x(bytes_from_int(pubkey_int % p)) is not None)
|
||||
|
||||
return (
|
||||
None, pubkey, None, msg, sig, "FALSE", "public key is not a valid X coordinate because it exceeds the field size")
|
||||
|
||||
|
||||
def varlen_vector(msg_int):
|
||||
seckey = bytes_from_int(int(16 * "0340", 16))
|
||||
pubkey = pubkey_gen(seckey)
|
||||
aux_rand = bytes_from_int(0)
|
||||
msg = msg_int.to_bytes((msg_int.bit_length() + 7) // 8, "big")
|
||||
sig = schnorr_sign(msg, seckey, aux_rand)
|
||||
comment = "message of size %d (added 2022-12)"
|
||||
return seckey, pubkey, aux_rand, msg, sig, "TRUE", comment % len(msg)
|
||||
|
||||
|
||||
vector15 = lambda: varlen_vector(0)
|
||||
vector16 = lambda: varlen_vector(0x11)
|
||||
vector17 = lambda: varlen_vector(0x0102030405060708090A0B0C0D0E0F1011)
|
||||
vector18 = lambda: varlen_vector(int(100 * "99", 16))
|
||||
|
||||
vectors = [
|
||||
vector0(),
|
||||
vector1(),
|
||||
vector2(),
|
||||
vector3(),
|
||||
vector4(),
|
||||
vector5(),
|
||||
vector6(),
|
||||
vector7(),
|
||||
vector8(),
|
||||
vector9(),
|
||||
vector10(),
|
||||
vector11(),
|
||||
vector12(),
|
||||
vector13(),
|
||||
vector14(),
|
||||
vector15(),
|
||||
vector16(),
|
||||
vector17(),
|
||||
vector18(),
|
||||
]
|
||||
|
||||
|
||||
# Converts the byte strings of a test vector into hex strings
|
||||
def bytes_to_hex(seckey, pubkey, aux_rand, msg, sig, result, comment):
|
||||
return (seckey.hex().upper() if seckey is not None else None, pubkey.hex().upper(),
|
||||
aux_rand.hex().upper() if aux_rand is not None else None, msg.hex().upper(), sig.hex().upper(), result,
|
||||
comment)
|
||||
|
||||
|
||||
vectors = list(
|
||||
map(lambda vector: bytes_to_hex(vector[0], vector[1], vector[2], vector[3], vector[4], vector[5], vector[6]),
|
||||
vectors))
|
||||
|
||||
|
||||
def print_csv(vectors):
|
||||
writer = csv.writer(sys.stdout)
|
||||
writer.writerow(
|
||||
("index", "secret key", "public key", "aux_rand", "message", "signature", "verification result", "comment"))
|
||||
for (i, v) in enumerate(vectors):
|
||||
writer.writerow((i,) + v)
|
||||
|
||||
|
||||
print_csv(vectors)
|
||||
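Both the generator script above (via the reference implementation's tagged_hash) and the Go code below (via chainhash.TaggedHash) rely on the BIP-340 tagged hash construction, SHA-256(SHA-256(tag) || SHA-256(tag) || msg). The following is a small self-contained Go sketch of that construction; the function name and standalone package are illustrative only and not part of this commit.

```go
package main

import (
	"crypto/sha256"
	"fmt"
)

// taggedHash computes SHA-256(SHA-256(tag) || SHA-256(tag) || chunks...),
// the BIP-340 tagged hash.
func taggedHash(tag string, chunks ...[]byte) [32]byte {
	tagHash := sha256.Sum256([]byte(tag))
	h := sha256.New()
	h.Write(tagHash[:]) // the tag hash is written twice per BIP-340
	h.Write(tagHash[:])
	for _, c := range chunks {
		h.Write(c)
	}
	var out [32]byte
	copy(out[:], h.Sum(nil))
	return out
}

func main() {
	fmt.Printf("%x\n", taggedHash("BIP0340/challenge", []byte("example")))
}
```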
103
pkg/crypto/ec/schnorr/doc.go
Normal file
103
pkg/crypto/ec/schnorr/doc.go
Normal file
@@ -0,0 +1,103 @@
|
||||
// Copyright (c) 2020-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package schnorr provides custom Schnorr signing and verification via
|
||||
// secp256k1.
|
||||
//
|
||||
// This package provides data structures and functions necessary to produce and
|
||||
// verify deterministic canonical Schnorr signatures using a custom scheme named
|
||||
// EC-Schnorr-DCRv0 that is described herein. The signatures and implementation
|
||||
// are optimized specifically for the secp256k1 curve. See
|
||||
// https://www.secg.org/sec2-v2.pdf for details on the secp256k1 standard.
|
||||
//
|
||||
// It also provides functions to parse and serialize the Schnorr signatures
|
||||
// according to the specification described herein.
|
||||
//
|
||||
// A comprehensive suite of tests is provided to ensure proper functionality.
|
||||
//
|
||||
// # Overview
|
||||
//
|
||||
// A Schnorr signature is a digital signature scheme that is known for its
|
||||
// simplicity, provable security and efficient generation of short signatures.
|
||||
//
|
||||
// It provides many advantages over ECDSA signatures that make them ideal for
|
||||
// use with the only real downside being that they are not well standardized at
|
||||
// the time of this writing.
|
||||
//
|
||||
// Some of the advantages over ECDSA include:
|
||||
//
|
||||
// - They are linear which makes them easier to aggregate and use in
|
||||
// protocols that build on them such as multi-party signatures, threshold
|
||||
// signatures, adaptor signatures, and blind signatures
|
||||
// - They are provably secure with weaker assumptions than the best known
|
||||
// security proofs for ECDSA
|
||||
// - Specifically Schnorr signatures are provably secure under SUF-CMA (Strong
|
||||
// Existential Unforgeability under Chosen Message Attack) in the ROM
|
||||
// (Random Oracle Model) which guarantees that as long as the hash
|
||||
// function behaves ideally, the only way to break Schnorr signatures is
|
||||
// by solving the ECDLP (Elliptic Curve Discrete Logarithm Problem).
|
||||
// - Their relatively straightforward and efficient aggregation properties
|
||||
// make them excellent for scalability and allow them to provide some nice
|
||||
// secrecy characteristics
|
||||
// - They support faster batch verification unlike the standardized version of
|
||||
// ECDSA signatures
|
||||
//
|
||||
// # Custom Schnorr-based Signature Scheme
|
||||
//
|
||||
// As mentioned in the overview, the primary downside of Schnorr signatures for
|
||||
// elliptic curves is that they are not standardized as well as ECDSA signatures,
|
||||
// which means there are a number of variations that are not compatible with
|
||||
// each other.
|
||||
//
|
||||
// In addition, many of the standardization attempts have had various
|
||||
// disadvantages that make them unsuitable for use in Decred. Some of these
|
||||
// details and some insight into the design decisions made are discussed further
|
||||
// in the README.md file.
|
||||
//
|
||||
// Consequently, this package implements a custom Schnorr-based signature scheme
|
||||
// named EC-Schnorr-DCRv0 suitable for use in Decred.
|
||||
//
|
||||
// The following provides a high-level overview of the key design features of
|
||||
// the scheme:
|
||||
//
|
||||
// - Uses signatures of the form (R, s)
|
||||
// - Produces 64-byte signatures by only encoding the x coordinate of R
|
||||
// - Enforces even y coordinates for R to support efficient verification by
|
||||
// disambiguating the two possible y coordinates
|
||||
// - Canonically encodes both components of the signature with 32 bytes
|
||||
// each
|
||||
// - Uses BLAKE-256 with 14 rounds for the hash function to calculate
|
||||
// challenge e
|
||||
// - Uses RFC6979 to obviate the need for an entropy source at signing time
|
||||
// - Produces deterministic signatures for a given message and secret key pair
|
||||
//
|
||||
// # EC-Schnorr-DCRv0 Specification
|
||||
//
|
||||
// See the README.md file for the specific details of the signing and
|
||||
// verification algorithm as well as the signature serialization format.
|
||||
//
|
||||
// # Future Design Considerations
|
||||
//
|
||||
// It is worth noting that there are some additional optimizations and
|
||||
// modifications that have been identified since the introduction of
|
||||
// EC-Schnorr-DCRv0 that can be made to further harden security for multi-party
|
||||
// and threshold signature use cases as well provide the opportunity for faster
|
||||
// signature verification with a sufficiently optimized implementation.
|
||||
//
|
||||
// However, the v0 scheme is used in the existing consensus rules and any
|
||||
// changes to the signature scheme would invalidate existing uses. Therefore
|
||||
// changes in this regard will need to come in the form of a v1 signature scheme
|
||||
// and be accompanied by the necessary consensus updates.
|
||||
//
|
||||
// # Schnorr use in Decred
|
||||
//
|
||||
// At the time of this writing, Schnorr signatures are not yet in widespread use
|
||||
// on the Decred network, largely due to the current lack of support in wallets
|
||||
// and infrastructure for secure multi-party and threshold signatures.
|
||||
//
|
||||
// However, the consensus rules and scripting engine supports the necessary
|
||||
// primitives and given many of the beneficial properties of Schnorr signatures,
|
||||
// a good goal is to work towards providing the additional infrastructure to
|
||||
// increase their usage.
|
||||
package schnorr
|
||||
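To complement the package documentation above, here is a minimal usage sketch: sign the SHA-256 hash of a message with a fixed test key and verify it against the 32-byte x-only public key. The import path "next.orly.dev/pkg/crypto/ec/schnorr" is an assumption inferred from the file paths in this commit; the functions used (Sign, SerializePubKey, ParsePubKey, Verify, SecKeyFromBytes) appear in the files below.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/schnorr"
)

func main() {
	// Fixed 32-byte secret key (BIP-340 test vector 0 uses the scalar 3).
	keyBytes, _ := hex.DecodeString(
		"0000000000000000000000000000000000000000000000000000000000000003")
	privKey, _ := btcec.SecKeyFromBytes(keyBytes)

	// BIP-340 conventionally signs a 32-byte message; hash the payload first.
	hash := sha256.Sum256([]byte("hello, schnorr"))

	sig, err := schnorr.Sign(privKey, hash[:])
	if err != nil {
		panic(err)
	}

	// Serialize and re-parse the 32-byte x-only public key, then verify.
	pubBytes := schnorr.SerializePubKey(privKey.PubKey())
	pubKey, err := schnorr.ParsePubKey(pubBytes)
	if err != nil {
		panic(err)
	}
	fmt.Println("valid:", sig.Verify(hash[:], pubKey))
}
```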
71
pkg/crypto/ec/schnorr/error.go
Normal file
71
pkg/crypto/ec/schnorr/error.go
Normal file
@@ -0,0 +1,71 @@
|
||||
// Copyright (c) 2013-2017 The btcsuite developers
|
||||
// Copyright (c) 2014 Conformal Systems LLC.
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package schnorr
|
||||
|
||||
// ErrorKind identifies a kind of error. It has full support for errors.Is and
|
||||
// errors.As, so the caller can directly check against an error kind when
|
||||
// determining the reason for an error.
|
||||
type ErrorKind string
|
||||
|
||||
// These constants are used to identify a specific RuleError.
|
||||
const (
|
||||
// ErrInvalidHashLen indicates that the input hash to sign or verify is not
|
||||
// the required length.
|
||||
ErrInvalidHashLen = ErrorKind("ErrInvalidHashLen")
|
||||
// ErrSecretKeyIsZero indicates an attempt was made to sign a message with
|
||||
// a secret key that is equal to zero.
|
||||
ErrSecretKeyIsZero = ErrorKind("ErrSecretKeyIsZero")
|
||||
ErrPrivateKeyIsZero = ErrSecretKeyIsZero
|
||||
// ErrSchnorrHashValue indicates that the hash of (R || m) was too large and
|
||||
// so a new nonce should be used.
|
||||
ErrSchnorrHashValue = ErrorKind("ErrSchnorrHashValue")
|
||||
// ErrPubKeyNotOnCurve indicates that a point was not on the given elliptic
|
||||
// curve.
|
||||
ErrPubKeyNotOnCurve = ErrorKind("ErrPubKeyNotOnCurve")
|
||||
// ErrSigRYIsOdd indicates that the calculated Y value of R was odd.
|
||||
ErrSigRYIsOdd = ErrorKind("ErrSigRYIsOdd")
|
||||
// ErrSigRNotOnCurve indicates that the calculated or given point R for some
|
||||
// signature was not on the curve.
|
||||
ErrSigRNotOnCurve = ErrorKind("ErrSigRNotOnCurve")
|
||||
// ErrUnequalRValues indicates that the calculated point R for some
|
||||
// signature was not the same as the given R value for the signature.
|
||||
ErrUnequalRValues = ErrorKind("ErrUnequalRValues")
|
||||
// ErrSigTooShort is returned when a signature that should be a Schnorr
|
||||
// signature is too short.
|
||||
ErrSigTooShort = ErrorKind("ErrSigTooShort")
|
||||
// ErrSigTooLong is returned when a signature that should be a Schnorr
|
||||
// signature is too long.
|
||||
ErrSigTooLong = ErrorKind("ErrSigTooLong")
|
||||
// ErrSigRTooBig is returned when a signature has r with a value that is
|
||||
// greater than or equal to the prime of the field underlying the group.
|
||||
ErrSigRTooBig = ErrorKind("ErrSigRTooBig")
|
||||
// ErrSigSTooBig is returned when a signature has s with a value that is
|
||||
// greater than or equal to the group order.
|
||||
ErrSigSTooBig = ErrorKind("ErrSigSTooBig")
|
||||
)
|
||||
|
||||
// Error satisfies the error interface and prints human-readable errors.
|
||||
func (err ErrorKind) Error() string { return string(err) }
|
||||
|
||||
// Error identifies an error related to a schnorr signature. It has full support
|
||||
// for errors.Is and errors.As, so the caller can ascertain the specific reason
|
||||
// for the error by checking the underlying error.
|
||||
type Error struct {
|
||||
Err error
|
||||
Description string
|
||||
}
|
||||
|
||||
// Error satisfies the error interface and prints human-readable errors.
|
||||
func (err Error) Error() string { return err.Description }
|
||||
|
||||
// Unwrap returns the underlying wrapped error.
|
||||
func (err Error) Unwrap() (ee error) { return err.Err }
|
||||
|
||||
// signatureError creates an Error given a set of arguments.
|
||||
func signatureError(kind ErrorKind, desc string) (err error) {
|
||||
return Error{Err: kind, Description: desc}
|
||||
}
|
||||
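Because Error.Unwrap returns the underlying ErrorKind, callers can branch on the constants above with errors.Is. A short illustrative sketch (not part of the commit; the import path is assumed) using ParseSignature from this package:

```go
package main

import (
	"errors"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/schnorr"
)

func main() {
	// 63 bytes instead of the required 64 triggers ErrSigTooShort.
	_, err := schnorr.ParseSignature(make([]byte, 63))
	switch {
	case errors.Is(err, schnorr.ErrSigTooShort):
		fmt.Println("signature too short")
	case errors.Is(err, schnorr.ErrSigTooLong):
		fmt.Println("signature too long")
	case err != nil:
		fmt.Println("other parse error:", err)
	default:
		fmt.Println("parsed OK")
	}
}
```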
50
pkg/crypto/ec/schnorr/pubkey.go
Normal file
50
pkg/crypto/ec/schnorr/pubkey.go
Normal file
@@ -0,0 +1,50 @@
|
||||
// Copyright (c) 2013-2017 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package schnorr
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// These constants define the lengths of serialized public keys.
|
||||
|
||||
const (
|
||||
PubKeyBytesLen = 32
|
||||
)
|
||||
|
||||
// ParsePubKey parses a public key for a koblitz curve from a bytestring into a
|
||||
// btcec.PublicKey, verifying that it is valid. It only supports public keys in
|
||||
// the BIP-340 32-byte format.
|
||||
func ParsePubKey(pubKeyStr []byte) (*btcec.PublicKey, error) {
|
||||
if pubKeyStr == nil {
|
||||
err := fmt.Errorf("nil pubkey byte string")
|
||||
return nil, err
|
||||
}
|
||||
if len(pubKeyStr) != PubKeyBytesLen {
|
||||
err := fmt.Errorf(
|
||||
"bad pubkey byte string size (want %v, have %v)",
|
||||
PubKeyBytesLen, len(pubKeyStr),
|
||||
)
|
||||
return nil, err
|
||||
}
|
||||
// We'll manually prepend the compressed byte so we can re-use the existing
|
||||
// pubkey parsing routine of the main btcec package.
|
||||
var keyCompressed [btcec.PubKeyBytesLenCompressed]byte
|
||||
keyCompressed[0] = secp256k1.PubKeyFormatCompressedEven
|
||||
copy(keyCompressed[1:], pubKeyStr)
|
||||
return btcec.ParsePubKey(keyCompressed[:])
|
||||
}
|
||||
|
||||
// SerializePubKey serializes a public key as specified by BIP 340. Public keys
|
||||
// in this format are 32 bytes in length and are assumed to have an even y
|
||||
// coordinate.
|
||||
func SerializePubKey(pub *btcec.PublicKey) []byte {
|
||||
pBytes := pub.SerializeCompressed()
|
||||
return pBytes[1:]
|
||||
}
|
||||
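A brief sketch of the 32-byte x-only round trip described above: ParsePubKey lifts the X coordinate to the point with even Y, so serializing it again returns the same 32 bytes. Illustrative only; the import path is an assumption based on the file layout in this commit.

```go
package main

import (
	"bytes"
	"encoding/hex"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/schnorr"
)

func main() {
	// X-only public key taken from the BIP-340 test vectors.
	xOnly, _ := hex.DecodeString(
		"F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9")

	pub, err := schnorr.ParsePubKey(xOnly)
	if err != nil {
		panic(err)
	}
	roundTrip := schnorr.SerializePubKey(pub)
	fmt.Println("round trip matches:", bytes.Equal(xOnly, roundTrip))
}
```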
142
pkg/crypto/ec/schnorr/schnorrerror_test.go
Normal file
142
pkg/crypto/ec/schnorr/schnorrerror_test.go
Normal file
@@ -0,0 +1,142 @@
|
||||
// Copyright (c) 2020 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package schnorr
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestErrorKindStringer tests the stringized output for the ErrorKind type.
|
||||
func TestErrorKindStringer(t *testing.T) {
|
||||
tests := []struct {
|
||||
in ErrorKind
|
||||
want string
|
||||
}{
|
||||
{ErrInvalidHashLen, "ErrInvalidHashLen"},
|
||||
{ErrSecretKeyIsZero, "ErrSecretKeyIsZero"},
|
||||
{ErrSchnorrHashValue, "ErrSchnorrHashValue"},
|
||||
{ErrPubKeyNotOnCurve, "ErrPubKeyNotOnCurve"},
|
||||
{ErrSigRYIsOdd, "ErrSigRYIsOdd"},
|
||||
{ErrSigRNotOnCurve, "ErrSigRNotOnCurve"},
|
||||
{ErrUnequalRValues, "ErrUnequalRValues"},
|
||||
{ErrSigTooShort, "ErrSigTooShort"},
|
||||
{ErrSigTooLong, "ErrSigTooLong"},
|
||||
{ErrSigRTooBig, "ErrSigRTooBig"},
|
||||
{ErrSigSTooBig, "ErrSigSTooBig"},
|
||||
}
|
||||
for i, test := range tests {
|
||||
result := test.in.Error()
|
||||
if result != test.want {
|
||||
t.Errorf("#%d: got: %s want: %s", i, result, test.want)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestError tests the error output for the Error type.
|
||||
func TestError(t *testing.T) {
|
||||
tests := []struct {
|
||||
in Error
|
||||
want string
|
||||
}{
|
||||
{
|
||||
Error{Description: "some error"},
|
||||
"some error",
|
||||
}, {
|
||||
Error{Description: "human-readable error"},
|
||||
"human-readable error",
|
||||
},
|
||||
}
|
||||
|
||||
for i, test := range tests {
|
||||
result := test.in.Error()
|
||||
if result != test.want {
|
||||
t.Errorf("#%d: got: %s want: %s", i, result, test.want)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestErrorKindIsAs ensures both ErrorKind and Error can be identified
|
||||
// as being a specific error via errors.Is and unwrapped via errors.As.
|
||||
func TestErrorKindIsAs(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
err error
|
||||
target error
|
||||
wantMatch bool
|
||||
wantAs ErrorKind
|
||||
}{
|
||||
{
|
||||
name: "ErrInvalidHashLen == ErrInvalidHashLen",
|
||||
err: ErrInvalidHashLen,
|
||||
target: ErrInvalidHashLen,
|
||||
wantMatch: true,
|
||||
wantAs: ErrInvalidHashLen,
|
||||
}, {
|
||||
name: "Error.ErrInvalidHashLen == ErrInvalidHashLen",
|
||||
err: signatureError(ErrInvalidHashLen, ""),
|
||||
target: ErrInvalidHashLen,
|
||||
wantMatch: true,
|
||||
wantAs: ErrInvalidHashLen,
|
||||
}, {
|
||||
name: "Error.ErrInvalidHashLen == Error.ErrInvalidHashLen",
|
||||
err: signatureError(ErrInvalidHashLen, ""),
|
||||
target: signatureError(ErrInvalidHashLen, ""),
|
||||
wantMatch: true,
|
||||
wantAs: ErrInvalidHashLen,
|
||||
}, {
|
||||
name: "ErrSecretKeyIsZero != ErrInvalidHashLen",
|
||||
err: ErrSecretKeyIsZero,
|
||||
target: ErrInvalidHashLen,
|
||||
wantMatch: false,
|
||||
wantAs: ErrSecretKeyIsZero,
|
||||
}, {
|
||||
name: "Error.ErrSecretKeyIsZero != ErrInvalidHashLen",
|
||||
err: signatureError(ErrSecretKeyIsZero, ""),
|
||||
target: ErrInvalidHashLen,
|
||||
wantMatch: false,
|
||||
wantAs: ErrSecretKeyIsZero,
|
||||
}, {
|
||||
name: "ErrSecretKeyIsZero != Error.ErrInvalidHashLen",
|
||||
err: ErrSecretKeyIsZero,
|
||||
target: signatureError(ErrInvalidHashLen, ""),
|
||||
wantMatch: false,
|
||||
wantAs: ErrSecretKeyIsZero,
|
||||
}, {
|
||||
name: "Error.ErrSecretKeyIsZero != Error.ErrInvalidHashLen",
|
||||
err: signatureError(ErrSecretKeyIsZero, ""),
|
||||
target: signatureError(ErrInvalidHashLen, ""),
|
||||
wantMatch: false,
|
||||
wantAs: ErrSecretKeyIsZero,
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
// Ensure the error matches or not depending on the expected result.
|
||||
result := errors.Is(test.err, test.target)
|
||||
if result != test.wantMatch {
|
||||
t.Errorf(
|
||||
"%s: incorrect error identification -- got %v, want %v",
|
||||
test.name, result, test.wantMatch,
|
||||
)
|
||||
continue
|
||||
}
|
||||
// Ensure the underlying error kind can be unwrapped and is the
|
||||
// expected code.
|
||||
var code ErrorKind
|
||||
if !errors.As(test.err, &code) {
|
||||
t.Errorf("%s: unable to unwrap to error", test.name)
|
||||
continue
|
||||
}
|
||||
if !errors.Is(code, test.wantAs) {
|
||||
t.Errorf(
|
||||
"%s: unexpected unwrapped error -- got %v, want %v",
|
||||
test.name, code, test.wantAs,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
515
pkg/crypto/ec/schnorr/signature.go
Normal file
515
pkg/crypto/ec/schnorr/signature.go
Normal file
@@ -0,0 +1,515 @@
|
||||
// Copyright (c) 2013-2022 The btcsuite developers
|
||||
|
||||
package schnorr
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/chainhash"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
const (
|
||||
// SignatureSize is the size of an encoded Schnorr signature.
|
||||
SignatureSize = 64
|
||||
// scalarSize is the size of an encoded big endian scalar.
|
||||
scalarSize = 32
|
||||
)
|
||||
|
||||
var (
|
||||
// rfc6979ExtraDataV0 is the extra data to feed to RFC6979 when generating
|
||||
// the deterministic nonce for the BIP-340 scheme. This ensures the same
|
||||
// nonce is not generated for the same message and key as for other signing
|
||||
// algorithms such as ECDSA.
|
||||
//
|
||||
// It is equal to SHA-256([]byte("BIP-340")).
|
||||
rfc6979ExtraDataV0 = [32]uint8{
|
||||
0xa3, 0xeb, 0x4c, 0x18, 0x2f, 0xae, 0x7e, 0xf4,
|
||||
0xe8, 0x10, 0xc6, 0xee, 0x13, 0xb0, 0xe9, 0x26,
|
||||
0x68, 0x6d, 0x71, 0xe8, 0x7f, 0x39, 0x4f, 0x79,
|
||||
0x9c, 0x00, 0xa5, 0x21, 0x03, 0xcb, 0x4e, 0x17,
|
||||
}
|
||||
)
|
||||
|
||||
// Signature is a type representing a Schnorr signature.
|
||||
type Signature struct {
|
||||
r btcec.FieldVal
|
||||
s btcec.ModNScalar
|
||||
}
|
||||
|
||||
// NewSignature instantiates a new signature given some r and s values.
|
||||
func NewSignature(r *btcec.FieldVal, s *btcec.ModNScalar) *Signature {
|
||||
var sig Signature
|
||||
sig.r.Set(r).Normalize()
|
||||
sig.s.Set(s)
|
||||
return &sig
|
||||
}
|
||||
|
||||
// Serialize returns the Schnorr signature in a stricter format.
|
||||
//
|
||||
// The signatures are encoded as
|
||||
//
|
||||
// sig[0:32]
|
||||
// x coordinate of the point R, encoded as a big-endian uint256
|
||||
// sig[32:64]
|
||||
// s, encoded also as big-endian uint256
|
||||
func (sig Signature) Serialize() []byte {
|
||||
// Total length of returned signature is the length of r and s.
|
||||
var b [SignatureSize]byte
|
||||
sig.r.PutBytesUnchecked(b[0:32])
|
||||
sig.s.PutBytesUnchecked(b[32:64])
|
||||
return b[:]
|
||||
}
|
||||
|
||||
// ParseSignature parses a signature according to the BIP-340 specification and
|
||||
// enforces the following additional restrictions specific to secp256k1:
|
||||
//
|
||||
// - The r component must be in the valid range for secp256k1 field elements
|
||||
//
|
||||
// - The s component must be in the valid range for secp256k1 scalars
|
||||
func ParseSignature(sig []byte) (*Signature, error) {
|
||||
// The signature must be the correct length.
|
||||
sigLen := len(sig)
|
||||
if sigLen < SignatureSize {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: too short: %d < %d", sigLen,
|
||||
SignatureSize,
|
||||
)
|
||||
return nil, signatureError(ErrSigTooShort, str)
|
||||
}
|
||||
if sigLen > SignatureSize {
|
||||
str := fmt.Sprintf(
|
||||
"malformed signature: too long: %d > %d", sigLen,
|
||||
SignatureSize,
|
||||
)
|
||||
return nil, signatureError(ErrSigTooLong, str)
|
||||
}
|
||||
// The signature is validly encoded at this point, however, enforce
|
||||
// additional restrictions to ensure r is in the range [0, p-1], and s is in
|
||||
// the range [0, n-1] since valid Schnorr signatures are required to be in
|
||||
// that range per spec.
|
||||
var r btcec.FieldVal
|
||||
if overflow := r.SetByteSlice(sig[0:32]); overflow {
|
||||
str := "invalid signature: r >= field prime"
|
||||
return nil, signatureError(ErrSigRTooBig, str)
|
||||
}
|
||||
var s btcec.ModNScalar
|
||||
s.SetByteSlice(sig[32:64])
|
||||
// Return the signature.
|
||||
return NewSignature(&r, &s), nil
|
||||
}
|
||||
|
||||
// IsEqual compares this Signature instance to the one passed, returning true if
|
||||
// both Signatures are equivalent. A signature is equivalent to another if they
|
||||
// both have the same scalar value for R and S.
|
||||
func (sig Signature) IsEqual(otherSig *Signature) bool {
|
||||
return sig.r.Equals(&otherSig.r) && sig.s.Equals(&otherSig.s)
|
||||
}
|
||||
|
||||
// schnorrVerify attempts to verify the signature for the provided hash and
|
||||
// secp256k1 public key and either returns nil if successful or a specific error
|
||||
// indicating why it failed if not successful.
|
||||
//
|
||||
// This differs from the exported Verify method in that it returns a specific
|
||||
// error to support better testing, while the exported method simply returns a
|
||||
// bool indicating success or failure.
|
||||
func schnorrVerify(sig *Signature, hash []byte, pubKeyBytes []byte) error {
|
||||
// The algorithm for producing a BIP-340 signature is described in
|
||||
// README.md and is reproduced here for reference:
|
||||
//
|
||||
// 1. Fail if m is not 32 bytes
|
||||
// 2. P = lift_x(int(pk)).
|
||||
// 3. r = int(sig[0:32]); fail if r >= p.
|
||||
// 4. s = int(sig[32:64]); fail if s >= n.
|
||||
// 5. e = int(tagged_hash("BIP0340/challenge", bytes(r) || bytes(P) || M)) mod n.
|
||||
// 6. R = s*G - e*P
|
||||
// 7. Fail if is_infinite(R)
|
||||
// 8. Fail if not has_even_y(R)
|
||||
// 9. Fail if x(R) != r.
|
||||
// 10. Return success iff no failure occurred before reaching this
|
||||
// point.
|
||||
|
||||
// // Step 1.
|
||||
// //
|
||||
// // Fail if m is not 32 bytes
|
||||
// if len(hash) != scalarSize {
|
||||
// str := fmt.Sprintf("wrong size for message (got %v, want %v)",
|
||||
// len(hash), scalarSize)
|
||||
// return signatureError(schnorr.ErrInvalidHashLen, str)
|
||||
// }
|
||||
|
||||
// Step 2.
|
||||
//
|
||||
// P = lift_x(int(pk))
|
||||
//
|
||||
// Fail if P is not a point on the curve
|
||||
pubKey, err := ParsePubKey(pubKeyBytes)
|
||||
if chk.E(err) {
|
||||
return err
|
||||
}
|
||||
if !pubKey.IsOnCurve() {
|
||||
str := "pubkey point is not on curve"
|
||||
return signatureError(ErrPubKeyNotOnCurve, str)
|
||||
}
|
||||
// Step 3.
|
||||
//
|
||||
// Fail if r >= p
|
||||
//
|
||||
// Note this is already handled by the fact r is a field element.
|
||||
//
|
||||
// Step 4.
|
||||
//
|
||||
// Fail if s >= n
|
||||
//
|
||||
// Note this is already handled by the fact s is a mod n scalar.
|
||||
//
|
||||
// Step 5.
|
||||
//
|
||||
// e = int(tagged_hash("BIP0340/challenge", bytes(r) || bytes(P) || M)) mod n.
|
||||
var rBytes [32]byte
|
||||
sig.r.PutBytesUnchecked(rBytes[:])
|
||||
pBytes := SerializePubKey(pubKey)
|
||||
commitment := chainhash.TaggedHash(
|
||||
chainhash.TagBIP0340Challenge, rBytes[:], pBytes, hash,
|
||||
)
|
||||
var e btcec.ModNScalar
|
||||
e.SetBytes((*[32]byte)(commitment))
|
||||
// Negate e here so we can use AddNonConst below to subtract the s*G
|
||||
// point from e*P.
|
||||
e.Negate()
|
||||
// Step 6.
|
||||
//
|
||||
// R = s*G - e*P
|
||||
var P, R, sG, eP btcec.JacobianPoint
|
||||
pubKey.AsJacobian(&P)
|
||||
btcec.ScalarBaseMultNonConst(&sig.s, &sG)
|
||||
btcec.ScalarMultNonConst(&e, &P, &eP)
|
||||
btcec.AddNonConst(&sG, &eP, &R)
|
||||
// Step 7.
|
||||
//
|
||||
// Fail if R is the point at infinity
|
||||
if (R.X.IsZero() && R.Y.IsZero()) || R.Z.IsZero() {
|
||||
str := "calculated R point is the point at infinity"
|
||||
return signatureError(ErrSigRNotOnCurve, str)
|
||||
}
|
||||
// Step 8.
|
||||
//
|
||||
// Fail if R.y is odd
|
||||
//
|
||||
// Note that R must be in affine coordinates for this check.
|
||||
R.ToAffine()
|
||||
if R.Y.IsOdd() {
|
||||
str := "calculated R y-value is odd"
|
||||
return signatureError(ErrSigRYIsOdd, str)
|
||||
}
|
||||
// Step 9.
|
||||
//
|
||||
// Verified if R.x == r
|
||||
//
|
||||
// Note that R must be in affine coordinates for this check.
|
||||
if !sig.r.Equals(&R.X) {
|
||||
str := "calculated R point was not given R"
|
||||
return signatureError(ErrUnequalRValues, str)
|
||||
}
|
||||
// Step 10.
|
||||
//
|
||||
// Return success iff no failure occurred before reaching this point.
|
||||
return nil
|
||||
}
|
||||
|
||||
// Verify returns whether or not the signature is valid for the provided hash
|
||||
// and secp256k1 public key.
|
||||
func (sig *Signature) Verify(hash []byte, pubKey *btcec.PublicKey) bool {
|
||||
pubkeyBytes := SerializePubKey(pubKey)
|
||||
return schnorrVerify(sig, hash, pubkeyBytes) == nil
|
||||
}
|
||||
|
||||
// zeroArray zeroes the memory of a scalar array.
|
||||
func zeroArray(a *[scalarSize]byte) {
|
||||
for i := 0; i < scalarSize; i++ {
|
||||
a[i] = 0x00
|
||||
}
|
||||
}
|
||||
|
||||
// schnorrSign generates a BIP-340 signature over the secp256k1 curve for the
|
||||
// provided hash (which should be the result of hashing a larger message) using
|
||||
// the given nonce and secret key. The produced signature is deterministic (the
|
||||
// same message, nonce, and key yield the same signature) and canonical.
|
||||
//
|
||||
// WARNING: The hash MUST be 32 bytes, and both the nonce and secret keys must
|
||||
// NOT be 0. Since this is an internal use function, these preconditions MUST be
|
||||
// satisfied by the caller.
|
||||
func schnorrSign(
|
||||
privKey, nonce *btcec.ModNScalar, pubKey *btcec.PublicKey,
|
||||
hash []byte, opts *signOptions,
|
||||
) (*Signature, error) {
|
||||
|
||||
// The algorithm for producing a BIP-340 signature is described in
|
||||
// README.md and is reproduced here for reference:
|
||||
//
|
||||
// G = curve generator
|
||||
// n = curve order
|
||||
// d = secret key
|
||||
// m = message
|
||||
// a = input randomness
|
||||
// r, s = signature
|
||||
//
|
||||
// 1. d' = int(d)
|
||||
// 2. Fail if m is not 32 bytes
|
||||
// 3. Fail if d = 0 or d >= n
|
||||
// 4. P = d'*G
|
||||
// 5. Negate d if P.y is odd
|
||||
// 6. t = bytes(d) xor tagged_hash("BIP0340/aux", t || bytes(P) || m)
|
||||
// 7. rand = tagged_hash("BIP0340/nonce", a)
|
||||
// 8. k' = int(rand) mod n
|
||||
// 9. Fail if k' = 0
|
||||
// 10. R = k'*G
|
||||
// 11. Negate k if R.y is odd
|
||||
// 12. e = tagged_hash("BIP0340/challenge", bytes(R) || bytes(P) || m) mod n
|
||||
// 13. sig = bytes(R) || bytes((k + e*d) mod n)
|
||||
// 14. If Verify(bytes(P), m, sig) fails, abort.
|
||||
// 15. return sig.
|
||||
//
|
||||
// Note that the set of functional options passed in may modify the
|
||||
// above algorithm. Namely if CustomNonce is not used, then steps 6-8 are
|
||||
// replaced with a process that generates the nonce using rfc6979. If
|
||||
// FastSign is passed, then we skip step 14.
|
||||
|
||||
// NOTE: Steps 1-9 are performed by the caller.
|
||||
|
||||
//
|
||||
// Step 10.
|
||||
//
|
||||
// R = kG
|
||||
var R btcec.JacobianPoint
|
||||
k := *nonce
|
||||
btcec.ScalarBaseMultNonConst(&k, &R)
|
||||
// Step 11.
|
||||
//
|
||||
// Negate nonce k if R.y is odd (R.y is the y coordinate of the point R)
|
||||
//
|
||||
// Note that R must be in affine coordinates for this check.
|
||||
R.ToAffine()
|
||||
if R.Y.IsOdd() {
|
||||
k.Negate()
|
||||
}
|
||||
// Step 12.
|
||||
//
|
||||
// e = tagged_hash("BIP0340/challenge", bytes(R) || bytes(P) || m) mod n
|
||||
var rBytes [32]byte
|
||||
r := &R.X
|
||||
r.PutBytesUnchecked(rBytes[:])
|
||||
pBytes := SerializePubKey(pubKey)
|
||||
commitment := chainhash.TaggedHash(
|
||||
chainhash.TagBIP0340Challenge, rBytes[:], pBytes, hash,
|
||||
)
|
||||
var e btcec.ModNScalar
|
||||
if overflow := e.SetBytes((*[32]byte)(commitment)); overflow != 0 {
|
||||
k.Zero()
|
||||
str := "hash of (r || P || m) too big"
|
||||
return nil, signatureError(ErrSchnorrHashValue, str)
|
||||
}
|
||||
// Step 13.
|
||||
//
|
||||
// s = k + e*d mod n
|
||||
s := new(btcec.ModNScalar).Mul2(&e, privKey).Add(&k)
|
||||
k.Zero()
|
||||
sig := NewSignature(r, s)
|
||||
// Step 14.
|
||||
//
|
||||
// If Verify(bytes(P), m, sig) fails, abort.
|
||||
if !opts.fastSign {
|
||||
if err := schnorrVerify(sig, hash, pBytes); chk.T(err) {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
// Step 15.
|
||||
//
|
||||
// Return (r, s)
|
||||
return sig, nil
|
||||
}
|
||||
|
||||
// SignOption is a functional option argument that allows callers to modify the
|
||||
// way we generate BIP-340 schnorr signatures.
|
||||
type SignOption func(*signOptions)
|
||||
|
||||
// signOptions houses the set of functional options that can be used to modify
|
||||
// the method used to generate the BIP-340 signature.
|
||||
type signOptions struct {
|
||||
// fastSign determines if we'll skip the check at the end of the routine
|
||||
// where we attempt to verify the produced signature.
|
||||
fastSign bool
|
||||
// authNonce allows the user to pass in their own nonce information, which
|
||||
// is useful for schemes like mu-sig.
|
||||
authNonce *[32]byte
|
||||
}
|
||||
|
||||
// defaultSignOptions returns the default set of signing operations.
|
||||
func defaultSignOptions() *signOptions { return &signOptions{} }
|
||||
|
||||
// FastSign forces signing to skip the extra verification step at the end.
|
||||
// Performance-sensitive applications may opt to use this option to speed up the
|
||||
// signing operation.
|
||||
func FastSign() SignOption {
|
||||
return func(o *signOptions) { o.fastSign = true }
|
||||
}
|
||||
|
||||
// CustomNonce allows users to pass in a custom set of auxData that's used as
|
||||
// input randomness to generate the nonce used during signing. Users may want
|
||||
// to specify this custom value when using multi-signature schemes such as
|
||||
// Mu-Sig2. If this option isn't set, then rfc6979 will be used to generate the
|
||||
// nonce material.
|
||||
func CustomNonce(auxData [32]byte) SignOption {
|
||||
return func(o *signOptions) { o.authNonce = &auxData }
|
||||
}
|
||||
|
||||
// Sign generates a BIP-340 signature over the secp256k1 curve for the provided
|
||||
// hash (which should be the result of hashing a larger message) using the given
|
||||
// secret key. The produced signature is deterministic (the same message and the
|
||||
// same key yield the same signature) and canonical.
|
||||
//
|
||||
// Note that the current signing implementation has a few remaining variable
|
||||
// time aspects which make use of the secret key and the generated nonce, which
|
||||
// can expose the signer to timing side-channel attacks. As a result, this function
|
||||
// should not be used in situations where there is the possibility of someone
|
||||
// having EM field/cache/etc access.
|
||||
func Sign(
|
||||
privKey *btcec.SecretKey, hash []byte,
|
||||
signOpts ...SignOption,
|
||||
) (*Signature, error) {
|
||||
// First, parse the set of optional signing options.
|
||||
opts := defaultSignOptions()
|
||||
for _, option := range signOpts {
|
||||
option(opts)
|
||||
}
|
||||
// The algorithm for producing a BIP-340 signature is described in README.md
|
||||
// and is reproduced here for reference:
|
||||
//
|
||||
// G = curve generator
|
||||
// n = curve order
|
||||
// d = secret key
|
||||
// m = message
|
||||
// a = input randomness
|
||||
// r, s = signature
|
||||
//
|
||||
// 1. d' = int(d)
|
||||
// 2. Fail if m is not 32 bytes
|
||||
// 3. Fail if d = 0 or d >= n
|
||||
// 4. P = d'*G
|
||||
// 5. Negate d if P.y is odd
|
||||
// 6. t = bytes(d) xor tagged_hash("BIP0340/aux", t || bytes(P) || m)
|
||||
// 7. rand = tagged_hash("BIP0340/nonce", a)
|
||||
// 8. k' = int(rand) mod n
|
||||
// 9. Fail if k' = 0
|
||||
// 10. R = k'*G
|
||||
// 11. Negate k if R.y is odd
|
||||
// 12. e = tagged_hash("BIP0340/challenge", bytes(R) || bytes(P) || m) mod n
|
||||
// 13. sig = bytes(R) || bytes((k + e*d) mod n)
|
||||
// 14. If Verify(bytes(P), m, sig) fails, abort.
|
||||
// 15. return sig.
|
||||
//
|
||||
// Note that the set of functional options passed in may modify the above
|
||||
// algorithm. Namely if CustomNonce is not used, then steps 6-8 are replaced
|
||||
// with a process that generates the nonce using rfc6979. If FastSign is
|
||||
// passed, then we skip step 14.
|
||||
//
|
||||
// Step 1.
|
||||
//
|
||||
// d' = int(d)
|
||||
var privKeyScalar btcec.ModNScalar
|
||||
privKeyScalar.Set(&privKey.Key)
|
||||
|
||||
// Step 2.
|
||||
//
|
||||
// Fail if m is not 32 bytes
|
||||
// if len(hash) != scalarSize {
|
||||
// str := fmt.Sprintf("wrong size for message hash (got %v, want %v)",
|
||||
// len(hash), scalarSize)
|
||||
// return nil, signatureError(schnorr.ErrInvalidHashLen, str)
|
||||
// }
|
||||
//
|
||||
// Step 3.
|
||||
//
|
||||
// Fail if d = 0 or d >= n
|
||||
if privKeyScalar.IsZero() {
|
||||
str := "secret key is zero"
|
||||
return nil, signatureError(ErrSecretKeyIsZero, str)
|
||||
}
|
||||
// Step 4.
|
||||
//
|
||||
// P = d'*G
|
||||
pub := privKey.PubKey()
|
||||
// Step 5.
|
||||
//
|
||||
// Negate d if P.y is odd.
|
||||
pubKeyBytes := pub.SerializeCompressed()
|
||||
if pubKeyBytes[0] == secp256k1.PubKeyFormatCompressedOdd {
|
||||
privKeyScalar.Negate()
|
||||
}
|
||||
// At this point, we check to see if a CustomNonce has been passed in, and
|
||||
// if so, then we'll deviate from the main routine here by generating the
|
||||
// nonce value as specified by BIP-0340.
|
||||
if opts.authNonce != nil {
|
||||
// Step 6.
|
||||
//
|
||||
// t = bytes(d) xor tagged_hash("BIP0340/aux", a)
|
||||
privBytes := privKeyScalar.Bytes()
|
||||
t := chainhash.TaggedHash(
|
||||
chainhash.TagBIP0340Aux, (*opts.authNonce)[:],
|
||||
)
|
||||
for i := 0; i < len(t); i++ {
|
||||
t[i] ^= privBytes[i]
|
||||
}
|
||||
// Step 7.
|
||||
//
|
||||
// rand = tagged_hash("BIP0340/nonce", t || bytes(P) || m)
|
||||
//
|
||||
// We snip off the first byte of the serialized pubkey, as we only need
|
||||
// the x coordinate and not the marker byte.
|
||||
rand := chainhash.TaggedHash(
|
||||
chainhash.TagBIP0340Nonce, t[:], pubKeyBytes[1:], hash,
|
||||
)
|
||||
// Step 8.
|
||||
//
|
||||
// k'= int(rand) mod n
|
||||
var kPrime btcec.ModNScalar
|
||||
kPrime.SetBytes((*[32]byte)(rand))
|
||||
// Step 9.
|
||||
//
|
||||
// Fail if k' = 0
|
||||
if kPrime.IsZero() {
|
||||
str := fmt.Sprintf("generated nonce is zero")
|
||||
return nil, signatureError(ErrSchnorrHashValue, str)
|
||||
}
|
||||
sig, err := schnorrSign(&privKeyScalar, &kPrime, pub, hash, opts)
|
||||
kPrime.Zero()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return sig, nil
|
||||
}
|
||||
var privKeyBytes [scalarSize]byte
|
||||
privKeyScalar.PutBytes(&privKeyBytes)
|
||||
defer zeroArray(&privKeyBytes)
|
||||
for iteration := uint32(0); ; iteration++ {
|
||||
var k *secp256k1.ModNScalar
|
||||
// Step 6-9.
|
||||
//
|
||||
// Use RFC6979 to generate a deterministic nonce k in [1, n-1]
|
||||
// parameterized by the secret key, message being signed, extra data
|
||||
// that identifies the scheme, and an iteration count
|
||||
k = btcec.NonceRFC6979(
|
||||
privKeyBytes[:], hash, rfc6979ExtraDataV0[:], nil, iteration,
|
||||
)
|
||||
// Steps 10-15.
|
||||
sig, err := schnorrSign(&privKeyScalar, k, pub, hash, opts)
|
||||
k.Zero()
|
||||
if err != nil {
|
||||
// Try again with a new nonce.
|
||||
continue
|
||||
}
|
||||
return sig, nil
|
||||
}
|
||||
}
|
||||
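To round out the file above, here is a minimal sketch of the two functional options it defines: FastSign skips the final verify-after-sign check, and CustomNonce feeds caller-supplied aux randomness into the BIP-340 nonce derivation instead of RFC6979. The wrapper function and import paths are illustrative assumptions; Sign, FastSign, and CustomNonce are the functions defined above.

```go
package main

import (
	"crypto/rand"
	"crypto/sha256"

	btcec "next.orly.dev/pkg/crypto/ec"
	"next.orly.dev/pkg/crypto/ec/schnorr"
)

// signWithOptions signs the SHA-256 hash of msg using fresh aux randomness
// for the nonce and skips the post-sign verification step.
func signWithOptions(privKey *btcec.SecretKey, msg []byte) (*schnorr.Signature, error) {
	hash := sha256.Sum256(msg)

	// Caller-supplied auxiliary randomness for the BIP-340 nonce derivation.
	var aux [32]byte
	if _, err := rand.Read(aux[:]); err != nil {
		return nil, err
	}

	return schnorr.Sign(
		privKey, hash[:],
		schnorr.CustomNonce(aux), // BIP-340 aux-rand nonce instead of RFC6979
		schnorr.FastSign(),       // skip the verify-after-sign check
	)
}
```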
323
pkg/crypto/ec/schnorr/signature_test.go
Normal file
323
pkg/crypto/ec/schnorr/signature_test.go
Normal file
@@ -0,0 +1,323 @@
|
||||
// Copyright (c) 2013-2017 The btcsuite developers
|
||||
// Copyright (c) 2015-2021 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package schnorr
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
"testing"
|
||||
"testing/quick"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
"next.orly.dev/pkg/crypto/ec"
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
"next.orly.dev/pkg/encoders/hex"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
)
|
||||
|
||||
type bip340Test struct {
|
||||
secretKey string
|
||||
publicKey string
|
||||
auxRand string
|
||||
message string
|
||||
signature string
|
||||
verifyResult bool
|
||||
validPubKey bool
|
||||
expectErr error
|
||||
rfc6979 bool
|
||||
}
|
||||
|
||||
var bip340TestVectors = []bip340Test{
|
||||
{
|
||||
secretKey: "0000000000000000000000000000000000000000000000000000000000000003",
|
||||
publicKey: "F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
message: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
signature: "04E7F9037658A92AFEB4F25BAE5339E3DDCA81A353493827D26F16D92308E49E2A25E92208678A2DF86970DA91B03A8AF8815A8A60498B358DAF560B347AA557",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
rfc6979: true,
|
||||
},
|
||||
{
|
||||
secretKey: "0000000000000000000000000000000000000000000000000000000000000003",
|
||||
publicKey: "F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
message: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
signature: "E907831F80848D1069A5371B402410364BDF1C5F8307B0084C55F1CE2DCA821525F66A4A85EA8B71E482A74F382D2CE5EBEEE8FDB2172F477DF4900D310536C0",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
secretKey: "B7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF",
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000001",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "6896BD60EEAE296DB48A229FF71DFE071BDE413E6D43F917DC8DCF8C78DE33418906D11AC976ABCCB20B091292BFF4EA897EFCB639EA871CFA95F6DE339E4B0A",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
secretKey: "C90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B14E5C9",
|
||||
publicKey: "DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8",
|
||||
auxRand: "C87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906",
|
||||
message: "7E2D58D8B3BCDF1ABADEC7829054F90DDA9805AAB56C77333024B9D0A508B75C",
|
||||
signature: "5831AAEED7B44BB74E5EAB94BA9D4294C49BCF2A60728D8B4C200F50DD313C1BAB745879A5AD954A72C45A91C3A51D3C7ADEA98D82F8481E0E1E03674A6F3FB7",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
secretKey: "0B432B2677937381AEF05BB02A66ECD012773062CF3FA2549E44F58ED2401710",
|
||||
publicKey: "25D1DFF95105F5253C4022F628A996AD3A0D95FBF21D468A1B33F8C160D8F517",
|
||||
auxRand: "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
|
||||
message: "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
|
||||
signature: "7EB0509757E246F19449885651611CB965ECC1A187DD51B64FDA1EDC9637D5EC97582B9CB13DB3933705B32BA982AF5AF25FD78881EBB32771FC5922EFC66EA3",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
publicKey: "D69C3509BB99E412E68B0FE8544E72837DFA30746D8BE2AA65975F29D22DC7B9",
|
||||
message: "4DF3C3F68FCC83B27E9D42C90431A72499F17875C81A599B566C9889B9696703",
|
||||
signature: "00000000000000000000003B78CE563F89A0ED9414F5AA28AD0D96D6795F9C6376AFB1548AF603B3EB45C9F8207DEE1060CB71C04E80F593060B07D28308D7F4",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
publicKey: "EEFDEA4CDB677750A420FEE807EACF21EB9898AE79B9768766E4FAA04A2D4A34",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B",
|
||||
verifyResult: false,
|
||||
validPubKey: false,
|
||||
expectErr: secp256k1.ErrPubKeyNotOnCurve,
|
||||
},
|
||||
{
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "FFF97BD5755EEEA420453A14355235D382F6472F8568A18B2F057A14602975563CC27944640AC607CD107AE10923D9EF7A73C643E166BE5EBEAFA34B1AC553E2",
|
||||
verifyResult: false,
|
||||
validPubKey: true,
|
||||
expectErr: ErrSigRYIsOdd,
|
||||
},
|
||||
{
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "1FA62E331EDBC21C394792D2AB1100A7B432B013DF3F6FF4F99FCB33E0E1515F28890B3EDB6E7189B630448B515CE4F8622A954CFE545735AAEA5134FCCDB2BD",
|
||||
verifyResult: false,
|
||||
validPubKey: true,
|
||||
expectErr: ErrSigRYIsOdd,
|
||||
},
|
||||
{
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769961764B3AA9B2FFCB6EF947B6887A226E8D7C93E00C5ED0C1834FF0D0C2E6DA6",
|
||||
verifyResult: false,
|
||||
validPubKey: true,
|
||||
expectErr: ErrUnequalRValues,
|
||||
},
|
||||
{
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "0000000000000000000000000000000000000000000000000000000000000000123DDA8328AF9C23A94C1FEECFD123BA4FB73476F0D594DCB65C6425BD186051",
|
||||
verifyResult: false,
|
||||
validPubKey: true,
|
||||
expectErr: ErrSigRNotOnCurve,
|
||||
},
|
||||
{
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "00000000000000000000000000000000000000000000000000000000000000017615FBAF5AE28864013C099742DEADB4DBA87F11AC6754F93780D5A1837CF197",
|
||||
verifyResult: false,
|
||||
validPubKey: true,
|
||||
expectErr: ErrSigRNotOnCurve,
|
||||
},
|
||||
{
|
||||
publicKey: "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "4A298DACAE57395A15D0795DDBFD1DCB564DA82B0F269BC70A74F8220429BA1D69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B",
|
||||
verifyResult: false,
|
||||
validPubKey: true,
|
||||
expectErr: ErrUnequalRValues,
|
||||
},
|
||||
{
|
||||
publicKey: "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30",
|
||||
message: "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89",
|
||||
signature: "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B",
|
||||
verifyResult: false,
|
||||
validPubKey: false,
|
||||
expectErr: secp256k1.ErrPubKeyXTooBig,
|
||||
},
|
||||
{
|
||||
secretKey: "0340034003400340034003400340034003400340034003400340034003400340",
|
||||
publicKey: "778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
message: "",
|
||||
signature: "71535DB165ECD9FBBC046E5FFAEA61186BB6AD436732FCCC25291A55895464CF6069CE26BF03466228F19A3A62DB8A649F2D560FAC652827D1AF0574E427AB63",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
secretKey: "0340034003400340034003400340034003400340034003400340034003400340",
|
||||
publicKey: "778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
message: "11",
|
||||
signature: "08A20A0AFEF64124649232E0693C583AB1B9934AE63B4C3511F3AE1134C6A303EA3173BFEA6683BD101FA5AA5DBC1996FE7CACFC5A577D33EC14564CEC2BACBF",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
secretKey: "0340034003400340034003400340034003400340034003400340034003400340",
|
||||
publicKey: "778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
message: "0102030405060708090A0B0C0D0E0F1011",
|
||||
signature: "5130F39A4059B43BC7CAC09A19ECE52B5D8699D1A71E3C52DA9AFDB6B50AC370C4A482B77BF960F8681540E25B6771ECE1E5A37FD80E5A51897C5566A97EA5A5",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
{
|
||||
secretKey: "0340034003400340034003400340034003400340034003400340034003400340",
|
||||
publicKey: "778CAA53B4393AC467774D09497A87224BF9FAB6F6E68B23086497324D6FD117",
|
||||
auxRand: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
message: "99999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999",
|
||||
signature: "403B12B0D8555A344175EA7EC746566303321E5DBFA8BE6F091635163ECA79A8585ED3E3170807E7C03B720FC54C7B23897FCBA0E9D0B4A06894CFD249F22367",
|
||||
verifyResult: true,
|
||||
validPubKey: true,
|
||||
},
|
||||
}
|
||||
|
||||
// decodeHex decodes the passed hex string and returns the resulting bytes. It
|
||||
// panics if an error occurs. This is only used in the tests as a helper since
|
||||
// the only way it can fail is if there is an error in the test source code.
|
||||
func decodeHex(hexStr string) []byte {
|
||||
b, err := hex.Dec(hexStr)
|
||||
if err != nil {
|
||||
panic(
|
||||
"invalid hex string in test source: err " + err.Error() +
|
||||
", hex: " + hexStr,
|
||||
)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func TestSchnorrSign(t *testing.T) {
|
||||
// t.Parallel()
|
||||
for i, test := range bip340TestVectors {
|
||||
if len(test.secretKey) == 0 {
|
||||
continue
|
||||
}
|
||||
d := decodeHex(test.secretKey)
|
||||
privKey, _ := btcec.SecKeyFromBytes(d)
|
||||
var auxBytes [32]byte
|
||||
aux := decodeHex(test.auxRand)
|
||||
copy(auxBytes[:], aux)
|
||||
msg := decodeHex(test.message)
|
||||
var signOpts []SignOption
|
||||
if !test.rfc6979 {
|
||||
signOpts = []SignOption{CustomNonce(auxBytes)}
|
||||
}
|
||||
sig, err := Sign(privKey, msg, signOpts...)
|
||||
if err != nil {
|
||||
t.Fatalf("test #%v: sig generation failed: %v", i+1, err)
|
||||
}
|
||||
if strings.ToUpper(hex.Enc(sig.Serialize())) != test.signature {
|
||||
t.Fatalf(
|
||||
"test #%v: got signature %x : "+
|
||||
"want %s", i+1, sig.Serialize(), test.signature,
|
||||
)
|
||||
}
|
||||
pubKeyBytes := decodeHex(test.publicKey)
|
||||
err = schnorrVerify(sig, msg, pubKeyBytes)
|
||||
if err != nil {
|
||||
t.Fail()
|
||||
}
|
||||
verify := err == nil
|
||||
if test.verifyResult != verify {
|
||||
t.Fatalf(
|
||||
"test #%v: verification mismatch: "+
|
||||
"expected %v, got %v", i+1, test.verifyResult, verify,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSchnorrVerify(t *testing.T) {
|
||||
t.Parallel()
|
||||
for i, test := range bip340TestVectors {
|
||||
pubKeyBytes := decodeHex(test.publicKey)
|
||||
_, err := ParsePubKey(pubKeyBytes)
|
||||
switch {
|
||||
case !test.validPubKey && err != nil:
|
||||
if !errors.Is(err, test.expectErr) {
|
||||
t.Fatalf(
|
||||
"test #%v: pubkey validation should "+
|
||||
"have failed, expected %v, got %v", i,
|
||||
test.expectErr, err,
|
||||
)
|
||||
}
|
||||
continue
|
||||
case err != nil:
|
||||
t.Fatalf("test #%v: unable to parse pubkey: %v", i, err)
|
||||
}
|
||||
msg := decodeHex(test.message)
|
||||
sig, err := ParseSignature(decodeHex(test.signature))
|
||||
if err != nil {
|
||||
t.Fatalf("unable to parse sig: %v", err)
|
||||
}
|
||||
err = schnorrVerify(sig, msg, pubKeyBytes)
|
||||
if err != nil && test.verifyResult {
|
||||
t.Fatalf(
|
||||
"test #%v: verification shouldn't have failed: %v", i+1,
|
||||
err,
|
||||
)
|
||||
}
|
||||
verify := err == nil
|
||||
if test.verifyResult != verify {
|
||||
t.Fatalf(
|
||||
"test #%v: verificaiton mismatch: expected "+
|
||||
"%v, got %v", i, test.verifyResult, verify,
|
||||
)
|
||||
}
|
||||
if !test.verifyResult && test.expectErr != nil {
|
||||
if !errors.Is(err, test.expectErr) {
|
||||
t.Fatalf(
|
||||
"test #%v: expect error %v : got %v", i,
|
||||
test.expectErr, err,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestSchnorrSignNoMutate tests that generating a schnorr signature doesn't
|
||||
// modify/mutate the underlying secret key.
|
||||
func TestSchnorrSignNoMutate(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Assert that given a random secret key and message, we can generate
|
||||
// a signature from that w/o modifying the underlying secret key.
|
||||
f := func(privBytes, msg [32]byte) bool {
|
||||
privBytesCopy := privBytes
|
||||
privKey, _ := btcec.SecKeyFromBytes(privBytesCopy[:])
|
||||
// Generate a signature for secret key with our message.
|
||||
_, err := Sign(privKey, msg[:])
|
||||
if err != nil {
|
||||
t.Logf("unable to gen sig: %v", err)
|
||||
return false
|
||||
}
|
||||
// We should be able to re-derive the secret key from raw
|
||||
// bytes and have that match up again.
|
||||
privKeyCopy, _ := btcec.SecKeyFromBytes(privBytes[:])
|
||||
if *privKey != *privKeyCopy {
|
||||
t.Logf(
|
||||
"secret doesn't match: expected %v, got %v",
|
||||
spew.Sdump(privKeyCopy), spew.Sdump(privKey),
|
||||
)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
if err := quick.Check(f, nil); chk.T(err) {
|
||||
t.Fatalf("secret key modified: %v", err)
|
||||
}
|
||||
}
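For readers following the tests above, the sketch below shows what end-to-end BIP-340 signing and verification looks like with this API. It is a hedged illustration only: the import paths and the `Signature.Verify` method are assumptions based on the upstream btcec layout, while `NewSecretKey`, `Sign`, `Serialize`, and `PubKey` do appear elsewhere in this commit.

```go
package main

import (
	"crypto/sha256"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec"   // assumed path of the btcec wrapper package
	"next.orly.dev/pkg/crypto/ec/schnorr" // assumed path of this schnorr package
)

func main() {
	// Generate a fresh secret key; its x-only public key is what BIP-340 verifies against.
	sec, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}

	// Sign a 32-byte digest of the message.
	digest := sha256.Sum256([]byte("hello, schnorr"))
	sig, err := schnorr.Sign(sec, digest[:])
	if err != nil {
		panic(err)
	}

	// Verify is assumed to mirror upstream btcec: it takes the digest and the public key.
	ok := sig.Verify(digest[:], sec.PubKey())
	fmt.Printf("64-byte signature: %x\nverifies: %v\n", sig.Serialize(), ok)
}
```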
|
||||
BIN
pkg/crypto/ec/sec2-v2.pdf
Normal file
BIN
pkg/crypto/ec/sec2-v2.pdf
Normal file
Binary file not shown.
48
pkg/crypto/ec/seckey.go
Normal file
48
pkg/crypto/ec/seckey.go
Normal file
@@ -0,0 +1,48 @@
|
||||
// Copyright (c) 2013-2016 The btcsuite developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package btcec
|
||||
|
||||
import (
|
||||
"next.orly.dev/pkg/crypto/ec/secp256k1"
|
||||
)
|
||||
|
||||
// SecretKey is an alias for secp256k1.SecretKey, provided as a convenience mainly for signing things with the
// secret key without having to import the secp256k1 package directly.
|
||||
type SecretKey = secp256k1.SecretKey
|
||||
|
||||
// PrivateKey is an alias for SecretKey, provided as a convenience mainly for signing things with the secret key
// without having to import the secp256k1 package directly.
|
||||
//
|
||||
// Deprecated: use SecretKey - secret = one person; private = two or more (you don't share secret keys!)
|
||||
type PrivateKey = SecretKey
|
||||
|
||||
// SecKeyFromBytes returns the secret and public key pair derived from the
// serialized secret key passed as a byte slice.
|
||||
func SecKeyFromBytes(pk []byte) (*SecretKey, *PublicKey) {
|
||||
privKey := secp256k1.SecKeyFromBytes(pk)
|
||||
return privKey, privKey.PubKey()
|
||||
}
|
||||
|
||||
var PrivKeyFromBytes = SecKeyFromBytes
|
||||
|
||||
// NewSecretKey is a wrapper around secp256k1.GenerateSecretKey that returns a newly generated SecretKey.
|
||||
func NewSecretKey() (*SecretKey, error) { return secp256k1.GenerateSecretKey() }
|
||||
|
||||
// NewPrivateKey is an alias for NewSecretKey that returns a newly generated SecretKey.
|
||||
//
|
||||
// Deprecated: use SecretKey - secret = one person; private = two or more (you don't share secret keys!)
|
||||
var NewPrivateKey = NewSecretKey
|
||||
|
||||
// SecKeyFromScalar instantiates a new secret key from a scalar encoded as a
|
||||
// big integer.
|
||||
func SecKeyFromScalar(key *ModNScalar) *SecretKey {
|
||||
return &SecretKey{Key: *key}
|
||||
}
|
||||
|
||||
var PrivKeyFromScalar = SecKeyFromScalar
|
||||
|
||||
// SecKeyBytesLen defines the length in bytes of a serialized secret key.
|
||||
const SecKeyBytesLen = 32
|
||||
const PrivKeyBytesLen = SecKeyBytesLen
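As a rough usage sketch of the aliases above: generate a key, serialize it, and reconstruct the pair from raw bytes. The import path is assumed from the secp256k1 import in this file, and `Serialize`/`SerializeCompressed` are methods of the underlying secp256k1 types rather than anything defined here.

```go
package main

import (
	"bytes"
	"fmt"

	btcec "next.orly.dev/pkg/crypto/ec" // assumed import path for package btcec
)

func main() {
	// Generate a new secret key (wraps secp256k1.GenerateSecretKey).
	sec, err := btcec.NewSecretKey()
	if err != nil {
		panic(err)
	}

	// Serialize to the 32-byte form described by SecKeyBytesLen.
	raw := sec.Serialize()

	// Round-trip: SecKeyFromBytes rebuilds both the secret and public key.
	sec2, pub := btcec.SecKeyFromBytes(raw)
	fmt.Println("round-trip ok:", bytes.Equal(raw, sec2.Serialize()))
	fmt.Printf("compressed public key: %x\n", pub.SerializeCompressed())
}
```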
|
||||
23
pkg/crypto/ec/secp256k1/LICENSE
Normal file
23
pkg/crypto/ec/secp256k1/LICENSE
Normal file
@@ -0,0 +1,23 @@
|
||||
Due to the presence of substantial material derived from btcec this license is
|
||||
required.
|
||||
|
||||
However, where it differs, the changed parts are CC0 as with the rest of the
|
||||
content of this repository.
|
||||
|
||||
ISC License
|
||||
|
||||
Copyright (c) 2013-2017 The btcsuite developers
|
||||
Copyright (c) 2015-2020 The Decred developers
|
||||
Copyright (c) 2017 The Lightning Network Developers
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
54
pkg/crypto/ec/secp256k1/README.md
Normal file
54
pkg/crypto/ec/secp256k1/README.md
Normal file
@@ -0,0 +1,54 @@
|
||||
# secp256k1
|
||||
|
||||
> Due to the terrible state of the btcec library, and the ethical character of
|
||||
> dcred, the two main secp256k1 EC libraries in the Go language, it has become
|
||||
> necessary to refactor and clean up the mess of btcec's module versioning.
|
||||
>
|
||||
> In addition, the code has been updated to use several new features of Go that
|
||||
> were added to the language since these libraries were first created; notably, the
> precomputed tables are here generated directly as binary data instead of nasty base64
> source code.
|
||||
|
||||
Package secp256k1 implements optimized secp256k1 elliptic curve operations.
|
||||
|
||||
This package provides an optimized pure Go implementation of elliptic curve
|
||||
cryptography operations over the secp256k1 curve as well as data structures and
|
||||
functions for working with public and secret secp256k1 keys. See
|
||||
https://www.secg.org/sec2-v2.pdf for details on the standard.
|
||||
|
||||
In addition, sub packages are provided to produce, verify, parse, and serialize
|
||||
ECDSA signatures and EC-Schnorr-DCRv0 (a custom Schnorr-based signature scheme
|
||||
specific to Decred) signatures. See the README.md files in the relevant sub
|
||||
packages for more details about those aspects.
|
||||
|
||||
An overview of the features provided by this package is as follows:
|
||||
|
||||
- Secret key generation, serialization, and parsing
|
||||
- Public key generation, serialization and parsing per ANSI X9.62-1998
|
||||
- Parses uncompressed, compressed, and hybrid public keys
|
||||
- Serializes uncompressed and compressed public keys
|
||||
- Specialized types for performing optimized and constant time field operations
|
||||
- `FieldVal` type for working modulo the secp256k1 field prime
|
||||
- `ModNScalar` type for working modulo the secp256k1 group order
|
||||
- Elliptic curve operations in Jacobian projective coordinates
|
||||
- Point addition
|
||||
- Point doubling
|
||||
- Scalar multiplication with an arbitrary point
|
||||
- Scalar multiplication with the base point (group generator)
|
||||
- Point decompression from a given x coordinate
|
||||
- Nonce generation via RFC6979 with support for extra data and version
|
||||
information that can be used to prevent nonce reuse between signing algorithms
|
||||
|
||||
It also provides an implementation of the Go standard library `crypto/elliptic`
|
||||
`Curve` interface via the `S256` function so that it may be used with other
|
||||
packages in the standard library such as `crypto/tls`, `crypto/x509`, and
|
||||
`crypto/ecdsa`. However, in the case of ECDSA, it is highly recommended to use
|
||||
the `ecdsa` sub package of this package instead since it is optimized
|
||||
specifically for secp256k1 and is significantly faster as a result.
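To make the interoperability claim concrete, here is a minimal sketch under stated assumptions: the import path is taken from other files in this commit, and `SerializeCompressed` comes from the upstream public key type. As the text says, prefer the dedicated `ecdsa` sub package for real signing.

```go
package main

import (
	"crypto/ecdsa"
	"crypto/rand"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/secp256k1" // assumed import path
)

func main() {
	// Native API: generate a key, serialize and re-parse its public key.
	sec, err := secp256k1.GenerateSecretKey()
	if err != nil {
		panic(err)
	}
	pub, err := secp256k1.ParsePubKey(sec.PubKey().SerializeCompressed())
	if err != nil {
		panic(err)
	}
	fmt.Printf("public key X: %x\n", pub.X())

	// Standard library interop: S256 satisfies elliptic.Curve, so crypto/ecdsa
	// can generate keys over secp256k1 (via its generic, non-optimized path).
	stdKey, err := ecdsa.GenerateKey(secp256k1.S256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	fmt.Println("stdlib key on curve:", secp256k1.S256().IsOnCurve(stdKey.X, stdKey.Y))
}
```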
|
||||
|
||||
Although this package was primarily written for dcrd, it has intentionally been
|
||||
designed so it can be used as a standalone package for any projects needing to
|
||||
use optimized secp256k1 elliptic curve cryptography.
|
||||
|
||||
Finally, a comprehensive suite of tests is provided to ensure a high level of
|
||||
quality assurance.
|
||||
177
pkg/crypto/ec/secp256k1/bench_test.go
Normal file
177
pkg/crypto/ec/secp256k1/bench_test.go
Normal file
@@ -0,0 +1,177 @@
|
||||
// Copyright 2013-2016 The btcsuite developers
|
||||
// Copyright (c) 2015-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package secp256k1
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// BenchmarkAddNonConst benchmarks the secp256k1 curve AddNonConst function with
|
||||
// Z values of 1 so that the associated optimizations are used.
|
||||
func BenchmarkAddNonConst(b *testing.B) {
|
||||
p1 := jacobianPointFromHex(
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
)
|
||||
p2 := jacobianPointFromHex(
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var result JacobianPoint
|
||||
for i := 0; i < b.N; i++ {
|
||||
AddNonConst(&p1, &p2, &result)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkAddNonConstNotZOne benchmarks the secp256k1 curve AddNonConst
|
||||
// function with Z values other than one so the optimizations associated with
|
||||
// Z=1 aren't used.
|
||||
func BenchmarkAddNonConstNotZOne(b *testing.B) {
|
||||
x1 := new(FieldVal).SetHex("d3e5183c393c20e4f464acf144ce9ae8266a82b67f553af33eb37e88e7fd2718")
|
||||
y1 := new(FieldVal).SetHex("5b8f54deb987ec491fb692d3d48f3eebb9454b034365ad480dda0cf079651190")
|
||||
z1 := new(FieldVal).SetHex("2")
|
||||
x2 := new(FieldVal).SetHex("91abba6a34b7481d922a4bd6a04899d5a686f6cf6da4e66a0cb427fb25c04bd4")
|
||||
y2 := new(FieldVal).SetHex("03fede65e30b4e7576a2abefc963ddbf9fdccbf791b77c29beadefe49951f7d1")
|
||||
z2 := new(FieldVal).SetHex("3")
|
||||
p1 := MakeJacobianPoint(x1, y1, z1)
|
||||
p2 := MakeJacobianPoint(x2, y2, z2)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var result JacobianPoint
|
||||
for i := 0; i < b.N; i++ {
|
||||
AddNonConst(&p1, &p2, &result)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarBaseMultNonConst benchmarks multiplying a scalar by the base
|
||||
// point of the curve.
|
||||
func BenchmarkScalarBaseMultNonConst(b *testing.B) {
|
||||
k := hexToModNScalar("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var result JacobianPoint
|
||||
for i := 0; i < b.N; i++ {
|
||||
ScalarBaseMultNonConst(k, &result)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkSplitK benchmarks decomposing scalars into a balanced length-two
|
||||
// representation.
|
||||
func BenchmarkSplitK(b *testing.B) {
|
||||
// Values computed from the group half order and lambda such that they
|
||||
// exercise the decomposition edge cases and maximize the bit lengths of the
|
||||
// produced scalars.
|
||||
h := "7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0"
|
||||
negOne := new(ModNScalar).NegateVal(oneModN)
|
||||
halfOrder := hexToModNScalar(h)
|
||||
halfOrderMOne := new(ModNScalar).Add2(halfOrder, negOne)
|
||||
halfOrderPOne := new(ModNScalar).Add2(halfOrder, oneModN)
|
||||
lambdaMOne := new(ModNScalar).Add2(endoLambda, negOne)
|
||||
lambdaPOne := new(ModNScalar).Add2(endoLambda, oneModN)
|
||||
negLambda := new(ModNScalar).NegateVal(endoLambda)
|
||||
halfOrderMOneMLambda := new(ModNScalar).Add2(halfOrderMOne, negLambda)
|
||||
halfOrderMLambda := new(ModNScalar).Add2(halfOrder, negLambda)
|
||||
halfOrderPOneMLambda := new(ModNScalar).Add2(halfOrderPOne, negLambda)
|
||||
lambdaPHalfOrder := new(ModNScalar).Add2(endoLambda, halfOrder)
|
||||
lambdaPOnePHalfOrder := new(ModNScalar).Add2(lambdaPOne, halfOrder)
|
||||
scalars := []*ModNScalar{
|
||||
new(ModNScalar), // zero
|
||||
oneModN, // one
|
||||
negOne, // group order - 1 (aka -1 mod N)
|
||||
halfOrderMOneMLambda, // group half order - 1 - lambda
|
||||
halfOrderMLambda, // group half order - lambda
|
||||
halfOrderPOneMLambda, // group half order + 1 - lambda
|
||||
halfOrderMOne, // group half order - 1
|
||||
halfOrder, // group half order
|
||||
halfOrderPOne, // group half order + 1
|
||||
lambdaMOne, // lambda - 1
|
||||
endoLambda, // lambda
|
||||
lambdaPOne, // lambda + 1
|
||||
lambdaPHalfOrder, // lambda + group half order
|
||||
lambdaPOnePHalfOrder, // lambda + 1 + group half order
|
||||
}
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i += len(scalars) {
|
||||
for j := 0; j < len(scalars); j++ {
|
||||
_, _ = splitK(scalars[j])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarMultNonConst benchmarks multiplying a scalar by an arbitrary
|
||||
// point on the curve.
|
||||
func BenchmarkScalarMultNonConst(b *testing.B) {
|
||||
k := hexToModNScalar("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
point := jacobianPointFromHex(
|
||||
"34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
"0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
"1",
|
||||
)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var result JacobianPoint
|
||||
for i := 0; i < b.N; i++ {
|
||||
ScalarMultNonConst(k, &point, &result)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkNAF benchmarks conversion of a positive integer into its
|
||||
// non-adjacent form representation.
|
||||
func BenchmarkNAF(b *testing.B) {
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
kBytes := k.Bytes()
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
naf(kBytes)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkPubKeyDecompress benchmarks how long it takes to decompress the y
|
||||
// coordinate from a given public key x coordinate.
|
||||
func BenchmarkPubKeyDecompress(b *testing.B) {
|
||||
// Randomly generated keypair.
|
||||
// Secret key: 9e0699c91ca1e3b7e3c9ba71eb71c89890872be97576010fe593fbf3fd57e66d
|
||||
pubKeyX := new(FieldVal).SetHex("d2e670a19c6d753d1a6d8b20bd045df8a08fb162cf508956c31268c6d81ffdab")
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
var y FieldVal
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = DecompressY(pubKeyX, false, &y)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkParsePubKeyCompressed benchmarks how long it takes to parse a
|
||||
// compressed public key with an even y coordinate.
|
||||
func BenchmarkParsePubKeyCompressed(b *testing.B) {
|
||||
format := "02"
|
||||
x := "ce0b14fb842b1ba549fdd675c98075f12e9c510f8ef52bd021a9a1f4809d3b4d"
|
||||
pubKeyBytes := hexToBytes(format + x)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
ParsePubKey(pubKeyBytes)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkParsePubKeyUncompressed benchmarks how long it takes to parse an
|
||||
// uncompressed public key.
|
||||
func BenchmarkParsePubKeyUncompressed(b *testing.B) {
|
||||
format := "04"
|
||||
x := "11db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5c"
|
||||
y := "b2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3"
|
||||
pubKeyBytes := hexToBytes(format + x + y)
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
ParsePubKey(pubKeyBytes)
|
||||
}
|
||||
}
|
||||
1222
pkg/crypto/ec/secp256k1/curve.go
Normal file
1222
pkg/crypto/ec/secp256k1/curve.go
Normal file
File diff suppressed because it is too large
1012
pkg/crypto/ec/secp256k1/curve_test.go
Normal file
1012
pkg/crypto/ec/secp256k1/curve_test.go
Normal file
File diff suppressed because it is too large
58
pkg/crypto/ec/secp256k1/doc.go
Normal file
58
pkg/crypto/ec/secp256k1/doc.go
Normal file
@@ -0,0 +1,58 @@
|
||||
// Copyright (c) 2013-2014 The btcsuite developers
|
||||
// Copyright (c) 2015-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package secp256k1 implements optimized secp256k1 elliptic curve operations in
|
||||
// pure Go. This is an update that uses the Go 1.16 embed library instead of
|
||||
// generated code for the data.
|
||||
//
|
||||
// This package provides an optimized pure Go implementation of elliptic curve
|
||||
// cryptography operations over the secp256k1 curve as well as data structures and
|
||||
// functions for working with public and secret secp256k1 keys. See
|
||||
// https://www.secg.org/sec2-v2.pdf for details on the standard.
|
||||
//
|
||||
// In addition, sub packages are provided to produce, verify, parse, and serialize
|
||||
// ECDSA signatures and EC-Schnorr-DCRv0 (a custom Schnorr-based signature scheme
|
||||
// specific to Decred) signatures. See the README.md files in the relevant sub
|
||||
// packages for more details about those aspects.
|
||||
//
|
||||
// An overview of the features provided by this package is as follows:
|
||||
//
|
||||
// - Secret key generation, serialization, and parsing
|
||||
// - Public key generation, serialization and parsing per ANSI X9.62-1998
|
||||
// - Parses uncompressed, compressed, and hybrid public keys
|
||||
// - Serializes uncompressed and compressed public keys
|
||||
// - Specialized types for performing optimized and constant time field operations
|
||||
// - FieldVal type for working modulo the secp256k1 field prime
|
||||
// - ModNScalar type for working modulo the secp256k1 group order
|
||||
// - Elliptic curve operations in Jacobian projective coordinates
|
||||
// - Point addition
|
||||
// - Point doubling
|
||||
// - Scalar multiplication with an arbitrary point
|
||||
// - Scalar multiplication with the base point (group generator)
|
||||
// - Point decompression from a given x coordinate
|
||||
// - Nonce generation via RFC6979 with support for extra data and version
|
||||
// information that can be used to prevent nonce reuse between signing
|
||||
// algorithms
|
||||
//
|
||||
// It also provides an implementation of the Go standard library crypto/elliptic
|
||||
// Curve interface via the S256 function so that it may be used with other packages
|
||||
// in the standard library such as crypto/tls, crypto/x509, and crypto/ecdsa.
|
||||
// However, in the case of ECDSA, it is highly recommended to use the ecdsa sub
|
||||
// package of this package instead since it is optimized specifically for secp256k1
|
||||
// and is significantly faster as a result.
|
||||
//
|
||||
// Although this package was primarily written for dcrd, it has intentionally been
|
||||
// designed so it can be used as a standalone package for any projects needing to
|
||||
// use optimized secp256k1 elliptic curve cryptography.
|
||||
//
|
||||
// Finally, a comprehensive suite of tests is provided to ensure a high level of
|
||||
// quality assurance.
|
||||
//
|
||||
// # Use of secp256k1 in Decred
|
||||
//
|
||||
// At the time of this writing, the primary public key cryptography in widespread
|
||||
// use on the Decred network used to secure coins is based on elliptic curves
|
||||
// defined by the secp256k1 domain parameters.
|
||||
package secp256k1
|
||||
21
pkg/crypto/ec/secp256k1/ecdh.go
Normal file
21
pkg/crypto/ec/secp256k1/ecdh.go
Normal file
@@ -0,0 +1,21 @@
|
||||
// Copyright (c) 2015 The btcsuite developers
|
||||
// Copyright (c) 2015-2023 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package secp256k1
|
||||
|
||||
// GenerateSharedSecret generates a shared secret based on a secret key and a
|
||||
// public key using Diffie-Hellman key exchange (ECDH) (RFC 5903).
|
||||
// RFC5903 Section 9 states we should only return x.
|
||||
//
|
||||
// It is recommended to securely hash the result before using it as a
// cryptographic key.
|
||||
func GenerateSharedSecret(seckey *SecretKey, pubkey *PublicKey) []byte {
|
||||
var point, result JacobianPoint
|
||||
pubkey.AsJacobian(&point)
|
||||
ScalarMultNonConst(&seckey.Key, &point, &result)
|
||||
result.ToAffine()
|
||||
xBytes := result.X.Bytes()
|
||||
return xBytes[:]
|
||||
}
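A minimal sketch of the recommended flow: both parties derive the shared X coordinate and hash it before using it as a symmetric key. Only `GenerateSecretKey`, `PubKey`, and `GenerateSharedSecret` appear in this commit; the import path and the SHA-256 step (the "securely hash the result" advice above) are illustrative assumptions.

```go
package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/secp256k1" // assumed import path
)

func main() {
	alice, _ := secp256k1.GenerateSecretKey()
	bob, _ := secp256k1.GenerateSecretKey()

	// Each side combines its own secret key with the other's public key;
	// ECDH guarantees both arrive at the same X coordinate.
	k1 := sha256.Sum256(secp256k1.GenerateSharedSecret(alice, bob.PubKey()))
	k2 := sha256.Sum256(secp256k1.GenerateSharedSecret(bob, alice.PubKey()))

	fmt.Println("derived keys match:", bytes.Equal(k1[:], k2[:]))
}
```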
|
||||
35
pkg/crypto/ec/secp256k1/ecdh_test.go
Normal file
35
pkg/crypto/ec/secp256k1/ecdh_test.go
Normal file
@@ -0,0 +1,35 @@
|
||||
// Copyright (c) 2015-2016 The btcsuite developers
|
||||
// Copyright (c) 2015-2017 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package secp256k1
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"next.orly.dev/pkg/utils"
|
||||
)
|
||||
|
||||
func TestGenerateSharedSecret(t *testing.T) {
|
||||
secKey1, err := GenerateSecretKey()
|
||||
if err != nil {
|
||||
t.Errorf("secret key generation error: %s", err)
|
||||
return
|
||||
}
|
||||
secKey2, err := GenerateSecretKey()
|
||||
if err != nil {
|
||||
t.Errorf("secret key generation error: %s", err)
|
||||
return
|
||||
}
|
||||
pubKey1 := secKey1.PubKey()
|
||||
pubKey2 := secKey2.PubKey()
|
||||
secret1 := GenerateSharedSecret(secKey1, pubKey2)
|
||||
secret2 := GenerateSharedSecret(secKey2, pubKey1)
|
||||
if !utils.FastEqual(secret1, secret2) {
|
||||
t.Errorf(
|
||||
"ECDH failed, secrets mismatch - first: %x, second: %x",
|
||||
secret1, secret2,
|
||||
)
|
||||
}
|
||||
}
|
||||
247
pkg/crypto/ec/secp256k1/ellipticadaptor.go
Normal file
247
pkg/crypto/ec/secp256k1/ellipticadaptor.go
Normal file
@@ -0,0 +1,247 @@
|
||||
// Copyright 2020-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package secp256k1
|
||||
|
||||
// References:
|
||||
// [SECG]: Recommended Elliptic Curve Domain Parameters
|
||||
// https://www.secg.org/sec2-v2.pdf
|
||||
//
|
||||
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
// CurveParams contains the parameters for the secp256k1 curve.
|
||||
type CurveParams struct {
|
||||
// P is the prime used in the secp256k1 field.
|
||||
P *big.Int
|
||||
// N is the order of the secp256k1 curve group generated by the base point.
|
||||
N *big.Int
|
||||
// Gx and Gy are the x and y coordinate of the base point, respectively.
|
||||
Gx, Gy *big.Int
|
||||
// BitSize is the size of the underlying secp256k1 field in bits.
|
||||
BitSize int
|
||||
// H is the cofactor of the secp256k1 curve.
|
||||
H int
|
||||
// ByteSize is simply the bit size / 8 and is provided for convenience
|
||||
// since it is calculated repeatedly.
|
||||
ByteSize int
|
||||
}
|
||||
|
||||
// Curve parameters taken from [SECG] section 2.4.1.
|
||||
var curveParams = CurveParams{
|
||||
P: fromHex("fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f"),
|
||||
N: fromHex("fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141"),
|
||||
Gx: fromHex("79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798"),
|
||||
Gy: fromHex("483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8"),
|
||||
BitSize: 256,
|
||||
H: 1,
|
||||
ByteSize: 256 / 8,
|
||||
}
|
||||
|
||||
// Params returns the secp256k1 curve parameters for convenience.
|
||||
func Params() *CurveParams { return &curveParams }
|
||||
|
||||
// KoblitzCurve provides an implementation for secp256k1 that fits the ECC Curve
|
||||
// interface from crypto/elliptic.
|
||||
type KoblitzCurve struct {
|
||||
*elliptic.CurveParams
|
||||
}
|
||||
|
||||
// bigAffineToJacobian takes an affine point (x, y) as big integers and converts
|
||||
// it to Jacobian point with Z=1.
|
||||
func bigAffineToJacobian(x, y *big.Int, result *JacobianPoint) {
|
||||
result.X.SetByteSlice(x.Bytes())
|
||||
result.Y.SetByteSlice(y.Bytes())
|
||||
result.Z.SetInt(1)
|
||||
}
|
||||
|
||||
// jacobianToBigAffine takes a Jacobian point (x, y, z) as field values and
|
||||
// converts it to an affine point as big integers.
|
||||
func jacobianToBigAffine(point *JacobianPoint) (*big.Int, *big.Int) {
|
||||
point.ToAffine()
|
||||
// Convert the field values for the now affine point to big.Ints.
|
||||
x3, y3 := new(big.Int), new(big.Int)
|
||||
x3.SetBytes(point.X.Bytes()[:])
|
||||
y3.SetBytes(point.Y.Bytes()[:])
|
||||
return x3, y3
|
||||
}
|
||||
|
||||
// Params returns the parameters for the curve.
|
||||
//
|
||||
// This is part of the elliptic.Curve interface implementation.
|
||||
func (curve *KoblitzCurve) Params() *elliptic.CurveParams {
|
||||
return curve.CurveParams
|
||||
}
|
||||
|
||||
// IsOnCurve returns whether or not the affine point (x,y) is on the curve.
|
||||
//
|
||||
// This is part of the elliptic.Curve interface implementation. This function
|
||||
// differs from the crypto/elliptic algorithm since a = 0 not -3.
|
||||
func (curve *KoblitzCurve) IsOnCurve(x, y *big.Int) bool {
|
||||
// Convert big ints to a Jacobian point for faster arithmetic.
|
||||
var point JacobianPoint
|
||||
bigAffineToJacobian(x, y, &point)
|
||||
return isOnCurve(&point.X, &point.Y)
|
||||
}
|
||||
|
||||
// Add returns the sum of (x1,y1) and (x2,y2).
|
||||
//
|
||||
// This is part of the elliptic.Curve interface implementation.
|
||||
func (curve *KoblitzCurve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
|
||||
// The point at infinity is the identity according to the group law for
|
||||
// elliptic curve cryptography. Thus, ∞ + P = P and P + ∞ = P.
|
||||
if x1.Sign() == 0 && y1.Sign() == 0 {
|
||||
return x2, y2
|
||||
}
|
||||
if x2.Sign() == 0 && y2.Sign() == 0 {
|
||||
return x1, y1
|
||||
}
|
||||
// Convert the affine coordinates from big integers to Jacobian points,
|
||||
// do the point addition in Jacobian projective space, and convert the
|
||||
// Jacobian point back to affine big.Ints.
|
||||
var p1, p2, result JacobianPoint
|
||||
bigAffineToJacobian(x1, y1, &p1)
|
||||
bigAffineToJacobian(x2, y2, &p2)
|
||||
AddNonConst(&p1, &p2, &result)
|
||||
return jacobianToBigAffine(&result)
|
||||
}
|
||||
|
||||
// Double returns 2*(x1,y1).
|
||||
//
|
||||
// This is part of the elliptic.Curve interface implementation.
|
||||
func (curve *KoblitzCurve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
|
||||
if y1.Sign() == 0 {
|
||||
return new(big.Int), new(big.Int)
|
||||
}
|
||||
// Convert the affine coordinates from big integers to Jacobian points,
|
||||
// do the point doubling in Jacobian projective space, and convert the
|
||||
// Jacobian point back to affine big.Ints.
|
||||
var point, result JacobianPoint
|
||||
bigAffineToJacobian(x1, y1, &point)
|
||||
DoubleNonConst(&point, &result)
|
||||
return jacobianToBigAffine(&result)
|
||||
}
|
||||
|
||||
// moduloReduce reduces k from more than 32 bytes to 32 bytes and under. This
|
||||
// is done by doing a simple modulo curve.N. We can do this since G^N = 1 and
|
||||
// thus any other valid point on the elliptic curve has the same order.
|
||||
func moduloReduce(k []byte) []byte {
|
||||
// Since the order of G is curve.N, we can use a much smaller number by
|
||||
// doing modulo curve.N
|
||||
if len(k) > curveParams.ByteSize {
|
||||
tmpK := new(big.Int).SetBytes(k)
|
||||
tmpK.Mod(tmpK, curveParams.N)
|
||||
return tmpK.Bytes()
|
||||
}
|
||||
return k
|
||||
}
|
||||
|
||||
// ScalarMult returns k*(Bx, By) where k is a big endian integer.
|
||||
//
|
||||
// This is part of the elliptic.Curve interface implementation.
|
||||
func (curve *KoblitzCurve) ScalarMult(Bx, By *big.Int, k []byte) (
|
||||
*big.Int,
|
||||
*big.Int,
|
||||
) {
|
||||
// Convert the affine coordinates from big integers to Jacobian points,
|
||||
// do the multiplication in Jacobian projective space, and convert the
|
||||
// Jacobian point back to affine big.Ints.
|
||||
var kModN ModNScalar
|
||||
kModN.SetByteSlice(moduloReduce(k))
|
||||
var point, result JacobianPoint
|
||||
bigAffineToJacobian(Bx, By, &point)
|
||||
ScalarMultNonConst(&kModN, &point, &result)
|
||||
return jacobianToBigAffine(&result)
|
||||
}
|
||||
|
||||
// ScalarBaseMult returns k*G where G is the base point of the group and k is a
|
||||
// big endian integer.
|
||||
//
|
||||
// This is part of the elliptic.Curve interface implementation.
|
||||
func (curve *KoblitzCurve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
|
||||
// Perform the multiplication and convert the Jacobian point back to affine
|
||||
// big.Ints.
|
||||
var kModN ModNScalar
|
||||
kModN.SetByteSlice(moduloReduce(k))
|
||||
var result JacobianPoint
|
||||
ScalarBaseMultNonConst(&kModN, &result)
|
||||
return jacobianToBigAffine(&result)
|
||||
}
|
||||
|
||||
// X returns the x coordinate of the public key.
|
||||
func (p *PublicKey) X() *big.Int {
|
||||
return new(big.Int).SetBytes(p.x.Bytes()[:])
|
||||
}
|
||||
|
||||
// Y returns the y coordinate of the public key.
|
||||
func (p *PublicKey) Y() *big.Int {
|
||||
return new(big.Int).SetBytes(p.y.Bytes()[:])
|
||||
}
|
||||
|
||||
// ToECDSA returns the public key as a *ecdsa.PublicKey.
|
||||
func (p *PublicKey) ToECDSA() *ecdsa.PublicKey {
|
||||
return &ecdsa.PublicKey{
|
||||
Curve: S256(),
|
||||
X: p.X(),
|
||||
Y: p.Y(),
|
||||
}
|
||||
}
|
||||
|
||||
// ToECDSA returns the secret key as a *ecdsa.SecretKey.
|
||||
func (p *SecretKey) ToECDSA() *ecdsa.PrivateKey {
|
||||
var secretKeyBytes [SecKeyBytesLen]byte
|
||||
p.Key.PutBytes(&secretKeyBytes)
|
||||
var result JacobianPoint
|
||||
ScalarBaseMultNonConst(&p.Key, &result)
|
||||
x, y := jacobianToBigAffine(&result)
|
||||
newSecKey := &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: S256(),
|
||||
X: x,
|
||||
Y: y,
|
||||
},
|
||||
D: new(big.Int).SetBytes(secretKeyBytes[:]),
|
||||
}
|
||||
zeroArray32(&secretKeyBytes)
|
||||
return newSecKey
|
||||
}
|
||||
|
||||
// fromHex converts the passed hex string into a big integer pointer and will
|
||||
// panic if there is an error. This is only provided for the hard-coded
// constants so errors in the source code can be detected. It will only (and
|
||||
// must only) be called for initialization purposes.
|
||||
func fromHex(s string) *big.Int {
|
||||
if s == "" {
|
||||
return big.NewInt(0)
|
||||
}
|
||||
r, ok := new(big.Int).SetString(s, 16)
|
||||
if !ok {
|
||||
panic("invalid hex in source file: " + s)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// secp256k1 is a global instance of the KoblitzCurve implementation which in
|
||||
// turn embeds and implements elliptic.CurveParams.
|
||||
var secp256k1 = &KoblitzCurve{
|
||||
CurveParams: &elliptic.CurveParams{
|
||||
P: curveParams.P,
|
||||
N: curveParams.N,
|
||||
B: fromHex("0000000000000000000000000000000000000000000000000000000000000007"),
|
||||
Gx: curveParams.Gx,
|
||||
Gy: curveParams.Gy,
|
||||
BitSize: curveParams.BitSize,
|
||||
Name: "secp256k1",
|
||||
},
|
||||
}
|
||||
|
||||
// S256 returns an elliptic.Curve which implements secp256k1.
|
||||
func S256() *KoblitzCurve {
|
||||
return secp256k1
|
||||
}
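Because `ToECDSA` and `S256` are defined above, a secp256k1 key can be handed straight to the standard library. The sketch below is illustrative only (import path assumed); as the package docs note, the dedicated `ecdsa` sub package is preferred for production signing.

```go
package main

import (
	"crypto/ecdsa"
	"crypto/rand"
	"crypto/sha256"
	"fmt"

	"next.orly.dev/pkg/crypto/ec/secp256k1" // assumed import path
)

func main() {
	sec, err := secp256k1.GenerateSecretKey()
	if err != nil {
		panic(err)
	}

	// ToECDSA yields a *ecdsa.PrivateKey whose Curve is S256().
	stdSec := sec.ToECDSA()
	digest := sha256.Sum256([]byte("elliptic adaptor demo"))

	// crypto/ecdsa falls back to its generic path for non-NIST curves.
	r, s, err := ecdsa.Sign(rand.Reader, stdSec, digest[:])
	if err != nil {
		panic(err)
	}
	fmt.Println("stdlib ECDSA over S256 verifies:",
		ecdsa.Verify(&stdSec.PublicKey, digest[:], r, s))
}
```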
|
||||
51
pkg/crypto/ec/secp256k1/ellipticadaptor_bench_test.go
Normal file
51
pkg/crypto/ec/secp256k1/ellipticadaptor_bench_test.go
Normal file
@@ -0,0 +1,51 @@
|
||||
// Copyright 2013-2016 The btcsuite developers
|
||||
// Copyright (c) 2015-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package secp256k1
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// BenchmarkScalarBaseMultAdaptor benchmarks multiplying a scalar by the base
|
||||
// point of the curve via the method used to satisfy the elliptic.Curve
|
||||
// interface.
|
||||
func BenchmarkScalarBaseMultAdaptor(b *testing.B) {
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
curve := S256()
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
curve.ScalarBaseMult(k.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarBaseMultLargeAdaptor benchmarks multiplying an abnormally
|
||||
// large scalar by the base point of the curve via the method used to satisfy
|
||||
// the elliptic.Curve interface.
|
||||
func BenchmarkScalarBaseMultLargeAdaptor(b *testing.B) {
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c005751111111011111110")
|
||||
curve := S256()
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
curve.ScalarBaseMult(k.Bytes())
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkScalarMultAdaptor benchmarks multiplying a scalar by an arbitrary
|
||||
// point on the curve via the method used to satisfy the elliptic.Curve
|
||||
// interface.
|
||||
func BenchmarkScalarMultAdaptor(b *testing.B) {
|
||||
x := fromHex("34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6")
|
||||
y := fromHex("0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232")
|
||||
k := fromHex("d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575")
|
||||
curve := S256()
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
curve.ScalarMult(x, y, k.Bytes())
|
||||
}
|
||||
}
|
||||
427
pkg/crypto/ec/secp256k1/ellipticadaptor_test.go
Normal file
427
pkg/crypto/ec/secp256k1/ellipticadaptor_test.go
Normal file
@@ -0,0 +1,427 @@
|
||||
// Copyright (c) 2020-2022 The Decred developers
|
||||
// Use of this source code is governed by an ISC
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package secp256k1
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
"math/rand"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"lol.mleku.dev/chk"
|
||||
)
|
||||
|
||||
// randBytes returns a byte slice of the required size created from a random
|
||||
// value generated by the passed rng.
|
||||
func randBytes(t *testing.T, rng *rand.Rand, numBytes uint8) []byte {
|
||||
t.Helper()
|
||||
|
||||
buf := make([]byte, numBytes)
|
||||
if _, err := rng.Read(buf); chk.T(err) {
|
||||
t.Fatalf("failed to read random: %v", err)
|
||||
}
|
||||
|
||||
return buf
|
||||
}
|
||||
|
||||
// TestIsOnCurveAdaptor ensures the IsOnCurve method used to satisfy the
|
||||
// elliptic.Curve interface works as intended.
|
||||
func TestIsOnCurveAdaptor(t *testing.T) {
|
||||
s256 := S256()
|
||||
if !s256.IsOnCurve(s256.Params().Gx, s256.Params().Gy) {
|
||||
t.Fatal("generator point does not claim to be on the curve")
|
||||
}
|
||||
}
|
||||
|
||||
// isValidAffinePoint returns true if the point (x,y) is on the secp256k1 curve
|
||||
// or is the point at infinity.
|
||||
func isValidAffinePoint(x, y *big.Int) bool {
|
||||
if x.Sign() == 0 && y.Sign() == 0 {
|
||||
return true
|
||||
}
|
||||
return S256().IsOnCurve(x, y)
|
||||
}
|
||||
|
||||
// TestAddAffineAdaptor ensures that addition of points in affine coordinates
// via the method used to satisfy the elliptic.Curve interface works as intended
// for some edge cases and known good values.
|
||||
func TestAddAffineAdaptor(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string // test description
|
||||
x1, y1 string // hex encoded coordinates of first point to add
|
||||
x2, y2 string // hex encoded coordinates of second point to add
|
||||
x3, y3 string // hex encoded coordinates of expected point
|
||||
}{
|
||||
{
|
||||
// Addition with the point at infinity (left hand side).
|
||||
name: "∞ + P = P",
|
||||
x1: "0",
|
||||
y1: "0",
|
||||
x2: "d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
y2: "131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
x3: "d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
y3: "131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
}, {
|
||||
// Addition with the point at infinity (right hand side).
|
||||
name: "P + ∞ = P",
|
||||
x1: "d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
y1: "131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
x2: "0",
|
||||
y2: "0",
|
||||
x3: "d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
y3: "131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
}, {
|
||||
// Addition with different x values.
|
||||
name: "P(x1, y1) + P(x2, y2)",
|
||||
x1: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
y1: "0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
x2: "d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
y2: "131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
x3: "fd5b88c21d3143518d522cd2796f3d726793c88b3e05636bc829448e053fed69",
|
||||
y3: "21cf4f6a5be5ff6380234c50424a970b1f7e718f5eb58f68198c108d642a137f",
|
||||
}, {
|
||||
// Addition with same x opposite y.
|
||||
name: "P(x, y) + P(x, -y) = ∞",
|
||||
x1: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
y1: "0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
x2: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
y2: "f48e156428cf0276dc092da5856e182288d7569f97934a56fe44be60f0d359fd",
|
||||
x3: "0",
|
||||
y3: "0",
|
||||
}, {
|
||||
// Addition with same point.
|
||||
name: "P(x, y) + P(x, y) = 2P",
|
||||
x1: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
y1: "0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
x2: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
y2: "0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
x3: "59477d88ae64a104dbb8d31ec4ce2d91b2fe50fa628fb6a064e22582196b365b",
|
||||
y3: "938dc8c0f13d1e75c987cb1a220501bd614b0d3dd9eb5c639847e1240216e3b6",
|
||||
},
|
||||
}
|
||||
curve := S256()
|
||||
for _, test := range tests {
|
||||
// Parse the test data.
|
||||
x1, y1 := fromHex(test.x1), fromHex(test.y1)
|
||||
x2, y2 := fromHex(test.x2), fromHex(test.y2)
|
||||
x3, y3 := fromHex(test.x3), fromHex(test.y3)
|
||||
// Ensure the test data is using points that are actually on the curve
|
||||
// (or the point at infinity).
|
||||
if !isValidAffinePoint(x1, y1) {
|
||||
t.Errorf("%s: first point is not on curve", test.name)
|
||||
continue
|
||||
}
|
||||
if !isValidAffinePoint(x2, y2) {
|
||||
t.Errorf("%s: second point is not on curve", test.name)
|
||||
continue
|
||||
}
|
||||
if !isValidAffinePoint(x3, y3) {
|
||||
t.Errorf("%s: expected point is not on curve", test.name)
|
||||
continue
|
||||
}
|
||||
// Add the two points and ensure the result matches expected.
|
||||
rx, ry := curve.Add(x1, y1, x2, y2)
|
||||
if rx.Cmp(x3) != 0 || ry.Cmp(y3) != 0 {
|
||||
t.Errorf(
|
||||
"%s: wrong result\ngot: (%x, %x)\nwant: (%x, %x)",
|
||||
test.name, rx, ry, x3, y3,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestDoubleAffineAdaptor ensures that doubling of points in affine coordinates
// via the method used to satisfy the elliptic.Curve interface works as intended
// for some edge cases and known good values.
|
||||
func TestDoubleAffineAdaptor(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string // test description
|
||||
x1, y1 string // hex encoded coordinates of point to double
|
||||
x3, y3 string // hex encoded coordinates of expected point
|
||||
}{
|
||||
{
|
||||
// Doubling the point at infinity is still the point at infinity.
|
||||
name: "2*∞ = ∞ (point at infinity)",
|
||||
x1: "0",
|
||||
y1: "0",
|
||||
x3: "0",
|
||||
y3: "0",
|
||||
}, {
|
||||
name: "random point 1",
|
||||
x1: "e41387ffd8baaeeb43c2faa44e141b19790e8ac1f7ff43d480dc132230536f86",
|
||||
y1: "1b88191d430f559896149c86cbcb703193105e3cf3213c0c3556399836a2b899",
|
||||
x3: "88da47a089d333371bd798c548ef7caae76e737c1980b452d367b3cfe3082c19",
|
||||
y3: "3b6f659b09a362821dfcfefdbfbc2e59b935ba081b6c249eb147b3c2100b1bc1",
|
||||
}, {
|
||||
name: "random point 2",
|
||||
x1: "b3589b5d984f03ef7c80aeae444f919374799edf18d375cab10489a3009cff0c",
|
||||
y1: "c26cf343875b3630e15bccc61202815b5d8f1fd11308934a584a5babe69db36a",
|
||||
x3: "e193860172998751e527bb12563855602a227fc1f612523394da53b746bb2fb1",
|
||||
y3: "2bfcf13d2f5ab8bb5c611fab5ebbed3dc2f057062b39a335224c22f090c04789",
|
||||
}, {
|
||||
name: "random point 3",
|
||||
x1: "2b31a40fbebe3440d43ac28dba23eee71c62762c3fe3dbd88b4ab82dc6a82340",
|
||||
y1: "9ba7deb02f5c010e217607fd49d58db78ec273371ea828b49891ce2fd74959a1",
|
||||
x3: "2c8d5ef0d343b1a1a48aa336078eadda8481cb048d9305dc4fdf7ee5f65973a2",
|
||||
y3: "bb4914ac729e26d3cd8f8dc8f702f3f4bb7e0e9c5ae43335f6e94c2de6c3dc95",
|
||||
}, {
|
||||
name: "random point 4",
|
||||
x1: "61c64b760b51981fab54716d5078ab7dffc93730b1d1823477e27c51f6904c7a",
|
||||
y1: "ef6eb16ea1a36af69d7f66524c75a3a5e84c13be8fbc2e811e0563c5405e49bd",
|
||||
x3: "5f0dcdd2595f5ad83318a0f9da481039e36f135005420393e72dfca985b482f4",
|
||||
y3: "a01c849b0837065c1cb481b0932c441f49d1cab1b4b9f355c35173d93f110ae0",
|
||||
},
|
||||
}
|
||||
curve := S256()
|
||||
for _, test := range tests {
|
||||
// Parse test data.
|
||||
x1, y1 := fromHex(test.x1), fromHex(test.y1)
|
||||
x3, y3 := fromHex(test.x3), fromHex(test.y3)
|
||||
// Ensure the test data is using points that are actually on
|
||||
// the curve (or the point at infinity).
|
||||
if !isValidAffinePoint(x1, y1) {
|
||||
t.Errorf("%s: first point is not on the curve", test.name)
|
||||
continue
|
||||
}
|
||||
if !isValidAffinePoint(x3, y3) {
|
||||
t.Errorf("%s: expected point is not on the curve", test.name)
|
||||
continue
|
||||
}
|
||||
// Double the point and ensure the result matches expected.
|
||||
rx, ry := curve.Double(x1, y1)
|
||||
if rx.Cmp(x3) != 0 || ry.Cmp(y3) != 0 {
|
||||
t.Errorf(
|
||||
"%s: wrong result\ngot: (%x, %x)\nwant: (%x, %x)",
|
||||
test.name, rx, ry, x3, y3,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestScalarBaseMultAdaptor ensures the ScalarBaseMult method used to satisfy
|
||||
// the elliptic.Curve interface works as intended for some edge cases and known
|
||||
// good values.
|
||||
func TestScalarBaseMultAdaptor(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string // test description
|
||||
k string // hex encoded scalar
|
||||
rx, ry string // hex encoded coordinates of expected point
|
||||
}{
|
||||
{
|
||||
name: "zero",
|
||||
k: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
rx: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
ry: "0000000000000000000000000000000000000000000000000000000000000000",
|
||||
}, {
|
||||
name: "one (aka 1*G = G)",
|
||||
k: "0000000000000000000000000000000000000000000000000000000000000001",
|
||||
rx: "79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798",
|
||||
ry: "483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8",
|
||||
}, {
|
||||
name: "group order - 1 (aka -1*G = -G)",
|
||||
k: "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140",
|
||||
rx: "79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798",
|
||||
ry: "b7c52588d95c3b9aa25b0403f1eef75702e84bb7597aabe663b82f6f04ef2777",
|
||||
}, {
|
||||
name: "known good point 1",
|
||||
k: "aa5e28d6a97a2479a65527f7290311a3624d4cc0fa1578598ee3c2613bf99522",
|
||||
rx: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
ry: "0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
}, {
|
||||
name: "known good point 2",
|
||||
k: "7e2b897b8cebc6361663ad410835639826d590f393d90a9538881735256dfae3",
|
||||
rx: "d74bf844b0862475103d96a611cf2d898447e288d34b360bc885cb8ce7c00575",
|
||||
ry: "131c670d414c4546b88ac3ff664611b1c38ceb1c21d76369d7a7a0969d61d97d",
|
||||
}, {
|
||||
name: "known good point 3",
|
||||
k: "6461e6df0fe7dfd05329f41bf771b86578143d4dd1f7866fb4ca7e97c5fa945d",
|
||||
rx: "e8aecc370aedd953483719a116711963ce201ac3eb21d3f3257bb48668c6a72f",
|
||||
ry: "c25caf2f0eba1ddb2f0f3f47866299ef907867b7d27e95b3873bf98397b24ee1",
|
||||
}, {
|
||||
name: "known good point 4",
|
||||
k: "376a3a2cdcd12581efff13ee4ad44c4044b8a0524c42422a7e1e181e4deeccec",
|
||||
rx: "14890e61fcd4b0bd92e5b36c81372ca6fed471ef3aa60a3e415ee4fe987daba1",
|
||||
ry: "297b858d9f752ab42d3bca67ee0eb6dcd1c2b7b0dbe23397e66adc272263f982",
|
||||
}, {
|
||||
name: "known good point 5",
|
||||
k: "1b22644a7be026548810c378d0b2994eefa6d2b9881803cb02ceff865287d1b9",
|
||||
rx: "f73c65ead01c5126f28f442d087689bfa08e12763e0cec1d35b01751fd735ed3",
|
||||
ry: "f449a8376906482a84ed01479bd18882b919c140d638307f0c0934ba12590bde",
|
||||
},
|
||||
}
|
||||
curve := S256()
|
||||
for _, test := range tests {
|
||||
// Parse the test data.
|
||||
k := fromHex(test.k)
|
||||
xWant, yWant := fromHex(test.rx), fromHex(test.ry)
|
||||
// Ensure the test data is using points that are actually on the curve
|
||||
// (or the point at infinity).
|
||||
if !isValidAffinePoint(xWant, yWant) {
|
||||
t.Errorf("%s: expected point is not on curve", test.name)
|
||||
continue
|
||||
}
|
||||
rx, ry := curve.ScalarBaseMult(k.Bytes())
|
||||
if rx.Cmp(xWant) != 0 || ry.Cmp(yWant) != 0 {
|
||||
t.Errorf(
|
||||
"%s: wrong result:\ngot (%x, %x)\nwant (%x, %x)",
|
||||
test.name, rx, ry, xWant, yWant,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestScalarBaseMultAdaptorRandom ensures that the ScalarBaseMult method used
|
||||
// to satisfy the elliptic.Curve interface works as intended for
|
||||
// randomly-generated scalars of all lengths up to 40 bytes.
|
||||
func TestScalarBaseMultAdaptorRandom(t *testing.T) {
|
||||
// Use a unique random seed each test instance and log it if the tests fail.
|
||||
seed := time.Now().Unix()
|
||||
rng := rand.New(rand.NewSource(seed))
|
||||
defer func(t *testing.T, seed int64) {
|
||||
if t.Failed() {
|
||||
t.Logf("random seed: %d", seed)
|
||||
}
|
||||
}(t, seed)
|
||||
s256 := S256()
|
||||
const maxBytes = 40
|
||||
const iterations = 10
|
||||
for numBytes := uint8(1); numBytes < maxBytes; numBytes++ {
|
||||
for i := 0; i < iterations; i++ {
|
||||
// Generate a random scalar of the current length.
|
||||
k := randBytes(t, rng, numBytes)
|
||||
// Ensure the correct results by performing the multiplication with
|
||||
// both the func under test as well as the generic scalar mult func.
|
||||
x, y := s256.ScalarBaseMult(k)
|
||||
xWant, yWant := s256.ScalarMult(s256.Gx, s256.Gy, k)
|
||||
if x.Cmp(xWant) != 0 || y.Cmp(yWant) != 0 {
|
||||
t.Errorf(
|
||||
"bad output for %x: got (%x, %x), want (%x, %x)", k,
|
||||
x, y, xWant, yWant,
|
||||
)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestScalarMultAdaptor ensures the ScalarMult method used to satisfy the
|
||||
// elliptic.Curve interface works as intended for some edge cases and known good
|
||||
// values.
|
||||
func TestScalarMultAdaptor(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string // test description
|
||||
k string // hex encoded scalar
|
||||
x, y string // hex encoded coordinates of point to multiply
|
||||
rx, ry string // hex encoded coordinates of expected point
|
||||
}{
|
||||
{
|
||||
name: "0*P = ∞ (point at infinity)",
|
||||
k: "0",
|
||||
x: "7e660beda020e9cc20391cef85374576853b0f22b8925d5d81c5845bb834c21e",
|
||||
y: "2d114a5edb320cc9806527d1daf1bbb96a8fedc6f9e8ead421eaef2c7208e409",
|
||||
rx: "0",
|
||||
ry: "0",
|
||||
}, {
|
||||
name: "1*P = P",
|
||||
k: "1",
|
||||
x: "c00be8830995d1e44f1420dd3b90d3441fb66f6861c84a35f959c495a3be5440",
|
||||
y: "ecf9665e6eba45720de652a340600c7356efe24d228bfe6ea2043e7791c51bb7",
|
||||
rx: "c00be8830995d1e44f1420dd3b90d3441fb66f6861c84a35f959c495a3be5440",
|
||||
ry: "ecf9665e6eba45720de652a340600c7356efe24d228bfe6ea2043e7791c51bb7",
|
||||
}, {
|
||||
name: "(group order - 1)*P = -P (aka -1*P = -P)",
|
||||
k: "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140",
|
||||
x: "74a1ad6b5f76e39db2dd249410eac7f99e74c59cb83d2d0ed5ff1543da7703e9",
|
||||
y: "cc6157ef18c9c63cd6193d83631bbea0093e0968942e8c33d5737fd790e0db08",
|
||||
rx: "74a1ad6b5f76e39db2dd249410eac7f99e74c59cb83d2d0ed5ff1543da7703e9",
|
||||
ry: "339ea810e73639c329e6c27c9ce4415ff6c1f6976bd173cc2a8c80276f1f2127",
|
||||
}, {
|
||||
name: "(group order - 1)*-P = P (aka -1*-P = -P, with P from prev test)",
|
||||
k: "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140",
|
||||
x: "74a1ad6b5f76e39db2dd249410eac7f99e74c59cb83d2d0ed5ff1543da7703e9",
|
||||
y: "339ea810e73639c329e6c27c9ce4415ff6c1f6976bd173cc2a8c80276f1f2127",
|
||||
rx: "74a1ad6b5f76e39db2dd249410eac7f99e74c59cb83d2d0ed5ff1543da7703e9",
|
||||
ry: "cc6157ef18c9c63cd6193d83631bbea0093e0968942e8c33d5737fd790e0db08",
|
||||
}, {
|
||||
name: "known good point from base mult tests (aka k*G)",
|
||||
k: "aa5e28d6a97a2479a65527f7290311a3624d4cc0fa1578598ee3c2613bf99522",
|
||||
x: "79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798",
|
||||
y: "483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8",
|
||||
rx: "34f9460f0e4f08393d192b3c5133a6ba099aa0ad9fd54ebccfacdfa239ff49c6",
|
||||
ry: "0b71ea9bd730fd8923f6d25a7a91e7dd7728a960686cb5a901bb419e0f2ca232",
|
||||
}, {
|
||||
name: "known good result 1",
|
||||
k: "7e2b897b8cebc6361663ad410835639826d590f393d90a9538881735256dfae3",
|
||||
x: "1697ffa6fd9de627c077e3d2fe541084ce13300b0bec1146f95ae57f0d0bd6a5",
|
||||
y: "b9c398f186806f5d27561506e4557433a2cf15009e498ae7adee9d63d01b2396",
|
||||
rx: "6951f3b50aafbc63e21707dd53623b7f42badd633a0567ef1b37f6e42a4237ad",
|
||||
ry: "9c930796a49110122fbfdedc36418af726197ed950b783a2d29058f8c02130de",
|
||||
}, {
|
||||
name: "known good result 2",
|
||||
k: "6461e6df0fe7dfd05329f41bf771b86578143d4dd1f7866fb4ca7e97c5fa945d",
|
||||
x: "659214ac1a1790023f53c4cf55a0a63b9e20c1151efa971215b395a558aa151",
|
||||
y: "b126363aa4243d2759320a356230569a4eea355d9dabd94ed7f4590701e5364d",
|
||||
rx: "4ffad856833396ef753c0bd4ea40319295f107c476793df0adac2caea53b3df4",
|
||||
ry: "586fa6b1e9a3ff7df8a2b9b3698badcf40aa06af5600fefc56dd8ae4db5451c5",
|
||||
}, {
|
||||
name: "known good result 3",
|
||||
k: "376a3a2cdcd12581efff13ee4ad44c4044b8a0524c42422a7e1e181e4deeccec",
|
||||
x: "3f0e80e574456d8f8fa64e044b2eb72ea22eb53fe1efe3a443933aca7f8cb0e3",
|
||||
y: "cb66d7d7296cbc91e90b9c08485d01b39501253aa65b53a4cb0289e2ea5f404f",
|
||||
rx: "35ae6480b18e48070709d9276ed97a50c6ee1fc05ac44386c85826533233d28f",
|
||||
ry: "f88abee3efabd95e80ce8c664bbc3d4d12b24e1a0f4d2b98ba6542789c6715fd",
|
||||
}, {
|
||||
name: "known good result 4",
|
||||
k: "1b22644a7be026548810c378d0b2994eefa6d2b9881803cb02ceff865287d1b9",
|
||||
x: "d7924d4f7d43ea965a465ae3095ff41131e5946f3c85f79e44adbcf8e27e080e",
|
||||
y: "581e2872a86c72a683842ec228cc6defea40af2bd896d3a5c504dc9ff6a26b58",
|
||||
rx: "cca7f9a4b0d379c31c438050e163a8945f2f910498bd3b545be20ed862bd6cd9",
|
||||
ry: "cfc7bbf37bef62da6e5753ed419168fa1376a3fe949c139a8dd0f5303f4ae947",
|
||||
}, {
|
||||
name: "known good result 5",
|
||||
k: "7f5b2cb4b43840c75e4afad83d792e1965d8c21c1109505f45c7d46df422d73e",
|
||||
x: "bce74de6d5f98dc027740c2bbff05b6aafe5fd8d103f827e48894a2bd3460117",
|
||||
y: "5bea1fa17a41b115525a3e7dbf0d8d5a4f7ce5c6fc73a6f4f216512417c9f6b4",
|
||||
rx: "3d96b9290fe6c4f2d62fe2175f4333907d0c3637fada1010b45c7d80690e16de",
|
||||
ry: "d59c0e8192d7fbd4846172d6479630b751cd03d0d9be0dca2759c6212b70575d",
|
||||
}, {
|
||||
// From btcd issue #709.
|
||||
name: "early implementation regression point",
|
||||
k: "a2e8ba2e8ba2e8ba2e8ba2e8ba2e8ba219b51835b55cc30ebfe2f6599bc56f58",
|
||||
x: "000000000000000000000000000000000000000000000000000000000000002c",
|
||||
y: "420e7a99bba18a9d3952597510fd2b6728cfeafc21a4e73951091d4d8ddbe94e",
|
||||
rx: "a2112dcdfbcd10ae1133a358de7b82db68e0a3eb4b492cc8268d1e7118c98788",
|
||||
ry: "27fc7463b7bb3c5f98ecf2c84a6272bb1681ed553d92c69f2dfe25a9f9fd3836",
|
||||
},
|
||||
}
|
||||
curve := S256()
|
||||
for _, test := range tests {
|
||||
// Parse the test data.
|
||||
k := fromHex(test.k)
|
||||
x, y := fromHex(test.x), fromHex(test.y)
|
||||
xWant, yWant := fromHex(test.rx), fromHex(test.ry)
|
||||
// Ensure the test data is using points that are actually on the curve
|
||||
// (or the point at infinity).
|
||||
if !isValidAffinePoint(x, y) {
|
||||
t.Errorf("%s: point is not on curve", test.name)
|
||||
continue
|
||||
}
|
||||
if !isValidAffinePoint(xWant, yWant) {
|
||||
t.Errorf("%s: expected point is not on curve", test.name)
|
||||
continue
|
||||
}
|
||||
// Perform scalar point multiplication ensure the result matches
|
||||
// expected.
|
||||
rx, ry := curve.ScalarMult(x, y, k.Bytes())
|
||||
if rx.Cmp(xWant) != 0 || ry.Cmp(yWant) != 0 {
|
||||
t.Errorf(
|
||||
"%s: wrong result\ngot: (%x, %x)\nwant: (%x, %x)",
|
||||
test.name, rx, ry, xWant, yWant,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
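The loop above relies on two small helpers, `fromHex` and `isValidAffinePoint`, that are defined elsewhere in the package's test files and do not appear in this hunk. A minimal sketch of what they are assumed to look like follows; the names come from the test itself, but the bodies here are illustrative only and assume `S256()` exposes the usual `IsOnCurve` check and that the tests encode the point at infinity as (0, 0).

```go
package secp256k1

import (
    "fmt"
    "math/big"
)

// fromHex converts the passed hex string into a big integer and panics on
// error. This is only intended for hard-coded test vectors, so the panic is
// acceptable. (Sketch; the real helper may differ.)
func fromHex(s string) *big.Int {
    if s == "" {
        return big.NewInt(0)
    }
    r, ok := new(big.Int).SetString(s, 16)
    if !ok {
        panic(fmt.Sprintf("invalid hex in test source: %q", s))
    }
    return r
}

// isValidAffinePoint returns whether (x, y) is on the secp256k1 curve or is
// the point at infinity, which the tests encode as (0, 0). (Sketch.)
func isValidAffinePoint(x, y *big.Int) bool {
    if x.Sign() == 0 && y.Sign() == 0 {
        return true // point at infinity
    }
    return S256().IsOnCurve(x, y)
}
```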
56
pkg/crypto/ec/secp256k1/error.go
Normal file
@@ -0,0 +1,56 @@
// Copyright (c) 2020 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package secp256k1

// ErrorKind identifies a kind of error. It has full support for errors.Is and
// errors.As, so the caller can directly check against an error kind when
// determining the reason for an error.
type ErrorKind string

// These constants are used to identify a specific Error.
const (
    // ErrPubKeyInvalidLen indicates that the length of a serialized public
    // key is not one of the allowed lengths.
    ErrPubKeyInvalidLen = ErrorKind("ErrPubKeyInvalidLen")
    // ErrPubKeyInvalidFormat indicates an attempt was made to parse a public
    // key that does not specify one of the supported formats.
    ErrPubKeyInvalidFormat = ErrorKind("ErrPubKeyInvalidFormat")
    // ErrPubKeyXTooBig indicates that the x coordinate for a public key
    // is greater than or equal to the prime of the field underlying the group.
    ErrPubKeyXTooBig = ErrorKind("ErrPubKeyXTooBig")
    // ErrPubKeyYTooBig indicates that the y coordinate for a public key is
    // greater than or equal to the prime of the field underlying the group.
    ErrPubKeyYTooBig = ErrorKind("ErrPubKeyYTooBig")
    // ErrPubKeyNotOnCurve indicates that a public key is not a point on the
    // secp256k1 curve.
    ErrPubKeyNotOnCurve = ErrorKind("ErrPubKeyNotOnCurve")
    // ErrPubKeyMismatchedOddness indicates that a hybrid public key specified
    // an oddness of the y coordinate that does not match the actual oddness of
    // the provided y coordinate.
    ErrPubKeyMismatchedOddness = ErrorKind("ErrPubKeyMismatchedOddness")
)

// Error satisfies the error interface and prints human-readable errors.
func (err ErrorKind) Error() string { return string(err) }

// Error identifies an error related to public key cryptography using the
// secp256k1 curve. It has full support for errors.Is and errors.As, so the
// caller can ascertain the specific reason for the error by checking
// the underlying error.
type Error struct {
    Err         error
    Description string
}

// Error satisfies the error interface and prints human-readable errors.
func (err Error) Error() string { return err.Description }

// Unwrap returns the underlying wrapped error.
func (err Error) Unwrap() (ee error) { return err.Err }

// makeError creates an Error given a set of arguments.
func makeError(kind ErrorKind, desc string) (err error) {
    return Error{Err: kind, Description: desc}
}
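For a caller outside the package, the point of ErrorKind and Error is that a failed parse can be matched without string comparison. A minimal usage sketch is below, assuming ParsePubKey rejects a too-short serialization with an error of kind ErrPubKeyInvalidLen; the import path is taken from the example_test.go later in this commit.

```go
package main

import (
    "errors"
    "fmt"

    "next.orly.dev/pkg/crypto/ec/secp256k1"
)

func main() {
    // Two bytes is not a valid serialized public key length; the parse is
    // expected to fail with a kinded error (assumption: ErrPubKeyInvalidLen).
    _, err := secp256k1.ParsePubKey([]byte{0x02, 0x01})
    if errors.Is(err, secp256k1.ErrPubKeyInvalidLen) {
        fmt.Println("rejected: invalid serialized public key length")
    }

    // errors.As recovers the ErrorKind itself, e.g. for logging or switching.
    var kind secp256k1.ErrorKind
    if errors.As(err, &kind) {
        fmt.Println("kind:", kind)
    }
}
```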
136
pkg/crypto/ec/secp256k1/error_test.go
Normal file
@@ -0,0 +1,136 @@
// Copyright (c) 2020 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package secp256k1

import (
    "errors"
    "testing"
)

// TestErrorKindStringer tests the stringized output for the ErrorKind type.
func TestErrorKindStringer(t *testing.T) {
    tests := []struct {
        in   ErrorKind
        want string
    }{
        {ErrPubKeyInvalidLen, "ErrPubKeyInvalidLen"},
        {ErrPubKeyInvalidFormat, "ErrPubKeyInvalidFormat"},
        {ErrPubKeyXTooBig, "ErrPubKeyXTooBig"},
        {ErrPubKeyYTooBig, "ErrPubKeyYTooBig"},
        {ErrPubKeyNotOnCurve, "ErrPubKeyNotOnCurve"},
        {ErrPubKeyMismatchedOddness, "ErrPubKeyMismatchedOddness"},
    }
    for i, test := range tests {
        result := test.in.Error()
        if result != test.want {
            t.Errorf("#%d: got: %s want: %s", i, result, test.want)
            continue
        }
    }
}

// TestError tests the error output for the Error type.
func TestError(t *testing.T) {
    tests := []struct {
        in   Error
        want string
    }{
        {
            Error{Description: "some error"},
            "some error",
        }, {
            Error{Description: "human-readable error"},
            "human-readable error",
        },
    }
    for i, test := range tests {
        result := test.in.Error()
        if result != test.want {
            t.Errorf("#%d: got: %s want: %s", i, result, test.want)
            continue
        }
    }
}

// TestErrorKindIsAs ensures both ErrorKind and Error can be identified as being
// a specific error kind via errors.Is and unwrapped via errors.As.
func TestErrorKindIsAs(t *testing.T) {
    tests := []struct {
        name      string
        err       error
        target    error
        wantMatch bool
        wantAs    ErrorKind
    }{
        {
            name: "ErrPubKeyInvalidLen == ErrPubKeyInvalidLen",
            err: ErrPubKeyInvalidLen,
            target: ErrPubKeyInvalidLen,
            wantMatch: true,
            wantAs: ErrPubKeyInvalidLen,
        }, {
            name: "Error.ErrPubKeyInvalidLen == ErrPubKeyInvalidLen",
            err: makeError(ErrPubKeyInvalidLen, ""),
            target: ErrPubKeyInvalidLen,
            wantMatch: true,
            wantAs: ErrPubKeyInvalidLen,
        }, {
            name: "Error.ErrPubKeyInvalidLen == Error.ErrPubKeyInvalidLen",
            err: makeError(ErrPubKeyInvalidLen, ""),
            target: makeError(ErrPubKeyInvalidLen, ""),
            wantMatch: true,
            wantAs: ErrPubKeyInvalidLen,
        }, {
            name: "ErrPubKeyInvalidFormat != ErrPubKeyInvalidLen",
            err: ErrPubKeyInvalidFormat,
            target: ErrPubKeyInvalidLen,
            wantMatch: false,
            wantAs: ErrPubKeyInvalidFormat,
        }, {
            name: "Error.ErrPubKeyInvalidFormat != ErrPubKeyInvalidLen",
            err: makeError(ErrPubKeyInvalidFormat, ""),
            target: ErrPubKeyInvalidLen,
            wantMatch: false,
            wantAs: ErrPubKeyInvalidFormat,
        }, {
            name: "ErrPubKeyInvalidFormat != Error.ErrPubKeyInvalidLen",
            err: ErrPubKeyInvalidFormat,
            target: makeError(ErrPubKeyInvalidLen, ""),
            wantMatch: false,
            wantAs: ErrPubKeyInvalidFormat,
        }, {
            name: "Error.ErrPubKeyInvalidFormat != Error.ErrPubKeyInvalidLen",
            err: makeError(ErrPubKeyInvalidFormat, ""),
            target: makeError(ErrPubKeyInvalidLen, ""),
            wantMatch: false,
            wantAs: ErrPubKeyInvalidFormat,
        },
    }
    for _, test := range tests {
        // Ensure the error matches or not depending on the expected result.
        result := errors.Is(test.err, test.target)
        if result != test.wantMatch {
            t.Errorf(
                "%s: incorrect error identification -- got %v, want %v",
                test.name, result, test.wantMatch,
            )
            continue
        }
        // Ensure the underlying error code can be unwrapped and is the expected
        // code.
        var kind ErrorKind
        if !errors.As(test.err, &kind) {
            t.Errorf("%s: unable to unwrap to error code", test.name)
            continue
        }
        if kind != test.wantAs {
            t.Errorf(
                "%s: unexpected unwrapped error code -- got %v, want %v",
                test.name, kind, test.wantAs,
            )
            continue
        }
    }
}
132
pkg/crypto/ec/secp256k1/example_test.go
Normal file
@@ -0,0 +1,132 @@
// Copyright (c) 2014 The btcsuite developers
// Copyright (c) 2015-2020 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package secp256k1_test

import (
    "crypto/aes"
    "crypto/cipher"
    "encoding/binary"
    "fmt"

    "next.orly.dev/pkg/crypto/ec/secp256k1"
    "next.orly.dev/pkg/crypto/sha256"
    "next.orly.dev/pkg/encoders/hex"
)

// This example demonstrates use of GenerateSharedSecret to encrypt a message
// for a recipient's public key, and subsequently decrypt the message using the
// recipient's secret key.
func Example_encryptDecryptMessage() {
    newAEAD := func(key []byte) (cipher.AEAD, error) {
        block, err := aes.NewCipher(key)
        if err != nil {
            return nil, err
        }
        return cipher.NewGCM(block)
    }
    // Decode the hex-encoded pubkey of the recipient.
    pubKeyBytes, err := hex.Dec(
        "04115c42e757b2efb7671c578530ec191a1359381e6a71127a9d37c486fd30da" +
            "e57e76dc58f693bd7e7010358ce6b165e483a2921010db67ac11b1b51b651953d2",
    ) // uncompressed pubkey
    if err != nil {
        fmt.Println(err)
        return
    }
    pubKey, err := secp256k1.ParsePubKey(pubKeyBytes)
    if err != nil {
        fmt.Println(err)
        return
    }
    // Derive an ephemeral public/secret keypair for performing ECDHE with
    // the recipient.
    ephemeralSecKey, err := secp256k1.GenerateSecretKey()
    if err != nil {
        fmt.Println(err)
        return
    }
    ephemeralPubKey := ephemeralSecKey.PubKey().SerializeCompressed()
    // Using ECDHE, derive a shared symmetric key for encryption of the plaintext.
    cipherKey := sha256.Sum256(
        secp256k1.GenerateSharedSecret(
            ephemeralSecKey,
            pubKey,
        ),
    )
    // Seal the message using an AEAD. Here we use AES-256-GCM.
    // The ephemeral public key must be included in this message, and becomes
    // the authenticated data for the AEAD.
    //
    // Note that unless a unique nonce can be guaranteed, the ephemeral
    // and/or shared keys must not be reused to encrypt different messages.
    // Doing so destroys the security of the scheme. Random nonces may be
    // used if XChaCha20-Poly1305 is used instead, but the message must then
    // also encode the nonce (which we don't do here).
    //
    // Since a new ephemeral key is generated for every message, ensuring there
    // is no key reuse, and AES-GCM permits the nonce to be used as a counter,
    // the nonce is intentionally initialized to all zeros so it acts like the
    // first (and only) use of a counter.
    plaintext := []byte("test message")
    aead, err := newAEAD(cipherKey[:])
    if err != nil {
        fmt.Println(err)
        return
    }
    nonce := make([]byte, aead.NonceSize())
    ciphertext := make([]byte, 4+len(ephemeralPubKey))
    binary.LittleEndian.PutUint32(ciphertext, uint32(len(ephemeralPubKey)))
    copy(ciphertext[4:], ephemeralPubKey)
    ciphertext = aead.Seal(ciphertext, nonce, plaintext, ephemeralPubKey)
    // The remainder of this example is performed by the recipient on the
    // ciphertext shared by the sender.
    //
    // Decode the hex-encoded secret key.
    pkBytes, err := hex.Dec(
        "a11b0a4e1a132305652ee7a8eb7848f6ad5ea381e3ce20a2c086a2e388230811",
    )
    if err != nil {
        fmt.Println(err)
        return
    }
    secKey := secp256k1.SecKeyFromBytes(pkBytes)
    // Read the sender's ephemeral public key from the start of the message.
    // Error handling for inappropriate pubkey lengths is elided here for
    // brevity.
    pubKeyLen := binary.LittleEndian.Uint32(ciphertext[:4])
    senderPubKeyBytes := ciphertext[4 : 4+pubKeyLen]
    senderPubKey, err := secp256k1.ParsePubKey(senderPubKeyBytes)
    if err != nil {
        fmt.Println(err)
        return
    }
    // Derive the key used to seal the message, this time from the
    // recipient's secret key and the sender's public key.
    recoveredCipherKey := sha256.Sum256(
        secp256k1.GenerateSharedSecret(
            secKey,
            senderPubKey,
        ),
    )
    // Open the sealed message.
    aead, err = newAEAD(recoveredCipherKey[:])
    if err != nil {
        fmt.Println(err)
        return
    }
    nonce = make([]byte, aead.NonceSize())
    recoveredPlaintext, err := aead.Open(
        nil, nonce, ciphertext[4+pubKeyLen:],
        senderPubKeyBytes,
    )
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(string(recoveredPlaintext))
    // Output:
    // test message
}
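The example deliberately elides length validation when the recipient reads the ephemeral public key back out of the message. The sketch below shows one way a recipient-side helper could add those checks; `decryptECDHE` is a hypothetical name, the package name is arbitrary, and the secret key type is assumed to be `*secp256k1.SecretKey` based on the `GenerateSecretKey`/`SecKeyFromBytes` calls above.

```go
// Package ecdhesketch is illustrative only and not part of the repository.
package ecdhesketch

import (
    "crypto/aes"
    "crypto/cipher"
    "encoding/binary"
    "fmt"

    "next.orly.dev/pkg/crypto/ec/secp256k1"
    "next.orly.dev/pkg/crypto/sha256"
)

// decryptECDHE mirrors the recipient side of Example_encryptDecryptMessage,
// adding the length checks the example elides for brevity.
func decryptECDHE(secKey *secp256k1.SecretKey, msg []byte) ([]byte, error) {
    if len(msg) < 4 {
        return nil, fmt.Errorf("message too short: %d bytes", len(msg))
    }
    pubKeyLen := uint64(binary.LittleEndian.Uint32(msg[:4]))
    if uint64(len(msg)) < 4+pubKeyLen {
        return nil, fmt.Errorf("truncated ephemeral public key")
    }
    senderPubKeyBytes := msg[4 : 4+pubKeyLen]
    senderPubKey, err := secp256k1.ParsePubKey(senderPubKeyBytes)
    if err != nil {
        return nil, err
    }
    // Re-derive the shared AEAD key from the recipient's secret key and the
    // sender's ephemeral public key, exactly as the example does.
    cipherKey := sha256.Sum256(
        secp256k1.GenerateSharedSecret(secKey, senderPubKey),
    )
    block, err := aes.NewCipher(cipherKey[:])
    if err != nil {
        return nil, err
    }
    aead, err := cipher.NewGCM(block)
    if err != nil {
        return nil, err
    }
    // The all-zero nonce is safe only because each derived key is used for
    // exactly one message, as explained in the example's comments.
    nonce := make([]byte, aead.NonceSize())
    return aead.Open(nil, nonce, msg[4+pubKeyLen:], senderPubKeyBytes)
}
```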
1615
pkg/crypto/ec/secp256k1/field.go
Normal file
File diff suppressed because it is too large
92
pkg/crypto/ec/secp256k1/field_bench_test.go
Normal file
@@ -0,0 +1,92 @@
// Copyright (c) 2020-2023 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package secp256k1

import (
    "math/big"
    "testing"
)

// BenchmarkFieldNormalize benchmarks how long it takes the internal field
// to perform normalization (which includes modular reduction).
func BenchmarkFieldNormalize(b *testing.B) {
    // The function is constant time so any value is fine.
    f := &FieldVal{
        n: [10]uint32{
            0x000148f6, 0x03ffffc0, 0x03ffffff, 0x03ffffff, 0x03ffffff,
            0x03ffffff, 0x03ffffff, 0x03ffffff, 0x03ffffff, 0x00000007,
        },
    }
    b.ReportAllocs()
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        f.Normalize()
    }
}

// BenchmarkFieldSqrt benchmarks calculating the square root of an unsigned
// 256-bit big-endian integer modulo the field prime with the specialized type.
func BenchmarkFieldSqrt(b *testing.B) {
    // The function is constant time so any value is fine.
    valHex := "16fb970147a9acc73654d4be233cc48b875ce20a2122d24f073d29bd28805aca"
    f := new(FieldVal).SetHex(valHex).Normalize()
    b.ReportAllocs()
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        var result FieldVal
        _ = result.SquareRootVal(f)
    }
}

// BenchmarkBigSqrt benchmarks calculating the square root of an unsigned
// 256-bit big-endian integer modulo the field prime with stdlib big integers.
func BenchmarkBigSqrt(b *testing.B) {
    // The function is constant time so any value is fine.
    valHex := "16fb970147a9acc73654d4be233cc48b875ce20a2122d24f073d29bd28805aca"
    val, ok := new(big.Int).SetString(valHex, 16)
    if !ok {
        b.Fatalf("failed to parse hex %s", valHex)
    }
    b.ReportAllocs()
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        _ = new(big.Int).ModSqrt(val, curveParams.P)
    }
}

// BenchmarkFieldIsGtOrEqPrimeMinusOrder benchmarks determining whether a value
// is greater than or equal to the field prime minus the group order with the
// specialized type.
func BenchmarkFieldIsGtOrEqPrimeMinusOrder(b *testing.B) {
    // The function is constant time so any value is fine.
    valHex := "16fb970147a9acc73654d4be233cc48b875ce20a2122d24f073d29bd28805aca"
    f := new(FieldVal).SetHex(valHex).Normalize()
    b.ReportAllocs()
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        _ = f.IsGtOrEqPrimeMinusOrder()
    }
}

// BenchmarkBigIsGtOrEqPrimeMinusOrder benchmarks determining whether a value
// is greater than or equal to the field prime minus the group order with stdlib
// big integers.
func BenchmarkBigIsGtOrEqPrimeMinusOrder(b *testing.B) {
    // Same value used in field val version.
    valHex := "16fb970147a9acc73654d4be233cc48b875ce20a2122d24f073d29bd28805aca"
    val, ok := new(big.Int).SetString(valHex, 16)
    if !ok {
        b.Fatalf("failed to parse hex %s", valHex)
    }
    bigPMinusN := new(big.Int).Sub(curveParams.P, curveParams.N)
    b.ReportAllocs()
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        // In practice, the internal value to compare would have to be converted
        // to a big integer from bytes, so it's a fair comparison to allocate a
        // new big int here and set all bytes.
        _ = new(big.Int).SetBytes(val.Bytes()).Cmp(bigPMinusN) >= 0
    }
}
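The benchmarks above contrast the specialized FieldVal type with math/big. As a rough illustration of how the specialized path is used directly, here is a hedged sketch that computes a modular square root with FieldVal and verifies the result by squaring it; it assumes SetHex, Normalize, SquareRootVal, SquareVal, and Equals behave as documented in field.go (whose diff is suppressed above), and `demoFieldSqrt` itself is not part of the package.

```go
package secp256k1

import "fmt"

// demoFieldSqrt is an illustrative sketch: it takes the square root of a value
// modulo the field prime and checks the result by squaring it again.
func demoFieldSqrt(valHex string) {
    f := new(FieldVal).SetHex(valHex).Normalize()

    var root FieldVal
    if !root.SquareRootVal(f) {
        // Not every field element is a quadratic residue, in which case no
        // square root exists.
        fmt.Println("no square root modulo the field prime")
        return
    }

    var check FieldVal
    check.SquareVal(&root).Normalize()
    fmt.Println("root verifies:", check.Equals(f))
}
```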
2018
pkg/crypto/ec/secp256k1/field_test.go
Normal file
File diff suppressed because it is too large
88
pkg/crypto/ec/secp256k1/loadprecomputed.go
Normal file
@@ -0,0 +1,88 @@
// Copyright 2015 The btcsuite developers
// Copyright (c) 2015-2022 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package secp256k1

import (
    "compress/zlib"
    "encoding/base64"
    "io"
    "strings"
    "sync"
)

//go:generate go run genprecomps.go

// bytePointTable describes a table used to house pre-computed values for
// accelerating scalar base multiplication.
type bytePointTable [32][256]JacobianPoint

// compressedBytePointsFn is set to a real function by the code generation to
// return the compressed pre-computed values for accelerating scalar base
// multiplication.
var compressedBytePointsFn func() string

// s256BytePoints houses pre-computed values used to accelerate scalar base
// multiplication such that they are only loaded on first use.
var s256BytePoints = func() func() *bytePointTable {
    // mustLoadBytePoints decompresses and deserializes the pre-computed byte
    // points used to accelerate scalar base multiplication for the secp256k1
    // curve.
    //
    // This approach is used since it allows the compilation to use
    // significantly less RAM and complete much faster than it would with
    // hard-coding the final in-memory data structure. At the same time, it is
    // quite fast to generate the in-memory data structure on first use with
    // this approach versus computing the table.
    //
    // It will panic on any errors because the data is hard coded and thus any
    // errors mean something is wrong in the source code.
    var data *bytePointTable
    mustLoadBytePoints := func() {
        // There will be no byte points to load when generating them.
        if compressedBytePointsFn == nil {
            return
        }
        bp := compressedBytePointsFn()
        // Decompress the pre-computed table used to accelerate scalar base
        // multiplication.
        decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(bp))
        r, err := zlib.NewReader(decoder)
        if err != nil {
            panic(err)
        }
        serialized, err := io.ReadAll(r)
        if err != nil {
            panic(err)
        }
        // Deserialize the precomputed byte points and set the memory table to
        // them.
        offset := 0
        var bytePoints bytePointTable
        for byteNum := 0; byteNum < len(bytePoints); byteNum++ {
            // All points in this window.
            for i := 0; i < len(bytePoints[byteNum]); i++ {
                p := &bytePoints[byteNum][i]
                p.X.SetByteSlice(serialized[offset:])
                offset += 32
                p.Y.SetByteSlice(serialized[offset:])
                offset += 32
                p.Z.SetInt(1)
            }
        }
        data = &bytePoints
    }
    // Return a closure that initializes the data on first access. This is done
    // because the table takes a non-trivial amount of memory and initializing
    // it unconditionally would cause anything that imports the package, either
    // directly, or indirectly via transitive deps, to use that memory even if
    // the caller never accesses any parts of the package that actually need
    // access to it.
    var loadBytePointsOnce sync.Once
    return func() *bytePointTable {
        loadBytePointsOnce.Do(mustLoadBytePoints)
        return data
    }
}()
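loadprecomputed.go only declares the compressedBytePointsFn hook; the //go:generate directive above produces a companion file that assigns it. The sketch below shows the assumed shape of that generated file; the file name, the constant name, and of course the data itself are placeholders rather than anything taken from the repository.

```go
// Assumed shape of the output of genprecomps.go (hypothetical; the real
// generated file may differ).
package secp256k1

// compressedBytePoints would hold the zlib-compressed, base64-encoded
// serialization of the 32x256 table of pre-computed Jacobian points.
const compressedBytePoints = "eJx..." // placeholder, not real data

func init() {
    // Wire the compressed data into the lazy loader above so s256BytePoints
    // can decompress and deserialize it on first use.
    compressedBytePointsFn = func() string {
        return compressedBytePoints
    }
}
```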
Some files were not shown because too many files have changed in this diff.