consensus: follow up to removing some consensus params (#2427)

* follow up to removing some consensus params (refs #2382)
* change args type to int64 in state#makeParams
* make valsCount and evidenceCount ints again
* MaxEvidenceBytesPerBlock: include magic number in godoc
* [spec] creating a proposal
* test state#TxFilter
* panic if MaxDataBytes is less than 0
* fixes after review
* use amino#UvarintSize to calculate overhead
0c74291f3b/encoder.go (L85-L90)
* avoid cyclic imports
* you can do better Go, come on
* remove testdouble package
Anton Kaliaev 2018-09-21 13:00:36 +04:00 committed by Alexander Simmerl
parent 7b727bf3d0
commit 8d50bb9dad
27 changed files with 347 additions and 175 deletions

View File

@ -60,7 +60,7 @@ func (m *Request) Reset() { *m = Request{} }
func (m *Request) String() string { return proto.CompactTextString(m) }
func (*Request) ProtoMessage() {}
func (*Request) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{0}
return fileDescriptor_types_8495fed925debe52, []int{0}
}
func (m *Request) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -482,7 +482,7 @@ func (m *RequestEcho) Reset() { *m = RequestEcho{} }
func (m *RequestEcho) String() string { return proto.CompactTextString(m) }
func (*RequestEcho) ProtoMessage() {}
func (*RequestEcho) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{1}
return fileDescriptor_types_8495fed925debe52, []int{1}
}
func (m *RequestEcho) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -528,7 +528,7 @@ func (m *RequestFlush) Reset() { *m = RequestFlush{} }
func (m *RequestFlush) String() string { return proto.CompactTextString(m) }
func (*RequestFlush) ProtoMessage() {}
func (*RequestFlush) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{2}
return fileDescriptor_types_8495fed925debe52, []int{2}
}
func (m *RequestFlush) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -568,7 +568,7 @@ func (m *RequestInfo) Reset() { *m = RequestInfo{} }
func (m *RequestInfo) String() string { return proto.CompactTextString(m) }
func (*RequestInfo) ProtoMessage() {}
func (*RequestInfo) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{3}
return fileDescriptor_types_8495fed925debe52, []int{3}
}
func (m *RequestInfo) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -617,7 +617,7 @@ func (m *RequestSetOption) Reset() { *m = RequestSetOption{} }
func (m *RequestSetOption) String() string { return proto.CompactTextString(m) }
func (*RequestSetOption) ProtoMessage() {}
func (*RequestSetOption) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{4}
return fileDescriptor_types_8495fed925debe52, []int{4}
}
func (m *RequestSetOption) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -675,7 +675,7 @@ func (m *RequestInitChain) Reset() { *m = RequestInitChain{} }
func (m *RequestInitChain) String() string { return proto.CompactTextString(m) }
func (*RequestInitChain) ProtoMessage() {}
func (*RequestInitChain) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{5}
return fileDescriptor_types_8495fed925debe52, []int{5}
}
func (m *RequestInitChain) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -753,7 +753,7 @@ func (m *RequestQuery) Reset() { *m = RequestQuery{} }
func (m *RequestQuery) String() string { return proto.CompactTextString(m) }
func (*RequestQuery) ProtoMessage() {}
func (*RequestQuery) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{6}
return fileDescriptor_types_8495fed925debe52, []int{6}
}
func (m *RequestQuery) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -825,7 +825,7 @@ func (m *RequestBeginBlock) Reset() { *m = RequestBeginBlock{} }
func (m *RequestBeginBlock) String() string { return proto.CompactTextString(m) }
func (*RequestBeginBlock) ProtoMessage() {}
func (*RequestBeginBlock) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{7}
return fileDescriptor_types_8495fed925debe52, []int{7}
}
func (m *RequestBeginBlock) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -893,7 +893,7 @@ func (m *RequestCheckTx) Reset() { *m = RequestCheckTx{} }
func (m *RequestCheckTx) String() string { return proto.CompactTextString(m) }
func (*RequestCheckTx) ProtoMessage() {}
func (*RequestCheckTx) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{8}
return fileDescriptor_types_8495fed925debe52, []int{8}
}
func (m *RequestCheckTx) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -940,7 +940,7 @@ func (m *RequestDeliverTx) Reset() { *m = RequestDeliverTx{} }
func (m *RequestDeliverTx) String() string { return proto.CompactTextString(m) }
func (*RequestDeliverTx) ProtoMessage() {}
func (*RequestDeliverTx) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{9}
return fileDescriptor_types_8495fed925debe52, []int{9}
}
func (m *RequestDeliverTx) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -987,7 +987,7 @@ func (m *RequestEndBlock) Reset() { *m = RequestEndBlock{} }
func (m *RequestEndBlock) String() string { return proto.CompactTextString(m) }
func (*RequestEndBlock) ProtoMessage() {}
func (*RequestEndBlock) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{10}
return fileDescriptor_types_8495fed925debe52, []int{10}
}
func (m *RequestEndBlock) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1033,7 +1033,7 @@ func (m *RequestCommit) Reset() { *m = RequestCommit{} }
func (m *RequestCommit) String() string { return proto.CompactTextString(m) }
func (*RequestCommit) ProtoMessage() {}
func (*RequestCommit) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{11}
return fileDescriptor_types_8495fed925debe52, []int{11}
}
func (m *RequestCommit) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1086,7 +1086,7 @@ func (m *Response) Reset() { *m = Response{} }
func (m *Response) String() string { return proto.CompactTextString(m) }
func (*Response) ProtoMessage() {}
func (*Response) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{12}
return fileDescriptor_types_8495fed925debe52, []int{12}
}
func (m *Response) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1539,7 +1539,7 @@ func (m *ResponseException) Reset() { *m = ResponseException{} }
func (m *ResponseException) String() string { return proto.CompactTextString(m) }
func (*ResponseException) ProtoMessage() {}
func (*ResponseException) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{13}
return fileDescriptor_types_8495fed925debe52, []int{13}
}
func (m *ResponseException) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1586,7 +1586,7 @@ func (m *ResponseEcho) Reset() { *m = ResponseEcho{} }
func (m *ResponseEcho) String() string { return proto.CompactTextString(m) }
func (*ResponseEcho) ProtoMessage() {}
func (*ResponseEcho) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{14}
return fileDescriptor_types_8495fed925debe52, []int{14}
}
func (m *ResponseEcho) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1632,7 +1632,7 @@ func (m *ResponseFlush) Reset() { *m = ResponseFlush{} }
func (m *ResponseFlush) String() string { return proto.CompactTextString(m) }
func (*ResponseFlush) ProtoMessage() {}
func (*ResponseFlush) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{15}
return fileDescriptor_types_8495fed925debe52, []int{15}
}
func (m *ResponseFlush) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1675,7 +1675,7 @@ func (m *ResponseInfo) Reset() { *m = ResponseInfo{} }
func (m *ResponseInfo) String() string { return proto.CompactTextString(m) }
func (*ResponseInfo) ProtoMessage() {}
func (*ResponseInfo) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{16}
return fileDescriptor_types_8495fed925debe52, []int{16}
}
func (m *ResponseInfo) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1747,7 +1747,7 @@ func (m *ResponseSetOption) Reset() { *m = ResponseSetOption{} }
func (m *ResponseSetOption) String() string { return proto.CompactTextString(m) }
func (*ResponseSetOption) ProtoMessage() {}
func (*ResponseSetOption) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{17}
return fileDescriptor_types_8495fed925debe52, []int{17}
}
func (m *ResponseSetOption) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1809,7 +1809,7 @@ func (m *ResponseInitChain) Reset() { *m = ResponseInitChain{} }
func (m *ResponseInitChain) String() string { return proto.CompactTextString(m) }
func (*ResponseInitChain) ProtoMessage() {}
func (*ResponseInitChain) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{18}
return fileDescriptor_types_8495fed925debe52, []int{18}
}
func (m *ResponseInitChain) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1871,7 +1871,7 @@ func (m *ResponseQuery) Reset() { *m = ResponseQuery{} }
func (m *ResponseQuery) String() string { return proto.CompactTextString(m) }
func (*ResponseQuery) ProtoMessage() {}
func (*ResponseQuery) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{19}
return fileDescriptor_types_8495fed925debe52, []int{19}
}
func (m *ResponseQuery) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -1967,7 +1967,7 @@ func (m *ResponseBeginBlock) Reset() { *m = ResponseBeginBlock{} }
func (m *ResponseBeginBlock) String() string { return proto.CompactTextString(m) }
func (*ResponseBeginBlock) ProtoMessage() {}
func (*ResponseBeginBlock) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{20}
return fileDescriptor_types_8495fed925debe52, []int{20}
}
func (m *ResponseBeginBlock) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2020,7 +2020,7 @@ func (m *ResponseCheckTx) Reset() { *m = ResponseCheckTx{} }
func (m *ResponseCheckTx) String() string { return proto.CompactTextString(m) }
func (*ResponseCheckTx) ProtoMessage() {}
func (*ResponseCheckTx) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{21}
return fileDescriptor_types_8495fed925debe52, []int{21}
}
func (m *ResponseCheckTx) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2115,7 +2115,7 @@ func (m *ResponseDeliverTx) Reset() { *m = ResponseDeliverTx{} }
func (m *ResponseDeliverTx) String() string { return proto.CompactTextString(m) }
func (*ResponseDeliverTx) ProtoMessage() {}
func (*ResponseDeliverTx) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{22}
return fileDescriptor_types_8495fed925debe52, []int{22}
}
func (m *ResponseDeliverTx) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2206,7 +2206,7 @@ func (m *ResponseEndBlock) Reset() { *m = ResponseEndBlock{} }
func (m *ResponseEndBlock) String() string { return proto.CompactTextString(m) }
func (*ResponseEndBlock) ProtoMessage() {}
func (*ResponseEndBlock) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{23}
return fileDescriptor_types_8495fed925debe52, []int{23}
}
func (m *ResponseEndBlock) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2268,7 +2268,7 @@ func (m *ResponseCommit) Reset() { *m = ResponseCommit{} }
func (m *ResponseCommit) String() string { return proto.CompactTextString(m) }
func (*ResponseCommit) ProtoMessage() {}
func (*ResponseCommit) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{24}
return fileDescriptor_types_8495fed925debe52, []int{24}
}
func (m *ResponseCommit) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2318,7 +2318,7 @@ func (m *ConsensusParams) Reset() { *m = ConsensusParams{} }
func (m *ConsensusParams) String() string { return proto.CompactTextString(m) }
func (*ConsensusParams) ProtoMessage() {}
func (*ConsensusParams) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{25}
return fileDescriptor_types_8495fed925debe52, []int{25}
}
func (m *ConsensusParams) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2364,7 +2364,7 @@ func (m *ConsensusParams) GetEvidenceParams() *EvidenceParams {
// BlockSize contains limits on the block size.
type BlockSize struct {
// Note: must be greater than 0
MaxBytes int32 `protobuf:"varint,1,opt,name=max_bytes,json=maxBytes,proto3" json:"max_bytes,omitempty"`
MaxBytes int64 `protobuf:"varint,1,opt,name=max_bytes,json=maxBytes,proto3" json:"max_bytes,omitempty"`
// Note: must be greater or equal to -1
MaxGas int64 `protobuf:"varint,2,opt,name=max_gas,json=maxGas,proto3" json:"max_gas,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
@ -2376,7 +2376,7 @@ func (m *BlockSize) Reset() { *m = BlockSize{} }
func (m *BlockSize) String() string { return proto.CompactTextString(m) }
func (*BlockSize) ProtoMessage() {}
func (*BlockSize) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{26}
return fileDescriptor_types_8495fed925debe52, []int{26}
}
func (m *BlockSize) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2405,7 +2405,7 @@ func (m *BlockSize) XXX_DiscardUnknown() {
var xxx_messageInfo_BlockSize proto.InternalMessageInfo
func (m *BlockSize) GetMaxBytes() int32 {
func (m *BlockSize) GetMaxBytes() int64 {
if m != nil {
return m.MaxBytes
}
@ -2432,7 +2432,7 @@ func (m *EvidenceParams) Reset() { *m = EvidenceParams{} }
func (m *EvidenceParams) String() string { return proto.CompactTextString(m) }
func (*EvidenceParams) ProtoMessage() {}
func (*EvidenceParams) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{27}
return fileDescriptor_types_8495fed925debe52, []int{27}
}
func (m *EvidenceParams) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2480,7 +2480,7 @@ func (m *LastCommitInfo) Reset() { *m = LastCommitInfo{} }
func (m *LastCommitInfo) String() string { return proto.CompactTextString(m) }
func (*LastCommitInfo) ProtoMessage() {}
func (*LastCommitInfo) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{28}
return fileDescriptor_types_8495fed925debe52, []int{28}
}
func (m *LastCommitInfo) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2553,7 +2553,7 @@ func (m *Header) Reset() { *m = Header{} }
func (m *Header) String() string { return proto.CompactTextString(m) }
func (*Header) ProtoMessage() {}
func (*Header) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{29}
return fileDescriptor_types_8495fed925debe52, []int{29}
}
func (m *Header) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2699,7 +2699,7 @@ func (m *BlockID) Reset() { *m = BlockID{} }
func (m *BlockID) String() string { return proto.CompactTextString(m) }
func (*BlockID) ProtoMessage() {}
func (*BlockID) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{30}
return fileDescriptor_types_8495fed925debe52, []int{30}
}
func (m *BlockID) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2754,7 +2754,7 @@ func (m *PartSetHeader) Reset() { *m = PartSetHeader{} }
func (m *PartSetHeader) String() string { return proto.CompactTextString(m) }
func (*PartSetHeader) ProtoMessage() {}
func (*PartSetHeader) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{31}
return fileDescriptor_types_8495fed925debe52, []int{31}
}
func (m *PartSetHeader) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2811,7 +2811,7 @@ func (m *Validator) Reset() { *m = Validator{} }
func (m *Validator) String() string { return proto.CompactTextString(m) }
func (*Validator) ProtoMessage() {}
func (*Validator) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{32}
return fileDescriptor_types_8495fed925debe52, []int{32}
}
func (m *Validator) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2867,7 +2867,7 @@ func (m *ValidatorUpdate) Reset() { *m = ValidatorUpdate{} }
func (m *ValidatorUpdate) String() string { return proto.CompactTextString(m) }
func (*ValidatorUpdate) ProtoMessage() {}
func (*ValidatorUpdate) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{33}
return fileDescriptor_types_8495fed925debe52, []int{33}
}
func (m *ValidatorUpdate) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2923,7 +2923,7 @@ func (m *VoteInfo) Reset() { *m = VoteInfo{} }
func (m *VoteInfo) String() string { return proto.CompactTextString(m) }
func (*VoteInfo) ProtoMessage() {}
func (*VoteInfo) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{34}
return fileDescriptor_types_8495fed925debe52, []int{34}
}
func (m *VoteInfo) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -2978,7 +2978,7 @@ func (m *PubKey) Reset() { *m = PubKey{} }
func (m *PubKey) String() string { return proto.CompactTextString(m) }
func (*PubKey) ProtoMessage() {}
func (*PubKey) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{35}
return fileDescriptor_types_8495fed925debe52, []int{35}
}
func (m *PubKey) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -3036,7 +3036,7 @@ func (m *Evidence) Reset() { *m = Evidence{} }
func (m *Evidence) String() string { return proto.CompactTextString(m) }
func (*Evidence) ProtoMessage() {}
func (*Evidence) Descriptor() ([]byte, []int) {
return fileDescriptor_types_bfbaec40016cbadd, []int{36}
return fileDescriptor_types_8495fed925debe52, []int{36}
}
func (m *Evidence) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -7737,7 +7737,7 @@ func NewPopulatedConsensusParams(r randyTypes, easy bool) *ConsensusParams {
func NewPopulatedBlockSize(r randyTypes, easy bool) *BlockSize {
this := &BlockSize{}
this.MaxBytes = int32(r.Int31())
this.MaxBytes = int64(r.Int63())
if r.Intn(2) == 0 {
this.MaxBytes *= -1
}
@ -12901,7 +12901,7 @@ func (m *BlockSize) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.MaxBytes |= (int32(b) & 0x7F) << shift
m.MaxBytes |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
@ -14503,12 +14503,12 @@ var (
ErrIntOverflowTypes = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("abci/types/types.proto", fileDescriptor_types_bfbaec40016cbadd) }
func init() { proto.RegisterFile("abci/types/types.proto", fileDescriptor_types_8495fed925debe52) }
func init() {
golang_proto.RegisterFile("abci/types/types.proto", fileDescriptor_types_bfbaec40016cbadd)
golang_proto.RegisterFile("abci/types/types.proto", fileDescriptor_types_8495fed925debe52)
}
var fileDescriptor_types_bfbaec40016cbadd = []byte{
var fileDescriptor_types_8495fed925debe52 = []byte{
// 2062 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x58, 0x4f, 0x6f, 0x23, 0x49,
0x15, 0x4f, 0xdb, 0x8e, 0xed, 0x7e, 0x49, 0xec, 0x4c, 0x25, 0x93, 0x78, 0x0c, 0x24, 0xa3, 0x06,
@ -14595,48 +14595,48 @@ var fileDescriptor_types_bfbaec40016cbadd = []byte{
0xbb, 0x2d, 0xeb, 0x17, 0x06, 0xb4, 0x0b, 0x87, 0x21, 0x07, 0x00, 0x32, 0xe3, 0xc5, 0xde, 0x73,
0x5d, 0x2a, 0xaf, 0xaa, 0x83, 0xa3, 0xc9, 0x1e, 0x7b, 0xcf, 0x99, 0x6d, 0xf6, 0xf5, 0x2f, 0xf9,
0x18, 0xda, 0x4c, 0x15, 0x4c, 0x3a, 0x25, 0x55, 0x72, 0xd8, 0xa1, 0xcb, 0x29, 0xa5, 0x6d, 0x8b,
0xe5, 0xc6, 0xd6, 0x31, 0x98, 0xc9, 0xba, 0xe4, 0x6b, 0x60, 0x4e, 0xe8, 0x4c, 0x95, 0xb1, 0x62,
0xf3, 0x45, 0xbb, 0x39, 0xa1, 0x33, 0xac, 0x60, 0xc9, 0x26, 0x34, 0x04, 0x71, 0x48, 0xe5, 0x0e,
0x55, 0xbb, 0x3e, 0xa1, 0xb3, 0x1f, 0xd2, 0xd8, 0xda, 0x85, 0x56, 0x7e, 0x13, 0xcd, 0xaa, 0x21,
0x45, 0xb2, 0x1e, 0x0f, 0x99, 0xf5, 0x18, 0x5a, 0xf9, 0x4a, 0x51, 0x24, 0x92, 0x28, 0x98, 0xfa,
0xae, 0xda, 0x4e, 0x0e, 0x44, 0x9b, 0x78, 0x19, 0xc8, 0xab, 0xcb, 0x96, 0x86, 0xe7, 0x01, 0x67,
0x99, 0xfa, 0x52, 0xf2, 0x58, 0x7f, 0xac, 0x41, 0x5d, 0x96, 0xad, 0xe4, 0xad, 0x4c, 0xa7, 0x80,
0x98, 0xd4, 0x5b, 0xba, 0x7e, 0xb1, 0xdd, 0xc0, 0xf4, 0x7d, 0xfa, 0x30, 0x6d, 0x1b, 0xd2, 0x44,
0x55, 0xc9, 0x55, 0xd5, 0xba, 0x47, 0xa9, 0xbe, 0x76, 0x8f, 0xb2, 0x09, 0x0d, 0x7f, 0x3a, 0x71,
0xf8, 0x2c, 0xc6, 0x58, 0xab, 0xda, 0x75, 0x7f, 0x3a, 0x79, 0x32, 0x8b, 0x85, 0x4d, 0x79, 0xc0,
0xe9, 0x18, 0x49, 0x32, 0xd8, 0x9a, 0x38, 0x21, 0x88, 0x47, 0xb0, 0x92, 0x41, 0x39, 0xcf, 0x55,
0x25, 0x54, 0x2b, 0x7b, 0xe3, 0xa7, 0x0f, 0x95, 0xba, 0x4b, 0x09, 0xea, 0x9d, 0xba, 0x64, 0x27,
0x5f, 0x92, 0x23, 0x38, 0xca, 0x0c, 0x9d, 0xa9, 0xba, 0x05, 0x34, 0x8a, 0x03, 0x08, 0x77, 0x93,
0x2c, 0x32, 0x5d, 0x37, 0xc5, 0x04, 0x12, 0xdf, 0x86, 0x76, 0x8a, 0x2f, 0x92, 0xc5, 0x94, 0xab,
0xa4, 0xd3, 0xc8, 0xf8, 0x1e, 0xac, 0xfb, 0x6c, 0xc6, 0x9d, 0x22, 0x37, 0x20, 0x37, 0x11, 0xb4,
0xf3, 0xbc, 0xc4, 0xb7, 0xa1, 0x95, 0x06, 0x24, 0xf2, 0x2e, 0xc9, 0xc6, 0x28, 0x99, 0x45, 0xb6,
0x3b, 0xd0, 0x4c, 0xd0, 0x7d, 0x19, 0x19, 0x1a, 0x54, 0x82, 0x7a, 0x52, 0x2f, 0x44, 0x2c, 0x9e,
0x8e, 0xb9, 0x5a, 0x64, 0x05, 0x79, 0xb0, 0x5e, 0xb0, 0xe5, 0x3c, 0xf2, 0x7e, 0x13, 0x56, 0x92,
0x38, 0x40, 0xbe, 0x16, 0xf2, 0x2d, 0xeb, 0x49, 0x64, 0xda, 0x85, 0xd5, 0x30, 0x0a, 0xc2, 0x20,
0x66, 0x91, 0x43, 0x5d, 0x37, 0x62, 0x71, 0xdc, 0x69, 0xcb, 0xf5, 0xf4, 0xfc, 0xb1, 0x9c, 0xb6,
0x7e, 0x0e, 0x0d, 0x65, 0xfd, 0xd2, 0xf6, 0xe9, 0xfb, 0xb0, 0x1c, 0xd2, 0x48, 0x9c, 0x29, 0xdb,
0x44, 0xe9, 0x22, 0xf6, 0x8c, 0x46, 0xa2, 0x6b, 0xce, 0xf5, 0x52, 0x4b, 0xc8, 0x2f, 0xa7, 0xac,
0x7b, 0xb0, 0x92, 0xe3, 0x11, 0x61, 0x80, 0x4e, 0xa1, 0xc3, 0x00, 0x07, 0xc9, 0xce, 0x95, 0x74,
0x67, 0xeb, 0x3e, 0x98, 0x89, 0xa1, 0x45, 0xad, 0xa5, 0xf5, 0x30, 0x94, 0xed, 0xe4, 0x10, 0x01,
0x3a, 0xf8, 0x9c, 0x45, 0xaa, 0xbe, 0x92, 0x03, 0xeb, 0x29, 0xb4, 0x0b, 0xf9, 0x94, 0xec, 0x41,
0x23, 0x9c, 0xf6, 0x1d, 0xdd, 0xd7, 0xa7, 0x9d, 0xe0, 0xd9, 0xb4, 0xff, 0x09, 0xbb, 0xd2, 0x9d,
0x60, 0x88, 0xa3, 0x74, 0xd9, 0x4a, 0x76, 0xd9, 0x31, 0x34, 0x75, 0x68, 0x92, 0xef, 0x82, 0x99,
0xf8, 0x48, 0x21, 0x81, 0x25, 0x5b, 0xab, 0x45, 0x53, 0x46, 0x71, 0xd5, 0xb1, 0x37, 0xf4, 0x99,
0xeb, 0xa4, 0xf1, 0x80, 0x7b, 0x34, 0xed, 0xb6, 0x24, 0x7c, 0xaa, 0x9d, 0xdf, 0x7a, 0x0f, 0xea,
0xf2, 0x6c, 0xc2, 0x3e, 0x62, 0x65, 0x5d, 0x7e, 0x8a, 0xff, 0xd2, 0x4c, 0xfb, 0x27, 0x03, 0x9a,
0x3a, 0x45, 0x95, 0x0a, 0xe5, 0x0e, 0x5d, 0x79, 0xd5, 0x43, 0xcf, 0xeb, 0xcd, 0x75, 0x16, 0xa9,
0xbd, 0x76, 0x16, 0xd9, 0x03, 0x22, 0x93, 0xc5, 0x65, 0xc0, 0x3d, 0x7f, 0xe8, 0x48, 0x5b, 0xcb,
0xac, 0xb1, 0x8a, 0x94, 0x73, 0x24, 0x9c, 0x89, 0xf9, 0xc3, 0x2f, 0x16, 0xa1, 0x7d, 0xdc, 0x7b,
0x70, 0x7a, 0x1c, 0x86, 0x63, 0x6f, 0x40, 0xb1, 0xe6, 0x3d, 0x80, 0x1a, 0x56, 0xf5, 0x25, 0xef,
0x89, 0xdd, 0xb2, 0xf6, 0x92, 0x1c, 0xc2, 0x22, 0x16, 0xf7, 0xa4, 0xec, 0x59, 0xb1, 0x5b, 0xda,
0x65, 0x8a, 0x4d, 0x64, 0xf9, 0x7f, 0xf3, 0x75, 0xb1, 0x5b, 0xd6, 0x6a, 0x92, 0x8f, 0xc1, 0x4c,
0xcb, 0xf2, 0x79, 0x6f, 0x8c, 0xdd, 0xb9, 0x4d, 0xa7, 0x90, 0x4f, 0xab, 0xa1, 0x79, 0x4f, 0x65,
0xdd, 0xb9, 0xdd, 0x19, 0x39, 0x82, 0x86, 0xae, 0x12, 0xcb, 0x5f, 0x01, 0xbb, 0x73, 0x1a, 0x42,
0x61, 0x1e, 0x59, 0x69, 0x97, 0x3d, 0x55, 0x76, 0x4b, 0xbb, 0x56, 0xf2, 0x01, 0xd4, 0x15, 0xec,
0x97, 0xbe, 0x04, 0x76, 0xcb, 0xdb, 0x3a, 0xa1, 0x64, 0xda, 0x6b, 0xcc, 0x7b, 0x4e, 0xed, 0xce,
0x6d, 0xaf, 0xc9, 0x31, 0x40, 0xa6, 0xba, 0x9e, 0xfb, 0x4e, 0xda, 0x9d, 0xdf, 0x36, 0x93, 0xfb,
0xd0, 0x4c, 0x9f, 0x42, 0xca, 0x5f, 0x3e, 0xbb, 0xf3, 0x3a, 0xd9, 0xde, 0xd7, 0xff, 0xf3, 0xf7,
0x2d, 0xe3, 0xd7, 0xd7, 0x5b, 0xc6, 0x6f, 0xaf, 0xb7, 0x8c, 0xaf, 0xae, 0xb7, 0x8c, 0x3f, 0x5c,
0x6f, 0x19, 0x7f, 0xbb, 0xde, 0x32, 0x7e, 0xf7, 0x8f, 0x2d, 0xa3, 0x5f, 0x47, 0xf7, 0x7f, 0xff,
0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x29, 0x33, 0xce, 0x77, 0xac, 0x17, 0x00, 0x00,
0xe5, 0xc6, 0xd6, 0x31, 0x98, 0xc9, 0xba, 0xe4, 0x6b, 0x60, 0x4e, 0xe8, 0x4c, 0x95, 0xb1, 0xb2,
0x00, 0x6a, 0x4e, 0xe8, 0x0c, 0x2b, 0x58, 0xb2, 0x09, 0x0d, 0x41, 0x1c, 0x52, 0xb9, 0x43, 0xd5,
0xae, 0x4f, 0xe8, 0xec, 0x87, 0x34, 0xb6, 0x76, 0xa1, 0x95, 0xdf, 0x44, 0xb3, 0x6a, 0x48, 0x91,
0xac, 0xc7, 0x43, 0x66, 0x3d, 0x86, 0x56, 0xbe, 0x52, 0x14, 0x89, 0x24, 0x0a, 0xa6, 0xbe, 0x8b,
0x8c, 0x8b, 0xb6, 0x1c, 0x88, 0x36, 0xf1, 0x32, 0x90, 0x57, 0x97, 0x2d, 0x0d, 0xcf, 0x03, 0xce,
0x32, 0xf5, 0xa5, 0xe4, 0xb1, 0xfe, 0x58, 0x83, 0xba, 0x2c, 0x5b, 0xc9, 0x5b, 0x99, 0x4e, 0x01,
0x31, 0xa9, 0xb7, 0x74, 0xfd, 0x62, 0xbb, 0x81, 0xe9, 0xfb, 0xf4, 0x61, 0xda, 0x36, 0xa4, 0x89,
0xaa, 0x92, 0xab, 0xaa, 0x75, 0x8f, 0x52, 0x7d, 0xed, 0x1e, 0x65, 0x13, 0x1a, 0xfe, 0x74, 0xe2,
0xf0, 0x59, 0x8c, 0xb1, 0x56, 0xb5, 0xeb, 0xfe, 0x74, 0xf2, 0x64, 0x16, 0x0b, 0x9b, 0xf2, 0x80,
0xd3, 0x31, 0x92, 0x64, 0xb0, 0x35, 0x71, 0x42, 0x10, 0x8f, 0x60, 0x25, 0x83, 0x72, 0x9e, 0xab,
0x4a, 0xa8, 0x56, 0xf6, 0xc6, 0x4f, 0x1f, 0x2a, 0x75, 0x97, 0x12, 0xd4, 0x3b, 0x75, 0xc9, 0x4e,
0xbe, 0x24, 0x47, 0x70, 0x94, 0x19, 0x3a, 0x53, 0x75, 0x0b, 0x68, 0x14, 0x07, 0x10, 0xee, 0x26,
0x59, 0x64, 0xba, 0x6e, 0x8a, 0x09, 0x24, 0xbe, 0x0d, 0xed, 0x14, 0x5f, 0x24, 0x8b, 0x29, 0x57,
0x49, 0xa7, 0x91, 0xf1, 0x3d, 0x58, 0xf7, 0xd9, 0x8c, 0x3b, 0x45, 0x6e, 0x40, 0x6e, 0x22, 0x68,
0xe7, 0x79, 0x89, 0x6f, 0x43, 0x2b, 0x0d, 0x48, 0xe4, 0x5d, 0x92, 0x8d, 0x51, 0x32, 0x8b, 0x6c,
0x77, 0xa0, 0x99, 0xa0, 0xfb, 0x32, 0x32, 0x34, 0xa8, 0x04, 0xf5, 0xa4, 0x5e, 0x88, 0x58, 0x3c,
0x1d, 0x73, 0xb5, 0xc8, 0x0a, 0xf2, 0x60, 0xbd, 0x60, 0xcb, 0x79, 0xe4, 0xfd, 0x26, 0xac, 0x24,
0x71, 0x80, 0x7c, 0x2d, 0xe4, 0x5b, 0xd6, 0x93, 0xc8, 0xb4, 0x0b, 0xab, 0x61, 0x14, 0x84, 0x41,
0xcc, 0x22, 0x87, 0xba, 0x6e, 0xc4, 0xe2, 0xb8, 0xd3, 0x96, 0xeb, 0xe9, 0xf9, 0x63, 0x39, 0x6d,
0xfd, 0x1c, 0x1a, 0xca, 0xfa, 0xa5, 0xed, 0xd3, 0xf7, 0x61, 0x39, 0xa4, 0x91, 0x38, 0x53, 0xb6,
0x89, 0xd2, 0x45, 0xec, 0x19, 0x8d, 0x44, 0xd7, 0x9c, 0xeb, 0xa5, 0x96, 0x90, 0x5f, 0x4e, 0x59,
0xf7, 0x60, 0x25, 0xc7, 0x23, 0xc2, 0x00, 0x9d, 0x42, 0x87, 0x01, 0x0e, 0x92, 0x9d, 0x2b, 0xe9,
0xce, 0xd6, 0x7d, 0x30, 0x13, 0x43, 0x8b, 0x5a, 0x4b, 0xeb, 0x61, 0x28, 0xdb, 0xc9, 0x21, 0x02,
0x74, 0xf0, 0x39, 0x8b, 0x54, 0x7d, 0x25, 0x07, 0xd6, 0x53, 0x68, 0x17, 0xf2, 0x29, 0xd9, 0x83,
0x46, 0x38, 0xed, 0x3b, 0xba, 0xaf, 0x4f, 0x3b, 0xc1, 0xb3, 0x69, 0xff, 0x13, 0x76, 0xa5, 0x3b,
0xc1, 0x10, 0x47, 0xe9, 0xb2, 0x95, 0xec, 0xb2, 0x63, 0x68, 0xea, 0xd0, 0x24, 0xdf, 0x05, 0x33,
0xf1, 0x91, 0x42, 0x02, 0x4b, 0xb6, 0x56, 0x8b, 0xa6, 0x8c, 0xe2, 0xaa, 0x63, 0x6f, 0xe8, 0x33,
0xd7, 0x49, 0xe3, 0x01, 0xf7, 0x68, 0xda, 0x6d, 0x49, 0xf8, 0x54, 0x3b, 0xbf, 0xf5, 0x1e, 0xd4,
0xe5, 0xd9, 0x84, 0x7d, 0xc4, 0xca, 0xba, 0xfc, 0x14, 0xff, 0xa5, 0x99, 0xf6, 0x4f, 0x06, 0x34,
0x75, 0x8a, 0x2a, 0x15, 0xca, 0x1d, 0xba, 0xf2, 0xaa, 0x87, 0x9e, 0xd7, 0x9b, 0xeb, 0x2c, 0x52,
0x7b, 0xed, 0x2c, 0xb2, 0x07, 0x44, 0x26, 0x8b, 0xcb, 0x80, 0x7b, 0xfe, 0xd0, 0x91, 0xb6, 0x96,
0x59, 0x63, 0x15, 0x29, 0xe7, 0x48, 0x38, 0x13, 0xf3, 0x87, 0x5f, 0x2c, 0x42, 0xfb, 0xb8, 0xf7,
0xe0, 0xf4, 0x38, 0x0c, 0xc7, 0xde, 0x80, 0x62, 0xcd, 0x7b, 0x00, 0x35, 0xac, 0xea, 0x4b, 0xde,
0x13, 0xbb, 0x65, 0xed, 0x25, 0x39, 0x84, 0x45, 0x2c, 0xee, 0x49, 0xd9, 0xb3, 0x62, 0xb7, 0xb4,
0xcb, 0x14, 0x9b, 0xc8, 0xf2, 0xff, 0xe6, 0xeb, 0x62, 0xb7, 0xac, 0xd5, 0x24, 0x1f, 0x83, 0x99,
0x96, 0xe5, 0xf3, 0xde, 0x18, 0xbb, 0x73, 0x9b, 0x4e, 0x21, 0x9f, 0x56, 0x43, 0xf3, 0x9e, 0xca,
0xba, 0x73, 0xbb, 0x33, 0x72, 0x04, 0x0d, 0x5d, 0x25, 0x96, 0xbf, 0x02, 0x76, 0xe7, 0x34, 0x84,
0xc2, 0x3c, 0xb2, 0xd2, 0x2e, 0x7b, 0xaa, 0xec, 0x96, 0x76, 0xad, 0xe4, 0x03, 0xa8, 0x2b, 0xd8,
0x2f, 0x7d, 0x09, 0xec, 0x96, 0xb7, 0x75, 0x42, 0xc9, 0xb4, 0xd7, 0x98, 0xf7, 0x9c, 0xda, 0x9d,
0xdb, 0x5e, 0x93, 0x63, 0x80, 0x4c, 0x75, 0x3d, 0xf7, 0x9d, 0xb4, 0x3b, 0xbf, 0x6d, 0x26, 0xf7,
0xa1, 0x99, 0x3e, 0x85, 0x94, 0xbf, 0x7c, 0x76, 0xe7, 0x75, 0xb2, 0xbd, 0xaf, 0xff, 0xe7, 0xef,
0x5b, 0xc6, 0xaf, 0xaf, 0xb7, 0x8c, 0xdf, 0x5e, 0x6f, 0x19, 0x5f, 0x5d, 0x6f, 0x19, 0x7f, 0xb8,
0xde, 0x32, 0xfe, 0x76, 0xbd, 0x65, 0xfc, 0xee, 0x1f, 0x5b, 0x46, 0xbf, 0x8e, 0xee, 0xff, 0xfe,
0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xef, 0x95, 0x6c, 0x08, 0xac, 0x17, 0x00, 0x00,
}

View File

@ -206,7 +206,7 @@ message ConsensusParams {
// BlockSize contains limits on the block size.
message BlockSize {
// Note: must be greater than 0
int32 max_bytes = 1;
int64 max_bytes = 1;
// Note: must be greater or equal to -1
int64 max_gas = 2;
}

View File

@ -148,7 +148,7 @@ func TestMempoolRmBadTx(t *testing.T) {
// check for the tx
for {
txs := cs.mempool.ReapMaxBytesMaxGas(len(txBytes), -1)
txs := cs.mempool.ReapMaxBytesMaxGas(int64(len(txBytes)), -1)
if len(txs) == 0 {
emptyMempoolCh <- struct{}{}
return

View File

@ -196,7 +196,7 @@ func newMockEvidencePool(val []byte) *mockEvidencePool {
}
// NOTE: maxBytes is ignored
func (m *mockEvidencePool) PendingEvidence(maxBytes int) []types.Evidence {
func (m *mockEvidencePool) PendingEvidence(maxBytes int64) []types.Evidence {
if m.height > 0 {
return m.ev
}

View File

@ -953,15 +953,17 @@ func (cs *ConsensusState) createProposalBlock() (block *types.Block, blockParts
// bound evidence to 1/10th of the block
evidence := cs.evpool.PendingEvidence(types.MaxEvidenceBytesPerBlock(maxBytes))
// Mempool validated transactions
txs := cs.mempool.ReapMaxBytesMaxGas(types.MaxDataBytes(maxBytes, cs.state.Validators.Size(), len(evidence)), maxGas)
txs := cs.mempool.ReapMaxBytesMaxGas(types.MaxDataBytes(
maxBytes,
cs.state.Validators.Size(),
len(evidence),
), maxGas)
proposerAddr := cs.privValidator.GetAddress()
block, parts := cs.state.MakeBlock(cs.Height, txs, commit, evidence, proposerAddr)
return block, parts
}
// Enter: `timeoutPropose` after entering Propose.
// Enter: proposal block and POL is ready.
// Enter: any +2/3 prevotes for future round.

View File

@ -21,6 +21,7 @@ please submit them to our [bug bounty](https://tendermint.com/security)!
### Consensus Protocol
- [Consensus Algorithm](/docs/spec/consensus/consensus.md)
- [Creating a proposal](/docs/spec/consensus/creating-proposal.md)
- [Time](/docs/spec/consensus/bft-time.md)
- [Light-Client](/docs/spec/consensus/light-client.md)

View File

@ -0,0 +1,42 @@
# Creating a proposal
A block consists of a header, transactions, votes (the commit),
and a list of evidence of malfeasance (i.e. signing conflicting votes).
We include no more than 1/10th of the maximum block size
(`ConsensusParams.BlockSize.MaxBytes`) worth of evidence with each block.
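For a concrete sense of that budget (the number below is hypothetical, not taken from this commit):
```go
// Evidence is capped at 1/10th of the block, via integer division.
maxEvidence := types.MaxEvidenceBytesPerBlock(1048576) // 1048576 / 10 = 104857 bytes
```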
## Reaping transactions from the mempool
When we reap transactions from the mempool, we calculate the maximum data
size by subtracting from the maximum block size the maximum header size
(`MaxHeaderBytes`), the maximum amino overhead for a block
(`MaxAminoOverheadForBlock`), the size of the last commit (if present), and
the size of the evidence (if present). While reaping we account for the amino
overhead of each transaction.
```go
func MaxDataBytes(maxBytes int64, valsCount, evidenceCount int) int64 {
return maxBytes -
MaxAminoOverheadForBlock -
MaxHeaderBytes -
int64(valsCount)*MaxVoteBytes -
int64(evidenceCount)*MaxEvidenceBytes
}
```
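The per-transaction amino overhead mentioned above is just the uvarint length
prefix of the transaction. A minimal sketch of that accounting, assuming go-amino's
`UvarintSize` helper (the function this commit switches the mempool to):
```go
package main
import (
	"fmt"
	amino "github.com/tendermint/go-amino"
)
// txSizeInBlock is the space a transaction occupies inside block Data:
// its raw length plus the amino uvarint length prefix.
func txSizeInBlock(tx []byte) int64 {
	aminoOverhead := int64(amino.UvarintSize(uint64(len(tx))))
	return int64(len(tx)) + aminoOverhead
}
func main() {
	// A 20-byte tx occupies 21 bytes, matching the mempool test comment in this commit.
	fmt.Println(txSizeInBlock(make([]byte, 20))) // 21
}
```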
## Validating transactions in the mempool
Before we accept a transaction in the mempool, we check that its size is no more
than {MaxDataSize}. {MaxDataSize} is calculated using the same formula as
above, except that, because the evidence size is unknown at this point, we subtract
the maximum evidence size (1/10th of the maximum block size).
```go
func MaxDataBytesUnknownEvidence(maxBytes int64, valsCount int) int64 {
return maxBytes -
MaxAminoOverheadForBlock -
MaxHeaderBytes -
int64(valsCount)*MaxVoteBytes -
MaxEvidenceBytesPerBlock(maxBytes)
}
```
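This bound is exactly what the new `state.TxFilter` helper (added in this commit)
applies before a transaction is admitted to the mempool; the node wires it in with
`mempl.WithFilter(sm.TxFilter(state))` and the block executor passes it to
`mempool.Update`. Reproduced from `state/tx_filter.go`:
```go
// TxFilter returns a function to filter transactions. The function limits the
// size of a transaction to the maximum block's data size.
func TxFilter(state State) func(tx types.Tx) bool {
	maxDataBytes := types.MaxDataBytesUnknownEvidence(
		state.ConsensusParams.BlockSize.MaxBytes,
		state.Validators.Size(),
	)
	return func(tx types.Tx) bool { return int64(len(tx)) <= maxDataBytes }
}
```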

View File

@ -59,7 +59,7 @@ func (evpool *EvidencePool) PriorityEvidence() []types.Evidence {
// PendingEvidence returns uncommitted evidence up to maxBytes.
// If maxBytes is -1, all evidence is returned.
func (evpool *EvidencePool) PendingEvidence(maxBytes int) []types.Evidence {
func (evpool *EvidencePool) PendingEvidence(maxBytes int64) []types.Evidence {
return evpool.evidenceStore.PendingEvidence(maxBytes)
}
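A hedged usage sketch of the two modes described above: bound pending evidence to
the per-block budget (as `createProposalBlock` does in this commit), or pass `-1`
to fetch everything.
```go
package example
import (
	"github.com/tendermint/tendermint/evidence"
	"github.com/tendermint/tendermint/types"
)
// pendingForProposal bounds evidence to 1/10th of the block's MaxBytes.
func pendingForProposal(evpool *evidence.EvidencePool, maxBytes int64) []types.Evidence {
	return evpool.PendingEvidence(types.MaxEvidenceBytesPerBlock(maxBytes))
}
// allPending returns every piece of uncommitted evidence (no size cap).
func allPending(evpool *evidence.EvidencePool) []types.Evidence {
	return evpool.PendingEvidence(-1)
}
```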

View File

@ -88,23 +88,23 @@ func (store *EvidenceStore) PriorityEvidence() (evidence []types.Evidence) {
// PendingEvidence returns known uncommitted evidence up to maxBytes.
// If maxBytes is -1, all evidence is returned.
func (store *EvidenceStore) PendingEvidence(maxBytes int) (evidence []types.Evidence) {
func (store *EvidenceStore) PendingEvidence(maxBytes int64) (evidence []types.Evidence) {
return store.listEvidence(baseKeyPending, maxBytes)
}
// listEvidence lists the evidence for the given prefix key up to maxBytes.
// It is wrapped by PriorityEvidence and PendingEvidence for convenience.
// If maxBytes is -1, there's no cap on the size of returned evidence.
func (store *EvidenceStore) listEvidence(prefixKey string, maxBytes int) (evidence []types.Evidence) {
var bytes int
func (store *EvidenceStore) listEvidence(prefixKey string, maxBytes int64) (evidence []types.Evidence) {
var bytes int64
iter := dbm.IteratePrefix(store.db, []byte(prefixKey))
for ; iter.Valid(); iter.Next() {
val := iter.Value()
if maxBytes > 0 && bytes+len(val) > maxBytes {
if maxBytes > 0 && bytes+int64(len(val)) > maxBytes {
return evidence
}
bytes += len(val)
bytes += int64(len(val))
var ei EvidenceInfo
err := cdc.UnmarshalBinaryBare(val, &ei)

View File

@ -4,7 +4,6 @@ import (
"bytes"
"container/list"
"crypto/sha256"
"encoding/binary"
"fmt"
"sync"
"sync/atomic"
@ -12,13 +11,13 @@ import (
"github.com/pkg/errors"
amino "github.com/tendermint/go-amino"
abci "github.com/tendermint/tendermint/abci/types"
cfg "github.com/tendermint/tendermint/config"
auto "github.com/tendermint/tendermint/libs/autofile"
"github.com/tendermint/tendermint/libs/clist"
cmn "github.com/tendermint/tendermint/libs/common"
"github.com/tendermint/tendermint/libs/log"
cfg "github.com/tendermint/tendermint/config"
"github.com/tendermint/tendermint/proxy"
"github.com/tendermint/tendermint/types"
)
@ -385,9 +384,7 @@ func (mem *Mempool) notifyTxsAvailable() {
// with the condition that the total gasWanted must be less than maxGas.
// If both maxes are negative, there is no cap on the size of all returned
// transactions (~ all available transactions).
func (mem *Mempool) ReapMaxBytesMaxGas(maxBytes int, maxGas int64) types.Txs {
var buf [binary.MaxVarintLen64]byte
func (mem *Mempool) ReapMaxBytesMaxGas(maxBytes, maxGas int64) types.Txs {
mem.proxyMtx.Lock()
defer mem.proxyMtx.Unlock()
@ -396,7 +393,7 @@ func (mem *Mempool) ReapMaxBytesMaxGas(maxBytes int, maxGas int64) types.Txs {
time.Sleep(time.Millisecond * 10)
}
var totalBytes int
var totalBytes int64
var totalGas int64
// TODO: we will get a performance boost if we have a good estimate of avg
// size per tx, and set the initial capacity based off of that.
@ -405,12 +402,11 @@ func (mem *Mempool) ReapMaxBytesMaxGas(maxBytes int, maxGas int64) types.Txs {
for e := mem.txs.Front(); e != nil; e = e.Next() {
memTx := e.Value.(*mempoolTx)
// Check total size requirement
// amino.UvarintSize is not used here because it won't be possible to reuse buf
aminoOverhead := binary.PutUvarint(buf[:], uint64(len(memTx.tx)))
if maxBytes > -1 && totalBytes+len(memTx.tx)+aminoOverhead > maxBytes {
aminoOverhead := int64(amino.UvarintSize(uint64(len(memTx.tx))))
if maxBytes > -1 && totalBytes+int64(len(memTx.tx))+aminoOverhead > maxBytes {
return txs
}
totalBytes += len(memTx.tx) + aminoOverhead
totalBytes += int64(len(memTx.tx)) + aminoOverhead
// Check total gas requirement
if maxGas > -1 && totalGas+memTx.gasWanted > maxGas {
return txs

View File

@ -90,7 +90,7 @@ func TestReapMaxBytesMaxGas(t *testing.T) {
// each tx has 20 bytes + amino overhead = 21 bytes, 1 gas
tests := []struct {
numTxsToCreate int
maxBytes int
maxBytes int64
maxGas int64
expectedNumTxs int
}{

View File

@ -250,16 +250,12 @@ func NewNode(config *cfg.Config,
csMetrics, p2pMetrics, memplMetrics := metricsProvider()
// Make MempoolReactor
maxDataBytes := types.MaxDataBytesUnknownEvidence(
state.ConsensusParams.BlockSize.MaxBytes,
state.Validators.Size(),
)
mempool := mempl.NewMempool(
config.Mempool,
proxyApp.Mempool(),
state.LastBlockHeight,
mempl.WithMetrics(memplMetrics),
mempl.WithFilter(func(tx types.Tx) bool { return len(tx) <= maxDataBytes }),
mempl.WithFilter(sm.TxFilter(state)),
)
mempoolLogger := logger.With("module", "mempool")
mempool.SetLogger(mempoolLogger)

View File

@ -145,12 +145,7 @@ func (blockExec *BlockExecutor) Commit(state State, block *types.Block) ([]byte,
"appHash", fmt.Sprintf("%X", res.Data))
// Update mempool.
maxDataBytes := types.MaxDataBytesUnknownEvidence(
state.ConsensusParams.BlockSize.MaxBytes,
state.Validators.Size(),
)
filter := func(tx types.Tx) bool { return len(tx) <= maxDataBytes }
if err := blockExec.mempool.Update(block.Height, block.Txs, filter); err != nil {
if err := blockExec.mempool.Update(block.Height, block.Txs, TxFilter(state)); err != nil {
return nil, err
}

View File

@ -22,7 +22,7 @@ type Mempool interface {
Size() int
CheckTx(types.Tx, func(*abci.Response)) error
ReapMaxBytesMaxGas(maxBytes int, maxGas int64) types.Txs
ReapMaxBytesMaxGas(maxBytes, maxGas int64) types.Txs
Update(height int64, txs types.Txs, filter func(types.Tx) bool) error
Flush()
FlushAppConn() error
@ -40,7 +40,7 @@ func (MockMempool) Lock()
func (MockMempool) Unlock() {}
func (MockMempool) Size() int { return 0 }
func (MockMempool) CheckTx(tx types.Tx, cb func(*abci.Response)) error { return nil }
func (MockMempool) ReapMaxBytesMaxGas(maxBytes int, maxGas int64) types.Txs { return types.Txs{} }
func (MockMempool) ReapMaxBytesMaxGas(maxBytes, maxGas int64) types.Txs { return types.Txs{} }
func (MockMempool) Update(height int64, txs types.Txs, filter func(types.Tx) bool) error { return nil }
func (MockMempool) Flush() {}
func (MockMempool) FlushAppConn() error { return nil }
@ -73,7 +73,7 @@ type BlockStore interface {
// EvidencePool defines the EvidencePool interface used by the ConsensusState.
type EvidencePool interface {
PendingEvidence(int) []types.Evidence
PendingEvidence(int64) []types.Evidence
AddEvidence(types.Evidence) error
Update(*types.Block, State)
}
@ -81,6 +81,6 @@ type EvidencePool interface {
// MockMempool is an empty implementation of a Mempool, useful for testing.
type MockEvidencePool struct{}
func (m MockEvidencePool) PendingEvidence(int) []types.Evidence { return nil }
func (m MockEvidencePool) PendingEvidence(int64) []types.Evidence { return nil }
func (m MockEvidencePool) AddEvidence(types.Evidence) error { return nil }
func (m MockEvidencePool) Update(*types.Block, State) {}

View File

@ -328,7 +328,7 @@ func TestConsensusParamsChangesSaveLoad(t *testing.T) {
params[0] = state.ConsensusParams
for i := 1; i < N+1; i++ {
params[i] = *types.DefaultConsensusParams()
params[i].BlockSize.MaxBytes += i
params[i].BlockSize.MaxBytes += int64(i)
}
// Build the params history by running updateState
@ -373,14 +373,14 @@ func TestConsensusParamsChangesSaveLoad(t *testing.T) {
}
}
func makeParams(txsBytes, blockGas, evidenceAge int) types.ConsensusParams {
func makeParams(blockBytes, blockGas, evidenceAge int64) types.ConsensusParams {
return types.ConsensusParams{
BlockSize: types.BlockSize{
MaxBytes: txsBytes,
MaxGas: int64(blockGas),
MaxBytes: blockBytes,
MaxGas: blockGas,
},
EvidenceParams: types.EvidenceParams{
MaxAge: int64(evidenceAge),
MaxAge: evidenceAge,
},
}
}

state/tx_filter.go (new file, 15 additions)
View File

@ -0,0 +1,15 @@
package state
import (
"github.com/tendermint/tendermint/types"
)
// TxFilter returns a function to filter transactions. The function limits the
// size of a transaction to the maximum block's data size.
func TxFilter(state State) func(tx types.Tx) bool {
maxDataBytes := types.MaxDataBytesUnknownEvidence(
state.ConsensusParams.BlockSize.MaxBytes,
state.Validators.Size(),
)
return func(tx types.Tx) bool { return int64(len(tx)) <= maxDataBytes }
}

state/tx_filter_test.go (new file, 47 additions)
View File

@ -0,0 +1,47 @@
package state
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/crypto/ed25519"
cmn "github.com/tendermint/tendermint/libs/common"
dbm "github.com/tendermint/tendermint/libs/db"
"github.com/tendermint/tendermint/types"
tmtime "github.com/tendermint/tendermint/types/time"
)
func TestTxFilter(t *testing.T) {
genDoc := randomGenesisDoc()
genDoc.ConsensusParams.BlockSize.MaxBytes = 3000
testCases := []struct {
tx types.Tx
isTxValid bool
}{
{types.Tx(cmn.RandBytes(250)), true},
{types.Tx(cmn.RandBytes(3001)), false},
}
for i, tc := range testCases {
stateDB := dbm.NewDB("state", "memdb", os.TempDir())
state, err := LoadStateFromDBOrGenesisDoc(stateDB, genDoc)
require.NoError(t, err)
f := TxFilter(state)
assert.Equal(t, tc.isTxValid, f(tc.tx), "#%v", i)
}
}
func randomGenesisDoc() *types.GenesisDoc {
pubkey := ed25519.GenPrivKey().PubKey()
return &types.GenesisDoc{
GenesisTime: tmtime.Now(),
ChainID: "abc",
Validators: []types.GenesisValidator{{pubkey.Address(), pubkey, 10, "myval"}},
ConsensusParams: types.DefaultConsensusParams(),
}
}

View File

@ -15,7 +15,7 @@ import (
const (
// MaxHeaderBytes is a maximum header size (including amino overhead).
MaxHeaderBytes = 511
MaxHeaderBytes int64 = 511
// MaxAminoOverheadForBlock - maximum amino overhead to encode a block (up to
// MaxBlockSizeBytes in size) not including its parts except Data.
@ -24,7 +24,7 @@ const (
// 2 fields (2 embedded): 2 bytes
// Uvarint length of Data.Txs: 4 bytes
// Data.Txs field: 1 byte
MaxAminoOverheadForBlock = 11
MaxAminoOverheadForBlock int64 = 11
)
// Block defines the atomic unit of a Tendermint blockchain.
@ -205,23 +205,48 @@ func (b *Block) StringShort() string {
//-----------------------------------------------------------------------------
// MaxDataBytes returns the maximum size of block's data.
func MaxDataBytes(maxBytes, valsCount, evidenceCount int) int {
return maxBytes -
//
// XXX: Panics on negative result.
func MaxDataBytes(maxBytes int64, valsCount, evidenceCount int) int64 {
maxDataBytes := maxBytes -
MaxAminoOverheadForBlock -
MaxHeaderBytes -
(valsCount * MaxVoteBytes) -
(evidenceCount * MaxEvidenceBytes)
int64(valsCount)*MaxVoteBytes -
int64(evidenceCount)*MaxEvidenceBytes
if maxDataBytes < 0 {
panic(fmt.Sprintf(
"Negative MaxDataBytes. BlockSize.MaxBytes=%d is too small to accommodate header&lastCommit&evidence=%d",
maxBytes,
-(maxDataBytes - maxBytes),
))
}
return maxDataBytes
}
// MaxDataBytesUnknownEvidence returns the maximum size of block's data when
// evidence count is unknown. MaxEvidenceBytesPerBlock will be used as the size
// of evidence.
func MaxDataBytesUnknownEvidence(maxBytes, valsCount int) int {
return maxBytes -
//
// XXX: Panics on negative result.
func MaxDataBytesUnknownEvidence(maxBytes int64, valsCount int) int64 {
maxDataBytes := maxBytes -
MaxAminoOverheadForBlock -
MaxHeaderBytes -
(valsCount * MaxVoteBytes) -
int64(valsCount)*MaxVoteBytes -
MaxEvidenceBytesPerBlock(maxBytes)
if maxDataBytes < 0 {
panic(fmt.Sprintf(
"Negative MaxDataBytesUnknownEvidence. BlockSize.MaxBytes=%d is too small to accommodate header&lastCommit&evidence=%d",
maxBytes,
-(maxDataBytes - maxBytes),
))
}
return maxDataBytes
}
//-----------------------------------------------------------------------------

View File

@ -266,7 +266,7 @@ func TestMaxHeaderBytes(t *testing.T) {
bz, err := cdc.MarshalBinary(h)
require.NoError(t, err)
assert.Equal(t, MaxHeaderBytes, len(bz))
assert.EqualValues(t, MaxHeaderBytes, len(bz))
}
func randCommit() *Commit {
@ -279,3 +279,60 @@ func randCommit() *Commit {
}
return commit
}
func TestBlockMaxDataBytes(t *testing.T) {
testCases := []struct {
maxBytes int64
valsCount int
evidenceCount int
panics bool
result int64
}{
0: {-10, 1, 0, true, 0},
1: {10, 1, 0, true, 0},
2: {721, 1, 0, true, 0},
3: {722, 1, 0, false, 0},
4: {723, 1, 0, false, 1},
}
for i, tc := range testCases {
if tc.panics {
assert.Panics(t, func() {
MaxDataBytes(tc.maxBytes, tc.valsCount, tc.evidenceCount)
}, "#%v", i)
} else {
assert.Equal(t,
tc.result,
MaxDataBytes(tc.maxBytes, tc.valsCount, tc.evidenceCount),
"#%v", i)
}
}
}
func TestBlockMaxDataBytesUnknownEvidence(t *testing.T) {
testCases := []struct {
maxBytes int64
valsCount int
panics bool
result int64
}{
0: {-10, 1, true, 0},
1: {10, 1, true, 0},
2: {801, 1, true, 0},
3: {802, 1, false, 0},
4: {803, 1, false, 1},
}
for i, tc := range testCases {
if tc.panics {
assert.Panics(t, func() {
MaxDataBytesUnknownEvidence(tc.maxBytes, tc.valsCount)
}, "#%v", i)
} else {
assert.Equal(t,
tc.result,
MaxDataBytesUnknownEvidence(tc.maxBytes, tc.valsCount),
"#%v", i)
}
}
}
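For reference, the boundary values in these tests follow directly from the constants
in this commit: with one validator and no evidence the fixed overhead is
MaxAminoOverheadForBlock (11) + MaxHeaderBytes (511) + MaxVoteBytes (200) = 722 bytes,
so MaxDataBytes returns 0 at maxBytes=722 and 1 at 723. With unknown evidence another
maxBytes/10 is reserved, giving 0 at maxBytes=802 (802 - 722 - 80) and 1 at 803.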

View File

@ -12,7 +12,7 @@ import (
const (
// MaxEvidenceBytes is a maximum size of any evidence (including amino overhead).
MaxEvidenceBytes = 440
MaxEvidenceBytes int64 = 440
)
// ErrEvidenceInvalid wraps a piece of evidence and the error denoting how or why it is invalid.
@ -52,14 +52,16 @@ func RegisterEvidences(cdc *amino.Codec) {
cdc.RegisterConcrete(MockBadEvidence{}, "tendermint/MockBadEvidence", nil)
}
// MaxEvidenceBytesPerBlock returns the maximum evidence size per block.
func MaxEvidenceBytesPerBlock(blockMaxBytes int) int {
// MaxEvidenceBytesPerBlock returns the maximum evidence size per block -
// 1/10th of the maximum block size.
func MaxEvidenceBytesPerBlock(blockMaxBytes int64) int64 {
return blockMaxBytes / 10
}
//-------------------------------------------
// DuplicateVoteEvidence contains evidence a validator signed two conflicting votes.
// DuplicateVoteEvidence contains evidence a validator signed two conflicting
// votes.
type DuplicateVoteEvidence struct {
PubKey crypto.PubKey
VoteA *Vote

View File

@ -108,7 +108,7 @@ func TestMaxEvidenceBytes(t *testing.T) {
bz, err := cdc.MarshalBinary(ev)
require.NoError(t, err)
assert.Equal(t, MaxEvidenceBytes, len(bz))
assert.EqualValues(t, MaxEvidenceBytes, len(bz))
}
func randomDuplicatedVoteEvidence() *DuplicateVoteEvidence {

View File

@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/crypto/ed25519"
tmtime "github.com/tendermint/tendermint/types/time"
)

View File

@ -23,7 +23,7 @@ type ConsensusParams struct {
// BlockSize contain limits on the block size.
type BlockSize struct {
MaxBytes int `json:"max_txs_bytes"`
MaxBytes int64 `json:"max_bytes"`
MaxGas int64 `json:"max_gas"`
}
@ -102,7 +102,7 @@ func (params ConsensusParams) Update(params2 *abci.ConsensusParams) ConsensusPar
// XXX: it's cast city over here. It's ok because we only do int32->int
// but still, watch it champ.
if params2.BlockSize != nil {
res.BlockSize.MaxBytes = int(params2.BlockSize.MaxBytes)
res.BlockSize.MaxBytes = params2.BlockSize.MaxBytes
res.BlockSize.MaxGas = params2.BlockSize.MaxGas
}
if params2.EvidenceParams != nil {

View File

@ -9,30 +9,23 @@ import (
abci "github.com/tendermint/tendermint/abci/types"
)
func newConsensusParams(txsBytes, evidenceAge int) ConsensusParams {
return ConsensusParams{
BlockSize: BlockSize{MaxBytes: txsBytes},
EvidenceParams: EvidenceParams{MaxAge: int64(evidenceAge)},
}
}
func TestConsensusParamsValidation(t *testing.T) {
testCases := []struct {
params ConsensusParams
valid bool
}{
// test block size
0: {newConsensusParams(1, 1), true},
1: {newConsensusParams(0, 1), false},
2: {newConsensusParams(47*1024*1024, 1), true},
3: {newConsensusParams(10, 1), true},
4: {newConsensusParams(100*1024*1024, 1), true},
5: {newConsensusParams(101*1024*1024, 1), false},
6: {newConsensusParams(1024*1024*1024, 1), false},
7: {newConsensusParams(1024*1024*1024, -1), false},
0: {makeParams(1, 0, 1), true},
1: {makeParams(0, 0, 1), false},
2: {makeParams(47*1024*1024, 0, 1), true},
3: {makeParams(10, 0, 1), true},
4: {makeParams(100*1024*1024, 0, 1), true},
5: {makeParams(101*1024*1024, 0, 1), false},
6: {makeParams(1024*1024*1024, 0, 1), false},
7: {makeParams(1024*1024*1024, 0, -1), false},
// test evidence age
8: {newConsensusParams(1, 0), false},
9: {newConsensusParams(1, -1), false},
8: {makeParams(1, 0, 0), false},
9: {makeParams(1, 0, -1), false},
}
for i, tc := range testCases {
if tc.valid {
@ -43,14 +36,14 @@ func TestConsensusParamsValidation(t *testing.T) {
}
}
func makeParams(txsBytes, blockGas, evidenceAge int) ConsensusParams {
func makeParams(blockBytes, blockGas, evidenceAge int64) ConsensusParams {
return ConsensusParams{
BlockSize: BlockSize{
MaxBytes: txsBytes,
MaxGas: int64(blockGas),
MaxBytes: blockBytes,
MaxGas: blockGas,
},
EvidenceParams: EvidenceParams{
MaxAge: int64(evidenceAge),
MaxAge: evidenceAge,
},
}
}

View File

@ -115,7 +115,7 @@ func (tm2pb) ValidatorUpdates(vals *ValidatorSet) []abci.ValidatorUpdate {
func (tm2pb) ConsensusParams(params *ConsensusParams) *abci.ConsensusParams {
return &abci.ConsensusParams{
BlockSize: &abci.BlockSize{
MaxBytes: int32(params.BlockSize.MaxBytes),
MaxBytes: params.BlockSize.MaxBytes,
MaxGas: params.BlockSize.MaxGas,
},
EvidenceParams: &abci.EvidenceParams{
@ -211,11 +211,11 @@ func (pb2tm) ValidatorUpdates(vals []abci.ValidatorUpdate) ([]*Validator, error)
func (pb2tm) ConsensusParams(csp *abci.ConsensusParams) ConsensusParams {
return ConsensusParams{
BlockSize: BlockSize{
MaxBytes: int(csp.BlockSize.MaxBytes), // XXX
MaxBytes: csp.BlockSize.MaxBytes,
MaxGas: csp.BlockSize.MaxGas,
},
EvidenceParams: EvidenceParams{
MaxAge: csp.EvidenceParams.MaxAge, // XXX
MaxAge: csp.EvidenceParams.MaxAge,
},
}
}

View File

@ -12,7 +12,7 @@ import (
const (
// MaxVoteBytes is a maximum vote size (including amino overhead).
MaxVoteBytes = 200
MaxVoteBytes int64 = 200
)
var (

View File

@ -147,5 +147,5 @@ func TestMaxVoteBytes(t *testing.T) {
bz, err := cdc.MarshalBinary(vote)
require.NoError(t, err)
assert.Equal(t, MaxVoteBytes, len(bz))
assert.EqualValues(t, MaxVoteBytes, len(bz))
}