feat!: restrict MaxBytes to values at or below that of MaxSquareSize #1743

Closed
wants to merge 4 commits
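Before the diff, a minimal hedged sketch of the restriction this PR introduces. The wiring below is simplified and the package name is hypothetical; the real logic lives in x/paramfilter and the forked cosmos-sdk (see paramfilter.ConsensusParamsKeyTable(appconsts.MaxSquareSize) in app/app.go below), and the error string mirrors the one quoted in app/test/std_sdk_test.go.

package sketch // hypothetical package, for illustration only

import (
	"fmt"

	"github.com/celestiaorg/celestia-app/pkg/square"
	tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
)

// validateBlockParams sketches the new rule: reject any proposed consensus
// block.MaxBytes that exceeds the byte estimate for the maximum square size.
// This is not the actual x/paramfilter code.
func validateBlockParams(maxSquareSize uint64) func(i interface{}) error {
	return func(i interface{}) error {
		bp, ok := i.(tmproto.BlockParams)
		if !ok {
			return fmt.Errorf("invalid parameter type: %T", i)
		}
		limit := square.EstimateMaxBlockBytes(maxSquareSize)
		if bp.MaxBytes > limit {
			// mirrors the error quoted in app/test/std_sdk_test.go
			return fmt.Errorf("block maximum bytes must be less than or equal to %d", limit)
		}
		return nil
	}
}
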
5 changes: 4 additions & 1 deletion app/app.go
@@ -85,6 +85,7 @@ import (
dbm "github.com/tendermint/tm-db"

"github.com/celestiaorg/celestia-app/app/encoding"
"github.com/celestiaorg/celestia-app/pkg/appconsts"
"github.com/celestiaorg/celestia-app/pkg/proof"
blobmodule "github.com/celestiaorg/celestia-app/x/blob"
blobmodulekeeper "github.com/celestiaorg/celestia-app/x/blob/keeper"
@@ -285,7 +286,7 @@ func New(
app.ParamsKeeper = initParamsKeeper(appCodec, cdc, keys[paramstypes.StoreKey], tkeys[paramstypes.TStoreKey])

// set the BaseApp's parameter store
bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramstypes.ConsensusParamsKeyTable()))
bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramfilter.ConsensusParamsKeyTable(appconsts.MaxSquareSize)))

// add capability keeper and ScopeToModule for ibc module
app.CapabilityKeeper = capabilitykeeper.NewKeeper(appCodec, keys[capabilitytypes.StoreKey], memKeys[capabilitytypes.MemStoreKey])
@@ -713,6 +714,8 @@ func (*App) BlockedParams() [][2]string {
{stakingtypes.ModuleName, string(stakingtypes.KeyBondDenom)},
// consensus.validator.PubKeyTypes
{baseapp.Paramspace, string(baseapp.ParamStoreKeyValidatorParams)},
// consensus.Evidence
{baseapp.Paramspace, string(baseapp.ParamStoreKeyEvidenceParams)},
}
}
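
A hedged sketch of how the list above can be consumed; the real enforcement lives in x/paramfilter's proposal handler, not in this exact form.

// isBlocked is a hypothetical helper: it reports whether a param change
// targets one of the (subspace, key) pairs returned by App.BlockedParams,
// e.g. the consensus Evidence params added above.
func isBlocked(blocked [][2]string, subspace, key string) bool {
	for _, p := range blocked {
		if p[0] == subspace && p[1] == key {
			return true
		}
	}
	return false
}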

2 changes: 1 addition & 1 deletion app/prepare_proposal.go
@@ -25,7 +25,7 @@ func (app *App) PrepareProposal(req abci.RequestPrepareProposal) abci.ResponsePr

// build the square from the set of valid and prioritised transactions.
// The txs returned are the ones used in the square and block
dataSquare, txs, err := square.Build(txs, appconsts.DefaultMaxSquareSize)
dataSquare, txs, err := square.Build(txs, appconsts.MaxSquareSize)
if err != nil {
panic(err)
}
2 changes: 1 addition & 1 deletion app/process_proposal.go
@@ -99,7 +99,7 @@ func (app *App) ProcessProposal(req abci.RequestProcessProposal) abci.ResponsePr
}

// Construct the data square from the block's transactions
dataSquare, err := square.Construct(req.BlockData.Txs, appconsts.DefaultMaxSquareSize)
dataSquare, err := square.Construct(req.BlockData.Txs, appconsts.MaxSquareSize)
if err != nil {
logInvalidPropBlockError(app.Logger(), req.Header, "failure to compute data square from transactions:", err)
return reject()
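A hedged usage sketch of the two pkg/square entry points retargeted in this PR. The Build/Construct split is inferred from the surrounding comments: Build picks the prioritized txs that fit the square during PrepareProposal, while Construct must lay out exactly the txs a proposed block already contains.

// buildAndVerify is a hypothetical helper; candidateTxs and blockTxs are
// assumed [][]byte inputs and error handling is abbreviated.
func buildAndVerify(candidateTxs, blockTxs [][]byte) error {
	// PrepareProposal path: Build returns the square plus the subset of txs it used.
	_, usedTxs, err := square.Build(candidateTxs, appconsts.MaxSquareSize)
	if err != nil {
		return err
	}
	_ = usedTxs // these are the txs that make it into the proposed block

	// ProcessProposal path: Construct lays out exactly the block's txs, or errors.
	_, err = square.Construct(blockTxs, appconsts.MaxSquareSize)
	return err
}
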
6 changes: 3 additions & 3 deletions app/test/integration_test.go
@@ -208,12 +208,12 @@ func (s *IntegrationTestSuite) TestMaxBlockSize() {
size := blockRes.Block.Data.SquareSize

// perform basic checks on the size of the square
require.LessOrEqual(size, uint64(appconsts.DefaultMaxSquareSize))
require.GreaterOrEqual(size, uint64(appconsts.DefaultMinSquareSize))
require.LessOrEqual(size, uint64(appconsts.MaxSquareSize))
require.GreaterOrEqual(size, uint64(appconsts.MinSquareSize))
sizes = append(sizes, size)
}
// ensure that at least one of the blocks used the max square size
assert.Contains(sizes, uint64(appconsts.DefaultMaxSquareSize))
assert.Contains(sizes, uint64(appconsts.MaxSquareSize))
})
require.NoError(s.network.WaitForNextBlock())
}
59 changes: 59 additions & 0 deletions app/test/limited_block_size_test.go
@@ -0,0 +1,59 @@
package app_test

import (
"context"
"fmt"
"math/rand"
"testing"
"time"

"github.com/celestiaorg/celestia-app/pkg/square"
"github.com/celestiaorg/celestia-app/test/txsim"
"github.com/celestiaorg/celestia-app/test/util/testnode"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func TestLimitedBlockSize(t *testing.T) {
if testing.Short() {
t.Skip("skipping Limited Block Size integration test in short mode.")
}

desiredSquareSize := uint64(64)

cparams := testnode.DefaultParams()
// limit the max block size to 64 x 64 via the consensus parameter
cparams.Block.MaxBytes = square.EstimateMaxBlockBytes(desiredSquareSize)

kr, rpcAddr, grpcAddr := txsim.Setup(t, cparams)

// using lots of individual small blobs will result in a large amount of
// overhead added to the square
seqs := txsim.NewBlobSequence(
txsim.NewRange(1, 10000),
txsim.NewRange(1, 50),
).Clone(25)

ctx, cancel := context.WithTimeout(context.Background(), time.Second*30)
defer cancel()

_ = txsim.Run(
ctx,
[]string{rpcAddr},
[]string{grpcAddr},
kr,
rand.Int63(),
time.Second,
seqs...,
)

// check the block sizes
blocks, err := testnode.ReadBlockchain(context.Background(), rpcAddr)
require.NoError(t, err)
Review comment on lines +30 to +52 (Member Author): txsim is cool tho

for _, block := range blocks {
fmt.Println(block.Data.SquareSize)
assert.LessOrEqual(t, block.Data.SquareSize, desiredSquareSize)
}

}
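
As a rough cross-check of the test above (an assumption of mine, not how square.EstimateMaxBlockBytes is implemented): a k-by-k square holds at most k*k shares of appconsts.ShareSize (512) bytes, so any sensible MaxBytes for that square size stays at or below that product; for k = 64 that is 64*64*512 bytes, i.e. 2 MiB.

// naiveSquareByteCap is a hedged back-of-the-envelope bound, not the real
// EstimateMaxBlockBytes: the total raw bytes a k-by-k square of shares can carry.
func naiveSquareByteCap(k uint64) int64 {
	return int64(k * k * uint64(appconsts.ShareSize))
}
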
4 changes: 2 additions & 2 deletions app/test/process_proposal_test.go
@@ -216,7 +216,7 @@ func TestProcessProposal(t *testing.T) {
d.Txs = [][]byte{blobTx}

// Erasure code the data to update the data root so this doesn't fail on an incorrect data root.
dataSquare, err := square.Construct(d.Txs, appconsts.DefaultMaxSquareSize)
dataSquare, err := square.Construct(d.Txs, appconsts.MaxSquareSize)
require.NoError(t, err)
eds, err := da.ExtendShares(shares.ToBytes(dataSquare))
require.NoError(t, err)
@@ -280,7 +280,7 @@
Txs: coretypes.Txs(sendTxs).ToSliceOfBytes(),
},
mutator: func(d *core.Data) {
dataSquare, err := square.Construct(d.Txs, appconsts.DefaultMaxSquareSize)
dataSquare, err := square.Construct(d.Txs, appconsts.MaxSquareSize)
require.NoError(t, err)

b := shares.NewEmptyBuilder().ImportRawShare(dataSquare[1].ToBytes())
2 changes: 1 addition & 1 deletion app/test/qgb_rpc_test.go
@@ -15,7 +15,7 @@ func TestQGBRPCQueries(t *testing.T) {
if testing.Short() {
t.Skip("skipping QGB integration test in short mode.")
}
_, cctx := testnode.DefaultNetwork(t, time.Millisecond)
_, cctx := testnode.DefaultNetwork(t, time.Millisecond, nil)
h, err := cctx.WaitForHeightWithTimeout(405, time.Minute)
require.NoError(t, err, h)
require.Greater(t, h, int64(401))
32 changes: 31 additions & 1 deletion app/test/std_sdk_test.go
@@ -8,7 +8,9 @@ import (

"github.com/celestiaorg/celestia-app/app"
"github.com/celestiaorg/celestia-app/app/encoding"
"github.com/celestiaorg/celestia-app/pkg/square"
"github.com/celestiaorg/celestia-app/test/util/testnode"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/crypto/hd"
"github.com/cosmos/cosmos-sdk/crypto/keyring"
"github.com/cosmos/cosmos-sdk/testutil/mock"
@@ -25,6 +27,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
abci "github.com/tendermint/tendermint/abci/types"
tmproto "github.com/tendermint/tendermint/proto/tendermint/types"
)

func TestStandardSDKIntegrationTestSuite(t *testing.T) {
@@ -48,7 +51,7 @@ type StandardSDKIntegrationTestSuite struct {
func (s *StandardSDKIntegrationTestSuite) SetupSuite() {
t := s.T()
t.Log("setting up integration test suite")
accounts, cctx := testnode.DefaultNetwork(t, time.Millisecond*400)
accounts, cctx := testnode.DefaultNetwork(t, time.Millisecond*400, nil)
s.accounts = accounts
s.ecfg = encoding.MakeConfig(app.ModuleEncodingRegisters...)
s.cctx = cctx
@@ -261,6 +264,33 @@ func (s *StandardSDKIntegrationTestSuite) TestStandardSDK() {
},
expectedCode: abci.CodeTypeOK,
},
{
name: "create param proposal change for a consensus parameter",
msgFunc: func() (msgs []sdk.Msg, signer string) {
account := s.unusedAccount()
newMaxBytes := &tmproto.BlockParams{
MaxBytes: square.EstimateMaxBlockBytes(256),
MaxGas: -1,
TimeIotaMs: 1000,
}

change := proposal.NewParamChange(baseapp.Paramspace, string(baseapp.ParamStoreKeyBlockParams), string(s.ecfg.Amino.MustMarshalJSON(newMaxBytes)))
content := proposal.NewParameterChangeProposal("title", "description", []proposal.ParamChange{change})
addr := getAddress(account, s.cctx.Keyring)
msg, err := oldgov.NewMsgSubmitProposal(
content,
sdk.NewCoins(
sdk.NewCoin(app.BondDenom, sdk.NewInt(1000000000))),
addr,
)
require.NoError(t, err)
return []sdk.Msg{msg}, account
},
// the error we expect is "invalid parameter value: block maximum bytes
// must be less than or equal to XXXXXXX", but due to how the SDK bubbles
// up errors, we get this code instead
expectedCode: govtypes.ErrNoProposalHandlerExists.ABCICode(),
},
}

// sign and submit the transactions
9 changes: 9 additions & 0 deletions cmd/celestia-appd/cmd/overrides.go
@@ -2,6 +2,8 @@ package cmd

import (
"github.com/celestiaorg/celestia-app/pkg/appconsts"
"github.com/celestiaorg/celestia-app/pkg/square"
"github.com/celestiaorg/celestia-app/x/paramfilter"
"github.com/cosmos/cosmos-sdk/server"
"github.com/spf13/cobra"
)
@@ -15,3 +17,10 @@ func overrideServerConfig(command *cobra.Command) error {
ctx.Config.Consensus.SkipTimeoutCommit = false
return server.SetCmdServerContext(command, ctx)
}

func setDefaultConsensusParams(command *cobra.Command) error {
ctx := server.GetServerContextFromCmd(command)
ctx.DefaultConsensusParams = paramfilter.DefaultConsensusParams()
ctx.DefaultConsensusParams.Block.MaxBytes = square.EstimateMaxBlockBytes(64)
return server.SetCmdServerContext(command, ctx)
}
5 changes: 5 additions & 0 deletions cmd/celestia-appd/cmd/root.go
@@ -110,6 +110,11 @@ func NewRootCmd() *cobra.Command {
return err
}

err = setDefaultConsensusParams(cmd)
if err != nil {
return err
}

return overrideServerConfig(cmd)
},
SilenceUsage: true,
2 changes: 1 addition & 1 deletion go.mod
@@ -200,7 +200,7 @@ require (
)

replace (
github.com/cosmos/cosmos-sdk => github.com/celestiaorg/cosmos-sdk v1.12.0-sdk-v0.46.11
github.com/cosmos/cosmos-sdk => github.com/celestiaorg/cosmos-sdk v1.12.0-sdk-v0.46.11.0.20230511190231-e7fc8b26c224 // v1.12.0-sdk-v0.46.11
github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1
github.com/tendermint/tendermint => github.com/celestiaorg/celestia-core v1.19.0-tm-v0.34.27
)
4 changes: 2 additions & 2 deletions go.sum
@@ -171,8 +171,8 @@ github.com/c-bata/go-prompt v0.2.2/go.mod h1:VzqtzE2ksDBcdln8G7mk2RX9QyGjH+OVqOC
github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
github.com/celestiaorg/celestia-core v1.19.0-tm-v0.34.27 h1:GLdDJRu1fRSMft4IqQz4/x/H1U3eN2TFlYbAycbSiN4=
github.com/celestiaorg/celestia-core v1.19.0-tm-v0.34.27/go.mod h1:8PbX2OIPehldawXWAzNWPxBPnfFtcYtjHecE45b2Beg=
github.com/celestiaorg/cosmos-sdk v1.12.0-sdk-v0.46.11 h1:Y+/dAyu7t0F8+EZz+jU3tyZqG10W8LTCQpnHe8Ejuec=
github.com/celestiaorg/cosmos-sdk v1.12.0-sdk-v0.46.11/go.mod h1:uKEyhG8H8hbjebOEEgtyqghJWpuMyF+u61MK5cis+pk=
github.com/celestiaorg/cosmos-sdk v1.12.0-sdk-v0.46.11.0.20230511190231-e7fc8b26c224 h1:0b7jjToGzeH1ZYIL9v/shigidfavO+UtBoAzc4reZB0=
github.com/celestiaorg/cosmos-sdk v1.12.0-sdk-v0.46.11.0.20230511190231-e7fc8b26c224/go.mod h1:xCG6OUkJy5KUMEg20Zk010lra9XjkmKS3+bk0wp7bd8=
github.com/celestiaorg/merkletree v0.0.0-20210714075610-a84dc3ddbbe4 h1:CJdIpo8n5MFP2MwK0gSRcOVlDlFdQJO1p+FqdxYzmvc=
github.com/celestiaorg/merkletree v0.0.0-20210714075610-a84dc3ddbbe4/go.mod h1:fzuHnhzj1pUygGz+1ZkB3uQbEUL4htqCGJ4Qs2LwMZA=
github.com/celestiaorg/nmt v0.15.0 h1:ID9QlMIeP6WK/iiGcfnYLu2qqVIq0UYe/dc3TVPt6EA=
14 changes: 7 additions & 7 deletions pkg/appconsts/appconsts.go
@@ -55,30 +55,30 @@ const (
// in a continuation sparse share of a sequence.
ContinuationSparseShareContentSize = ShareSize - NamespaceSize - ShareInfoBytes

// DefaultMaxSquareSize is the maximum original square width.
// MaxSquareSize is the maximum original square width.
//
// Note: 128 shares in a row * 128 shares in a column * 512 bytes in a share
// = 8 MiB
DefaultMaxSquareSize = 128
MaxSquareSize = 128

// MaxShareCount is the maximum number of shares allowed in the original
// data square.
MaxShareCount = DefaultMaxSquareSize * DefaultMaxSquareSize
MaxShareCount = MaxSquareSize * MaxSquareSize

// DefaultMinSquareSize is the smallest original square width.
DefaultMinSquareSize = 1
// MinSquareSize is the smallest original square width. This value should always be 1.
MinSquareSize = 1

// MinShareCount is the minimum number of shares allowed in the original
// data square.
MinShareCount = DefaultMinSquareSize * DefaultMinSquareSize
MinShareCount = MinSquareSize * MinSquareSize

// SubtreeRootThreshold works as a target value for the number of subtree roots in the
// share commitment. If a blob contains more shares than this number, then the height
// of the subtree roots will gradually increase so that the amount remains within that limit.
// The rationale for this value is described in more detail in ADR013
// (./docs/architecture/adr-013).
// ADR013 https://github.com/celestiaorg/celestia-app/blob/e905143e8fe138ce6085ae9a5c1af950a2d87638/docs/architecture/adr-013-non-interactive-default-rules-for-zero-padding.md //nolint: lll
SubtreeRootThreshold = DefaultMaxSquareSize
SubtreeRootThreshold = MaxSquareSize

// MaxShareVersion is the maximum value a share version can be.
MaxShareVersion = 127
10 changes: 5 additions & 5 deletions pkg/da/data_availability_header.go
@@ -17,8 +17,8 @@ import (
)

const (
maxExtendedSquareWidth = appconsts.DefaultMaxSquareSize * 2
minExtendedSquareWidth = appconsts.DefaultMinSquareSize * 2
maxExtendedSquareWidth = appconsts.MaxSquareSize * 2
minExtendedSquareWidth = appconsts.MinSquareSize * 2
)

// DataAvailabilityHeader (DAHeader) contains the row and column roots of the
@@ -60,11 +60,11 @@ func ExtendShares(s [][]byte) (*rsmt2d.ExtendedDataSquare, error) {

squareSize := square.Size(len(s))

if squareSize < appconsts.DefaultMinSquareSize || squareSize > appconsts.DefaultMaxSquareSize {
if squareSize < appconsts.MinSquareSize || squareSize > appconsts.MaxSquareSize {
return nil, fmt.Errorf(
"invalid square size: min %d max %d provided %d",
appconsts.DefaultMinSquareSize,
appconsts.DefaultMaxSquareSize,
appconsts.MinSquareSize,
appconsts.MaxSquareSize,
squareSize,
)
}
14 changes: 7 additions & 7 deletions pkg/da/data_availability_header_test.go
@@ -49,8 +49,8 @@ func TestNewDataAvailabilityHeader(t *testing.T) {
{
name: "max square size",
expectedHash: []byte{0xce, 0x5c, 0xf3, 0xc9, 0x15, 0xeb, 0xbf, 0xb0, 0x67, 0xe1, 0xa5, 0x97, 0x35, 0xf3, 0x25, 0x7b, 0x1c, 0x47, 0x74, 0x1f, 0xec, 0x6a, 0x33, 0x19, 0x7f, 0x8f, 0xc2, 0x4a, 0xe, 0xe2, 0xbe, 0x73},
squareSize: appconsts.DefaultMaxSquareSize,
shares: generateShares(appconsts.DefaultMaxSquareSize * appconsts.DefaultMaxSquareSize),
squareSize: appconsts.MaxSquareSize,
shares: generateShares(appconsts.MaxSquareSize * appconsts.MaxSquareSize),
},
}

@@ -77,7 +77,7 @@ func TestExtendShares(t *testing.T) {
{
name: "too large square size",
expectedErr: true,
shares: generateShares((appconsts.DefaultMaxSquareSize + 1) * (appconsts.DefaultMaxSquareSize + 1)),
shares: generateShares((appconsts.MaxSquareSize + 1) * (appconsts.MaxSquareSize + 1)),
},
{
name: "invalid number of shares",
@@ -103,7 +103,7 @@ func TestDataAvailabilityHeaderProtoConversion(t *testing.T) {
dah DataAvailabilityHeader
}

shares := generateShares(appconsts.DefaultMaxSquareSize * appconsts.DefaultMaxSquareSize)
shares := generateShares(appconsts.MaxSquareSize * appconsts.MaxSquareSize)
eds, err := ExtendShares(shares)
require.NoError(t, err)
bigdah := NewDataAvailabilityHeader(eds)
@@ -138,15 +138,15 @@ func Test_DAHValidateBasic(t *testing.T) {
errStr string
}

shares := generateShares(appconsts.DefaultMaxSquareSize * appconsts.DefaultMaxSquareSize)
shares := generateShares(appconsts.MaxSquareSize * appconsts.MaxSquareSize)
eds, err := ExtendShares(shares)
require.NoError(t, err)
bigdah := NewDataAvailabilityHeader(eds)

// make a mutant dah that has too many roots
var tooBigDah DataAvailabilityHeader
tooBigDah.ColumnRoots = make([][]byte, appconsts.DefaultMaxSquareSize*appconsts.DefaultMaxSquareSize)
tooBigDah.RowRoots = make([][]byte, appconsts.DefaultMaxSquareSize*appconsts.DefaultMaxSquareSize)
tooBigDah.ColumnRoots = make([][]byte, appconsts.MaxSquareSize*appconsts.MaxSquareSize)
tooBigDah.RowRoots = make([][]byte, appconsts.MaxSquareSize*appconsts.MaxSquareSize)
copy(tooBigDah.ColumnRoots, bigdah.ColumnRoots)
copy(tooBigDah.RowRoots, bigdah.RowRoots)
tooBigDah.ColumnRoots = append(tooBigDah.ColumnRoots, bytes.Repeat([]byte{1}, 32))
2 changes: 1 addition & 1 deletion pkg/proof/proof.go
@@ -24,7 +24,7 @@ func NewTxInclusionProof(txs [][]byte, txIndex uint64) (types.ShareProof, error)
return types.ShareProof{}, fmt.Errorf("txIndex %d out of bounds", txIndex)
}

builder, err := square.NewBuilder(appconsts.DefaultMaxSquareSize, txs...)
builder, err := square.NewBuilder(appconsts.MaxSquareSize, txs...)
if err != nil {
return types.ShareProof{}, err
}
2 changes: 1 addition & 1 deletion pkg/proof/proof_test.go
@@ -99,7 +99,7 @@ func TestNewShareInclusionProof(t *testing.T) {

// not setting useShareIndexes because the transaction indexes do not refer
// to the messages, as the square and transactions were created manually.
dataSquare, err := square.Construct(txs.ToSliceOfBytes(), appconsts.DefaultMaxSquareSize)
dataSquare, err := square.Construct(txs.ToSliceOfBytes(), appconsts.MaxSquareSize)
if err != nil {
panic(err)
}
2 changes: 1 addition & 1 deletion pkg/proof/querier.go
@@ -87,7 +87,7 @@ func QueryShareInclusionProof(_ sdk.Context, path []string, req abci.RequestQuer
return nil, fmt.Errorf("error reading block: %w", err)
}

dataSquare, err := square.Construct(pbb.Data.Txs, appconsts.DefaultMaxSquareSize)
dataSquare, err := square.Construct(pbb.Data.Txs, appconsts.MaxSquareSize)
if err != nil {
return nil, err
}