const logModule = "leveldb"
var (
- BlockStoreKey = []byte("blockStore")
- BlockPrefix = []byte("B:")
- BlockHeaderPrefix = []byte("BH:")
- TxStatusPrefix = []byte("BTS:")
+ // CheckpointPrefix represents the namespace of checkpoints in db
+ CheckpointPrefix = []byte("CP:")
+ // BlockStoreKey is the key of the chain status entry (protocol.BlockStoreState)
+ BlockStoreKey = []byte("blockStore")
+ // BlockHeaderIndexPrefix is the namespace of height-indexed block headers.
+ // NOTE(review): it reuses the old BlockHeaderPrefix namespace "BH:" — confirm
+ // that entries written under the previous key layout are migrated or rebuilt.
+ BlockHeaderIndexPrefix = []byte("BH:")
)
func loadBlockStoreStateJSON(db dbm.DB) *protocol.BlockStoreState {
// methods for querying current data.
+// Store is the leveldb-backed chain data store; reads are served through
+// an in-memory cache that falls back to db on a miss.
type Store struct {
db dbm.DB
- cache blockCache
+ // cache serves header/tx/height->hashes lookups (see NewStore wiring)
+ cache cache
}
-func CalcBlockKey(hash *bc.Hash) []byte {
- return append(BlockPrefix, hash.Bytes()...)
-}
-
-func CalcBlockHeaderKey(height uint64, hash *bc.Hash) []byte {
- buf := [8]byte{}
- binary.BigEndian.PutUint64(buf[:], height)
- key := append(BlockHeaderPrefix, buf[:]...)
- return append(key, hash.Bytes()...)
-}
-
-// GetBlockHeader return the BlockHeader by given hash
-func (s *Store) GetBlockHeader(hash *bc.Hash) (*types.BlockHeader, error) {
- return nil, nil
-}
+// NewStore creates and returns a new Store object.
+// The cache is wired with three miss-loaders that read straight from db.
+// NOTE(review): the package-level GetBlockHeader / GetBlockTransactions /
+// GetBlockHashesByHeight helpers are assumed to be defined elsewhere in
+// this change set — confirm they exist.
+func NewStore(db dbm.DB) *Store {
+ // loader invoked on a block-header cache miss
+ fillBlockHeaderFn := func(hash *bc.Hash) (*types.BlockHeader, error) {
+ return GetBlockHeader(db, hash)
+ }
-// GetBlock return the block by given hash
-func GetBlock(db dbm.DB, hash *bc.Hash) (*types.Block, error) {
- bytez := db.Get(CalcBlockKey(hash))
- if bytez == nil {
- return nil, nil
+ // loader invoked on a block-transactions cache miss
+ fillBlockTxsFn := func(hash *bc.Hash) ([]*types.Tx, error) {
+ return GetBlockTransactions(db, hash)
}
- block := &types.Block{}
- err := block.UnmarshalText(bytez)
- return block, err
-}
+ // loader invoked on a height->hashes cache miss
+ fillBlockHashesFn := func(height uint64) ([]*bc.Hash, error) {
+ return GetBlockHashesByHeight(db, height)
+ }
-// NewStore creates and returns a new Store object.
-func NewStore(db dbm.DB) *Store {
- cache := newBlockCache(func(hash *bc.Hash) (*types.Block, error) {
- return GetBlock(db, hash)
- })
+ cache := newCache(fillBlockHeaderFn, fillBlockTxsFn, fillBlockHashesFn)
return &Store{
db: db,
cache: cache,
}
}
+// GetBlockHeader returns the BlockHeader for the given hash, served from
+// the cache and loaded from db on a miss.
+func (s *Store) GetBlockHeader(hash *bc.Hash) (*types.BlockHeader, error) {
+ return s.cache.lookupBlockHeader(hash)
+}
+
// GetUtxo will search the utxo in db
func (s *Store) GetUtxo(hash *bc.Hash) (*storage.UtxoEntry, error) {
return getUtxo(s.db, hash)
}
+// GetContract returns the contract bytes stored under the given hash
+func (s *Store) GetContract(hash [32]byte) ([]byte, error) {
+ return getContract(s.db, hash)
+}
+
// BlockExist check if the block is stored in disk
func (s *Store) BlockExist(hash *bc.Hash) bool {
- block, err := s.cache.lookup(hash)
- return err == nil && block != nil
+ // existence is judged by header presence alone; txs need not be loaded
+ _, err := s.cache.lookupBlockHeader(hash)
+ return err == nil
+}
+
+// SaveBlockHeader persists a new block header in the protocol.
+func (s *Store) SaveBlockHeader(blockHeader *types.BlockHeader) error {
+ binaryBlockHeader, err := blockHeader.MarshalText()
+ if err != nil {
+ return errors.Wrap(err, "Marshal block header")
+ }
+
+ blockHash := blockHeader.Hash()
+ s.db.Set(CalcBlockHeaderKey(&blockHash), binaryBlockHeader)
+ // evict any cached copy so the next lookup re-reads the fresh bytes
+ s.cache.removeBlockHeader(blockHeader)
+ return nil
+}
+
+// GetBlockHashesByHeight returns all block hashes stored at the specified
+// height (a height can hold several competing blocks), via the cache.
+func (s *Store) GetBlockHashesByHeight(height uint64) ([]*bc.Hash, error) {
+ return s.cache.lookupBlockHashesByHeight(height)
+}
+
+// SaveBlock persists a new block in the protocol: the header, the
+// transactions and the height->hashes index entry are written in one db
+// batch, then the stale cached hash list for that height is evicted.
+func (s *Store) SaveBlock(block *types.Block) error {
+ startTime := time.Now()
+ binaryBlockHeader, err := block.MarshalTextForBlockHeader()
+ if err != nil {
+ return errors.Wrap(err, "Marshal block header")
+ }
+
+ binaryBlockTxs, err := block.MarshalTextForTransactions()
+ if err != nil {
+ return errors.Wrap(err, "Marshal block transactions")
+ }
+
+ hashes, err := s.GetBlockHashesByHeight(block.Height)
+ if err != nil {
+ return err
+ }
+
+ // re-saving an already-stored block must not duplicate its hash in the
+ // height index, so only append when it is not recorded yet
+ blockHash := block.Hash()
+ blockHashes := make([]*bc.Hash, 0, len(hashes)+1)
+ blockHashes = append(blockHashes, hashes...)
+ exist := false
+ for _, hash := range hashes {
+ if hash.String() == blockHash.String() {
+ exist = true
+ break
+ }
+ }
+ if !exist {
+ blockHashes = append(blockHashes, &blockHash)
+ }
+
+ binaryBlockHashes, err := json.Marshal(blockHashes)
+ if err != nil {
+ return errors.Wrap(err, "Marshal block hashes")
+ }
+
+ batch := s.db.NewBatch()
+ batch.Set(CalcBlockHashesKey(block.Height), binaryBlockHashes)
+ batch.Set(CalcBlockHeaderKey(&blockHash), binaryBlockHeader)
+ batch.Set(CalcBlockTransactionsKey(&blockHash), binaryBlockTxs)
+ batch.Set(CalcBlockHeaderIndexKey(block.Height, &blockHash), binaryBlockHeader)
+ batch.Write()
+
+ // invalidate the cached hash list; the next lookup reloads from db
+ s.cache.removeBlockHashes(block.Height)
+ log.WithFields(log.Fields{
+ "module": logModule,
+ "height": block.Height,
+ "hash": blockHash.String(),
+ "duration": time.Since(startTime),
+ }).Info("block saved on disk")
+ return nil
+}
+
+// GetBlockTransactions returns the block's transactions for the given
+// block hash, served from the cache.
+func (s *Store) GetBlockTransactions(hash *bc.Hash) ([]*types.Tx, error) {
+ return s.cache.lookupBlockTxs(hash)
}
// GetBlock return the block by given hash
func (s *Store) GetBlock(hash *bc.Hash) (*types.Block, error) {
- return s.cache.lookup(hash)
+ // reassemble the block from its separately stored header and txs
+ blockHeader, err := s.GetBlockHeader(hash)
+ if err != nil {
+ return nil, err
+ }
+
+ txs, err := s.GetBlockTransactions(hash)
+ if err != nil {
+ return nil, err
+ }
+
+ return &types.Block{
+ BlockHeader: *blockHeader,
+ Transactions: txs,
+ }, nil
}
// GetTransactionsUtxo will return all the utxo that related to the input txs
return loadBlockStoreStateJSON(s.db)
}
+// LoadBlockIndex rebuilds the in-memory block index from db, up to stateBestHeight
func (s *Store) LoadBlockIndex(stateBestHeight uint64) (*state.BlockIndex, error) {
startTime := time.Now()
blockIndex := state.NewBlockIndex()
- bhIter := s.db.IteratorPrefix(BlockHeaderPrefix)
+ bhIter := s.db.IteratorPrefix(BlockHeaderIndexPrefix)
defer bhIter.Release()
var lastNode *state.BlockNode
return blockIndex, nil
}
-// SaveBlock persists a new block in the protocol.
-func (s *Store) SaveBlock(block *types.Block) error {
- startTime := time.Now()
- binaryBlock, err := block.MarshalText()
- if err != nil {
- return errors.Wrap(err, "Marshal block meta")
- }
-
- binaryBlockHeader, err := block.BlockHeader.MarshalText()
- if err != nil {
- return errors.Wrap(err, "Marshal block header")
- }
-
- blockHash := block.Hash()
- batch := s.db.NewBatch()
- batch.Set(CalcBlockKey(&blockHash), binaryBlock)
- batch.Set(CalcBlockHeaderKey(block.Height, &blockHash), binaryBlockHeader)
- batch.Write()
-
- log.WithFields(log.Fields{
- "module": logModule,
- "height": block.Height,
- "hash": blockHash.String(),
- "duration": time.Since(startTime),
- }).Info("block saved on disk")
- return nil
-}
-
// SaveChainStatus save the core's newest status && delete old status
-func (s *Store) SaveChainStatus(node *state.BlockNode, view *state.UtxoViewpoint, contractView *state.ContractViewpoint) error {
+func (s *Store) SaveChainStatus(node *state.BlockNode, view *state.UtxoViewpoint, contractView *state.ContractViewpoint, finalizedHeight uint64, finalizedHash *bc.Hash) error {
batch := s.db.NewBatch()
if err := saveUtxoView(batch, view); err != nil {
return err
return err
}
- bytes, err := json.Marshal(protocol.BlockStoreState{Height: node.Height, Hash: &node.Hash})
+ bytes, err := json.Marshal(protocol.BlockStoreState{Height: node.Height, Hash: &node.Hash, FinalizedHeight: finalizedHeight, FinalizedHash: finalizedHash})
if err != nil {
return err
}
return nil
}
-func (s *Store) GetCheckpoint(*bc.Hash) (*state.Checkpoint, error) {
- return nil, nil
+// calcCheckpointKey builds the db key for a checkpoint:
+// "CP:" + big-endian(height) [+ hash bytes].
+// A nil hash yields the height-only prefix, which callers use to
+// iterate every checkpoint stored at that height.
+func calcCheckpointKey(height uint64, hash *bc.Hash) []byte {
+ buf := make([]byte, 8)
+ binary.BigEndian.PutUint64(buf, height)
+ key := append(CheckpointPrefix, buf...)
+ if hash != nil {
+ key = append(key, hash.Bytes()...)
+ }
+ return key
+}
+
+// GetCheckpoint returns the checkpoint stored for the given block hash.
+// A missing entry yields an explicit not-found error instead of the
+// opaque json error that Unmarshal produces on nil bytes.
+func (s *Store) GetCheckpoint(hash *bc.Hash) (*state.Checkpoint, error) {
+ header, err := s.GetBlockHeader(hash)
+ if err != nil {
+ return nil, err
+ }
+
+ data := s.db.Get(calcCheckpointKey(header.Height, hash))
+ if data == nil {
+ // db.Get returns nil when the key is absent; surface a clear error
+ return nil, errors.New("checkpoint not found")
+ }
+
+ checkpoint := &state.Checkpoint{}
+ if err := json.Unmarshal(data, checkpoint); err != nil {
+ return nil, err
+ }
+
+ return checkpoint, nil
}
// GetCheckpointsByHeight return all checkpoints of specified block height
-func (s *Store) GetCheckpointsByHeight(uint64) ([]*state.Checkpoint, error) {
- return nil, nil
+func (s *Store) GetCheckpointsByHeight(height uint64) ([]*state.Checkpoint, error) {
+ iter := s.db.IteratorPrefix(calcCheckpointKey(height, nil))
+ // NOTE(review): loadCheckpointsFromIter also defers Release; the double
+ // Release is assumed idempotent for this iterator — confirm in dbm
+ defer iter.Release()
+ return loadCheckpointsFromIter(iter)
+}
+
+// CheckpointsFromNode returns all checkpoints starting from the specified
+// block height and hash (inclusive), in key order.
+func (s *Store) CheckpointsFromNode(height uint64, hash *bc.Hash) ([]*state.Checkpoint, error) {
+ startKey := calcCheckpointKey(height, hash)
+ iter := s.db.IteratorPrefixWithStart(CheckpointPrefix, startKey, false)
+ // release here as well: the early error return below would otherwise
+ // leak the iterator (Release is assumed idempotent, matching the
+ // caller-defer + loader-defer pairing in GetCheckpointsByHeight)
+ defer iter.Release()
+
+ // the iterator is assumed to be positioned on startKey, so Value() is
+ // readable before the first Next() — TODO confirm against the dbm impl
+ finalizedCheckpoint := &state.Checkpoint{}
+ if err := json.Unmarshal(iter.Value(), finalizedCheckpoint); err != nil {
+ return nil, err
+ }
+
+ checkpoints := []*state.Checkpoint{finalizedCheckpoint}
+ subs, err := loadCheckpointsFromIter(iter)
+ if err != nil {
+ return nil, err
+ }
+
+ checkpoints = append(checkpoints, subs...)
+ return checkpoints, nil
+}
+
+// loadCheckpointsFromIter decodes every remaining entry of iter into a
+// checkpoint slice; it takes ownership of iter and releases it when done.
+func loadCheckpointsFromIter(iter dbm.Iterator) ([]*state.Checkpoint, error) {
+ var checkpoints []*state.Checkpoint
+ defer iter.Release()
+ for iter.Next() {
+ checkpoint := &state.Checkpoint{}
+ if err := json.Unmarshal(iter.Value(), checkpoint); err != nil {
+ return nil, err
+ }
+
+ checkpoints = append(checkpoints, checkpoint)
+ }
+ return checkpoints, nil
}
// SaveCheckpoints bulk save multiple checkpoint
-func (s *Store) SaveCheckpoints(...*state.Checkpoint) error {
+func (s *Store) SaveCheckpoints(checkpoints ...*state.Checkpoint) error {
+ // write all checkpoints in a single db batch, keyed by height+hash
+ batch := s.db.NewBatch()
+ for _, checkpoint := range checkpoints {
+ data, err := json.Marshal(checkpoint)
+ if err != nil {
+ return err
+ }
+
+ batch.Set(calcCheckpointKey(checkpoint.Height, &checkpoint.Hash), data)
+ }
+ batch.Write()
return nil
}