• R/O
  • HTTP
  • SSH
  • HTTPS

bytom: Commit

Official Go implementation of the Bytom protocol


Commit MetaInfo

Revision: fefb96b027c89d8c5d118aceae0aa97643789bc2 (tree)
Time: 2021-07-22 16:40:21
Author: Paladz <yzhu101@uott...>
Committer: GitHub

Log Message

Merge branch 'btm2.0' into btm2.0_api

Change Summary

Incremental Difference

--- a/database/cache.go
+++ b/database/cache.go
@@ -1,6 +1,7 @@
11 package database
22
33 import (
4+ "encoding/hex"
45 "strconv"
56
67 "github.com/golang/groupcache/singleflight"
@@ -8,6 +9,7 @@ import (
89 "github.com/bytom/bytom/common"
910 "github.com/bytom/bytom/protocol/bc"
1011 "github.com/bytom/bytom/protocol/bc/types"
12+ "github.com/bytom/bytom/protocol/state"
1113 )
1214
1315 const (
@@ -15,24 +17,28 @@ const (
1517 maxCachedBlockTransactions = 1024
1618 maxCachedBlockHashes = 8192
1719 maxCachedMainChainHashes = 8192
20+ maxCheckPoints = 8192
1821 )
1922
2023 type fillBlockHeaderFn func(hash *bc.Hash) (*types.BlockHeader, error)
2124 type fillBlockTransactionsFn func(hash *bc.Hash) ([]*types.Tx, error)
2225 type fillBlockHashesFn func(height uint64) ([]*bc.Hash, error)
2326 type fillMainChainHashFn func(height uint64) (*bc.Hash, error)
27+type fillCheckPointFn func(key []byte) (*state.Checkpoint, error)
2428
25-func newCache(fillBlockHeader fillBlockHeaderFn, fillBlockTxs fillBlockTransactionsFn, fillBlockHashes fillBlockHashesFn, fillMainChainHash fillMainChainHashFn) cache {
29+func newCache(fillBlockHeader fillBlockHeaderFn, fillBlockTxs fillBlockTransactionsFn, fillBlockHashes fillBlockHashesFn, fillMainChainHash fillMainChainHashFn, fillCheckPoint fillCheckPointFn) cache {
2630 return cache{
2731 lruBlockHeaders: common.NewCache(maxCachedBlockHeaders),
2832 lruBlockTxs: common.NewCache(maxCachedBlockTransactions),
2933 lruBlockHashes: common.NewCache(maxCachedBlockHashes),
3034 lruMainChainHashes: common.NewCache(maxCachedMainChainHashes),
35+ lruCheckPoints: common.NewCache(maxCheckPoints),
3136
3237 fillBlockHeaderFn: fillBlockHeader,
3338 fillBlockTransactionFn: fillBlockTxs,
3439 fillBlockHashesFn: fillBlockHashes,
3540 fillMainChainHashFn: fillMainChainHash,
41+ fillCheckPointFn: fillCheckPoint,
3642 }
3743 }
3844
@@ -41,11 +47,13 @@ type cache struct {
4147 lruBlockTxs *common.Cache
4248 lruBlockHashes *common.Cache
4349 lruMainChainHashes *common.Cache
50+ lruCheckPoints *common.Cache
4451
4552 fillBlockHashesFn func(uint64) ([]*bc.Hash, error)
4653 fillBlockTransactionFn func(hash *bc.Hash) ([]*types.Tx, error)
4754 fillBlockHeaderFn func(hash *bc.Hash) (*types.BlockHeader, error)
4855 fillMainChainHashFn func(uint64) (*bc.Hash, error)
56+ fillCheckPointFn func(key []byte) (*state.Checkpoint, error)
4957
5058 sf singleflight.Group
5159 }
@@ -145,3 +153,29 @@ func (c *cache) lookupMainChainHash(height uint64) (*bc.Hash, error) {
145153 func (c *cache) removeMainChainHash(height uint64) {
146154 c.lruMainChainHashes.Remove(height)
147155 }
156+
157+func (c *cache) lookupCheckPoint(key []byte) (*state.Checkpoint, error) {
158+ keyStr := hex.EncodeToString(key)
159+ if data, ok := c.lruCheckPoints.Get(keyStr); ok {
160+ return data.(*state.Checkpoint), nil
161+ }
162+
163+ checkpoint, err := c.sf.Do("CheckPoint:"+string(key), func() (interface{}, error) {
164+ checkPoint, err := c.fillCheckPointFn(key)
165+ if err != nil {
166+ return nil, err
167+ }
168+
169+ c.lruCheckPoints.Add(keyStr, checkPoint)
170+ return checkPoint, nil
171+ })
172+ if err != nil {
173+ return nil, err
174+ }
175+
176+ return checkpoint.(*state.Checkpoint), nil
177+}
178+
179+func (c *cache) removeCheckPoint(key []byte) {
180+ c.lruCheckPoints.Remove(hex.EncodeToString(key))
181+}
--- a/database/cache_test.go
+++ b/database/cache_test.go
@@ -41,7 +41,7 @@ func TestBlockCache(t *testing.T) {
4141 return blockIndexHashes[height][0], nil
4242 }
4343
44- cache := newCache(fillBlockHeaderFn, fillBlockTxsFn, fillBlockHashesFn, fillMainChainHashFn)
44+ cache := newCache(fillBlockHeaderFn, fillBlockTxsFn, fillBlockHashesFn, fillMainChainHashFn, nil)
4545
4646 for i := 0; i < maxCachedBlockHeaders+10; i++ {
4747 block := newBlock(uint64(i))
--- a/database/store.go
+++ b/database/store.go
@@ -1,7 +1,6 @@
11 package database
22
33 import (
4- "encoding/binary"
54 "encoding/json"
65 "time"
76
@@ -61,7 +60,11 @@ func NewStore(db dbm.DB) *Store {
6160 return GetMainChainHash(db, height)
6261 }
6362
64- cache := newCache(fillBlockHeaderFn, fillBlockTxsFn, fillBlockHashesFn, fillMainChainHashFn)
63+ fillCheckPointFn := func(key []byte) (*state.Checkpoint, error) {
64+ return getCheckpointFromDB(db, key)
65+ }
66+
67+ cache := newCache(fillBlockHeaderFn, fillBlockTxsFn, fillBlockHashesFn, fillMainChainHashFn, fillCheckPointFn)
6568 return &Store{
6669 db: db,
6770 cache: cache,
@@ -238,100 +241,3 @@ func (s *Store) SaveChainStatus(blockHeader *types.BlockHeader, mainBlockHeaders
238241
239242 return nil
240243 }
241-
242-func calcCheckpointKey(height uint64, hash *bc.Hash) []byte {
243- buf := make([]byte, 8)
244- binary.BigEndian.PutUint64(buf, height)
245- key := append(checkpointKeyPrefix, buf...)
246- if hash != nil {
247- key = append(key, hash.Bytes()...)
248- }
249- return key
250-}
251-
252-func (s *Store) GetCheckpoint(hash *bc.Hash) (*state.Checkpoint, error) {
253- header, err := s.GetBlockHeader(hash)
254- if err != nil {
255- return nil, err
256- }
257-
258- data := s.db.Get(calcCheckpointKey(header.Height, hash))
259- checkpoint := &state.Checkpoint{}
260- if err := json.Unmarshal(data, checkpoint); err != nil {
261- return nil, err
262- }
263-
264- checkpoint.SupLinks = append(checkpoint.SupLinks, header.SupLinks...)
265- return checkpoint, nil
266-}
267-
268-// GetCheckpointsByHeight return all checkpoints of specified block height
269-func (s *Store) GetCheckpointsByHeight(height uint64) ([]*state.Checkpoint, error) {
270- iter := s.db.IteratorPrefix(calcCheckpointKey(height, nil))
271- defer iter.Release()
272- return s.loadCheckpointsFromIter(iter)
273-}
274-
275-// CheckpointsFromNode return all checkpoints from specified block height and hash
276-func (s *Store) CheckpointsFromNode(height uint64, hash *bc.Hash) ([]*state.Checkpoint, error) {
277- startKey := calcCheckpointKey(height, hash)
278- iter := s.db.IteratorPrefixWithStart(checkpointKeyPrefix, startKey, false)
279-
280- firstCheckpoint := &state.Checkpoint{}
281- if err := json.Unmarshal(iter.Value(), firstCheckpoint); err != nil {
282- return nil, err
283- }
284-
285- checkpoints := []*state.Checkpoint{firstCheckpoint}
286- subs, err := s.loadCheckpointsFromIter(iter)
287- if err != nil {
288- return nil, err
289- }
290-
291- checkpoints = append(checkpoints, subs...)
292- return checkpoints, nil
293-}
294-
295-func (s *Store) loadCheckpointsFromIter(iter dbm.Iterator) ([]*state.Checkpoint, error) {
296- var checkpoints []*state.Checkpoint
297- defer iter.Release()
298- for iter.Next() {
299- checkpoint := &state.Checkpoint{}
300- if err := json.Unmarshal(iter.Value(), checkpoint); err != nil {
301- return nil, err
302- }
303-
304- header, err := s.GetBlockHeader(&checkpoint.Hash)
305- if err != nil {
306- return nil, err
307- }
308-
309- checkpoint.SupLinks = append(checkpoint.SupLinks, header.SupLinks...)
310- checkpoints = append(checkpoints, checkpoint)
311- }
312- return checkpoints, nil
313-}
314-
315-// SaveCheckpoints bulk save multiple checkpoint
316-func (s *Store) SaveCheckpoints(checkpoints []*state.Checkpoint) error {
317- batch := s.db.NewBatch()
318- for _, checkpoint := range checkpoints {
319- startTime := time.Now()
320- data, err := json.Marshal(checkpoint)
321- if err != nil {
322- return err
323- }
324-
325- batch.Set(calcCheckpointKey(checkpoint.Height, &checkpoint.Hash), data)
326- log.WithFields(log.Fields{
327- "module": logModule,
328- "height": checkpoint.Height,
329- "hash": checkpoint.Hash.String(),
330- "status": checkpoint.Status,
331- "duration": time.Since(startTime),
332- }).Info("checkpoint saved on disk")
333- }
334-
335- batch.Write()
336- return nil
337-}
--- /dev/null
+++ b/database/store_checkpoint.go
@@ -0,0 +1,127 @@
1+package database
2+
3+import (
4+ "encoding/binary"
5+ "encoding/json"
6+ "time"
7+
8+ log "github.com/sirupsen/logrus"
9+
10+ dbm "github.com/bytom/bytom/database/leveldb"
11+ "github.com/bytom/bytom/protocol/bc"
12+ "github.com/bytom/bytom/protocol/state"
13+)
14+
15+func calcCheckpointKey(height uint64, hash *bc.Hash) []byte {
16+ buf := make([]byte, 8)
17+ binary.BigEndian.PutUint64(buf, height)
18+ key := append(checkpointKeyPrefix, buf...)
19+ if hash != nil {
20+ key = append(key, hash.Bytes()...)
21+ }
22+ return key
23+}
24+
25+func getCheckpointFromDB(db dbm.DB, key []byte) (*state.Checkpoint, error) {
26+ checkpoint := &state.Checkpoint{}
27+ if err := json.Unmarshal(db.Get(key), checkpoint); err != nil {
28+ return nil, err
29+ }
30+
31+ return checkpoint, nil
32+}
33+
34+func (s *Store) GetCheckpoint(hash *bc.Hash) (*state.Checkpoint, error) {
35+ header, err := s.GetBlockHeader(hash)
36+ if err != nil {
37+ return nil, err
38+ }
39+
40+ checkpoint, err := s.cache.lookupCheckPoint(calcCheckpointKey(header.Height, hash))
41+ if err != nil {
42+ return nil, err
43+ }
44+
45+ checkpoint.SupLinks = append(checkpoint.SupLinks, header.SupLinks...)
46+ return checkpoint, nil
47+}
48+
49+// GetCheckpointsByHeight return all checkpoints of specified block height
50+func (s *Store) GetCheckpointsByHeight(height uint64) ([]*state.Checkpoint, error) {
51+ iter := s.db.IteratorPrefix(calcCheckpointKey(height, nil))
52+ defer iter.Release()
53+ return s.loadCheckpointsFromIter(iter)
54+}
55+
56+// CheckpointsFromNode return all checkpoints from specified block height and hash
57+func (s *Store) CheckpointsFromNode(height uint64, hash *bc.Hash) ([]*state.Checkpoint, error) {
58+ startKey := calcCheckpointKey(height, hash)
59+ iter := s.db.IteratorPrefixWithStart(checkpointKeyPrefix, startKey, false)
60+
61+ firstCheckpoint := &state.Checkpoint{}
62+ if err := json.Unmarshal(iter.Value(), firstCheckpoint); err != nil {
63+ return nil, err
64+ }
65+
66+ checkpoints := []*state.Checkpoint{firstCheckpoint}
67+ subs, err := s.loadCheckpointsFromIter(iter)
68+ if err != nil {
69+ return nil, err
70+ }
71+
72+ checkpoints = append(checkpoints, subs...)
73+ return checkpoints, nil
74+}
75+
76+func (s *Store) loadCheckpointsFromIter(iter dbm.Iterator) ([]*state.Checkpoint, error) {
77+ var checkpoints []*state.Checkpoint
78+ defer iter.Release()
79+ for iter.Next() {
80+ checkpoint := &state.Checkpoint{}
81+ if err := json.Unmarshal(iter.Value(), checkpoint); err != nil {
82+ return nil, err
83+ }
84+
85+ header, err := s.GetBlockHeader(&checkpoint.Hash)
86+ if err != nil {
87+ return nil, err
88+ }
89+
90+ checkpoint.SupLinks = append(checkpoint.SupLinks, header.SupLinks...)
91+ checkpoints = append(checkpoints, checkpoint)
92+ }
93+ return checkpoints, nil
94+}
95+
96+// SaveCheckpoints bulk save multiple checkpoint
97+func (s *Store) SaveCheckpoints(checkpoints []*state.Checkpoint) error {
98+ var keys [][]byte
99+
100+ batch := s.db.NewBatch()
101+ for _, checkpoint := range checkpoints {
102+ startTime := time.Now()
103+ data, err := json.Marshal(checkpoint)
104+ if err != nil {
105+ return err
106+ }
107+
108+ key := calcCheckpointKey(checkpoint.Height, &checkpoint.Hash)
109+ batch.Set(key, data)
110+ keys = append(keys, key)
111+ log.WithFields(log.Fields{
112+ "module": logModule,
113+ "height": checkpoint.Height,
114+ "hash": checkpoint.Hash.String(),
115+ "status": checkpoint.Status,
116+ "duration": time.Since(startTime),
117+ }).Info("checkpoint saved on disk")
118+ }
119+
120+ batch.Write()
121+
122+ for _, key := range keys {
123+ s.cache.removeCheckPoint(key)
124+ }
125+
126+ return nil
127+}
Show on old repository browser