Format the project via `go fmt ./...` under the bytom directory.
import (
"context"
-// stdsql "database/sql"
+ // stdsql "database/sql"
"encoding/json"
+ "fmt"
"sync"
"time"
- "fmt"
"github.com/golang/groupcache/lru"
//"github.com/lib/pq"
-// "chain/core/pin"
+ // "chain/core/pin"
"github.com/bytom/blockchain/signers"
"github.com/bytom/blockchain/txbuilder"
"github.com/bytom/crypto/ed25519/chainkd"
-// "chain/database/pg"
- dbm "github.com/tendermint/tmlibs/db"
+ // "chain/database/pg"
"github.com/bytom/errors"
"github.com/bytom/log"
"github.com/bytom/protocol"
"github.com/bytom/protocol/vm/vmutil"
+ dbm "github.com/tendermint/tmlibs/db"
)
const maxAccountCache = 1000
ErrBadIdentifier = errors.New("either ID or alias must be specified, and not both")
)
-func NewManager(db dbm.DB, chain *protocol.Chain/*, pinStore *pin.Store*/) *Manager {
+func NewManager(db dbm.DB, chain *protocol.Chain /*, pinStore *pin.Store*/) *Manager {
return &Manager{
- db: db,
- chain: chain,
- utxoDB: newReserver(db, chain/*, pinStore*/),
-// pinStore: pinStore,
+ db: db,
+ chain: chain,
+ utxoDB: newReserver(db, chain /*, pinStore*/),
+ // pinStore: pinStore,
cache: lru.New(maxAccountCache),
aliasCache: lru.New(maxAccountCache),
delayedACPs: make(map[*txbuilder.TemplateBuilder][]*controlProgram),
// Manager stores accounts and their associated control programs.
type Manager struct {
- db dbm.DB
- chain *protocol.Chain
- utxoDB *reserver
- indexer Saver
-// pinStore *pin.Store
+ db dbm.DB
+ chain *protocol.Chain
+ utxoDB *reserver
+ indexer Saver
+ // pinStore *pin.Store
cacheMu sync.Mutex
cache *lru.Cache
// Create creates a new Account.
func (m *Manager) Create(ctx context.Context, xpubs []chainkd.XPub, quorum int, alias string, tags map[string]interface{}, clientToken string) (*Account, error) {
//if ret := m.db.Get([]byte(alias));ret != nil {
- //return nil,errors.New("alias already exists")
+ //return nil,errors.New("alias already exists")
//}
accountSigner, err := signers.Create(ctx, m.db, "account", xpubs, quorum, clientToken)
m.db.Set([]byte(alias), account_id)
}
-
err = m.indexAnnotatedAccount(ctx, account)
if err != nil {
return nil, errors.Wrap(err, "indexing annotated account")
}
-
// FindByAlias retrieves an account's Signer record by its alias
func (m *Manager) FindByAlias(ctx context.Context, alias string) (*signers.Signer, error) {
var accountID string
if err != nil {
return nil, errors.Wrap(err)
}*/
- bytez := m.db.Get([]byte(fmt.Sprintf("alias_account:%v", alias)))
- accountID = string(bytez[:])
+ bytez := m.db.Get([]byte(fmt.Sprintf("alias_account:%v", alias)))
+ accountID = string(bytez[:])
m.cacheMu.Lock()
m.aliasCache.Add(alias, accountID)
m.cacheMu.Unlock()
return cp.controlProgram, nil
}
-
func (m *Manager) insertAccountControlProgram(ctx context.Context, progs ...*controlProgram) error {
/*const q = `
INSERT INTO account_control_programs (signer_id, key_index, control_program, change, expires_at)
})
}*/
-// _, err := m.dbm.ExecContext(ctx, q, accountIDs, keyIndexes, controlProgs, change, pq.Array(expirations))
+ // _, err := m.dbm.ExecContext(ctx, q, accountIDs, keyIndexes, controlProgs, change, pq.Array(expirations))
return errors.Wrap(nil)
}
-
func (m *Manager) nextIndex(ctx context.Context) (uint64, error) {
m.acpMu.Lock()
defer m.acpMu.Unlock()
return n, nil
}
-
-func (m *Manager) QueryAll(ctx context.Context) (interface{}, error){
- ret := make([]interface{},0)
+func (m *Manager) QueryAll(ctx context.Context) (interface{}, error) {
+ ret := make([]interface{}, 0)
iter := m.db.Iterator()
for iter.Next() {
value := string(iter.Value())
- if value[:3] == "acc"{
+ if value[:3] == "acc" {
continue
}
- ret = append(ret,value)
+ ret = append(ret, value)
//log.Printf(ctx,"%s\t", value)
}
- return ret,nil
-}
\ No newline at end of file
+ return ret, nil
+}
import (
"context"
-// "database/sql"
+ // "database/sql"
"encoding/json"
-// "github.com/lib/pq"
+ // "github.com/lib/pq"
"github.com/bytom/blockchain/query"
//"github.com/blockchain/database/pg"
if len(acps) == 0 {
return nil
}
- // return m.insertAccountControlProgram(ctx, acps...)
- return nil
+ // return m.insertAccountControlProgram(ctx, acps...)
+ return nil
})
}
import (
"context"
"encoding/json"
- "fmt"
+ "fmt"
-// "github.com/lib/pq"
+ // "github.com/lib/pq"
"github.com/bytom/blockchain/query"
"github.com/bytom/blockchain/signers"
return m.deleteSpentOutputs(ctx, b)
})
m.pinStore.ProcessBlocks(ctx, m.chain, PinName, m.indexAccountUTXOs)
- */
+ */
}
/*
if err != nil {
return errors.Wrap(err, "loading account info from control programs")
}
- fmt.Printf("accOuts:%v", accOuts);
+ fmt.Printf("accOuts:%v", accOuts)
//err = m.upsertConfirmedAccountOutputs(ctx, accOuts, blockPositions, b)
return errors.Wrap(err, "upserting confirmed account utxos")
for s := range outsByScript {
scripts = append(scripts, []byte(s))
}
- */
+ */
result := make([]*accountOutput, 0, len(outs))
-/*
- const q = `
- SELECT signer_id, key_index, control_program, change
- FROM account_control_programs
- WHERE control_program IN (SELECT unnest($1::bytea[]))
- `
- err := pg.ForQueryRows(ctx, m.db, q, scripts, func(accountID string, keyIndex uint64, program []byte, change bool) {
- for _, out := range outsByScript[string(program)] {
- newOut := &accountOutput{
- rawOutput: *out,
- AccountID: accountID,
- keyIndex: keyIndex,
- change: change,
+ /*
+ const q = `
+ SELECT signer_id, key_index, control_program, change
+ FROM account_control_programs
+ WHERE control_program IN (SELECT unnest($1::bytea[]))
+ `
+ err := pg.ForQueryRows(ctx, m.db, q, scripts, func(accountID string, keyIndex uint64, program []byte, change bool) {
+ for _, out := range outsByScript[string(program)] {
+ newOut := &accountOutput{
+ rawOutput: *out,
+ AccountID: accountID,
+ keyIndex: keyIndex,
+ change: change,
+ }
+ result = append(result, newOut)
}
- result = append(result, newOut)
+ })
+ if err != nil {
+ return nil, err
}
- })
- if err != nil {
- return nil, err
- }
- */
+ */
return result, nil
}
"github.com/bytom/blockchain/account"
"github.com/bytom/crypto/ed25519/chainkd"
+ "github.com/bytom/log"
"github.com/bytom/net/http/httpjson"
"github.com/bytom/net/http/reqid"
- "github.com/bytom/log"
)
// POST /create-account
Alias *string
Tags map[string]interface{} `json:"tags"`
}) interface{} {
- log.Printf(ctx,"-------update-account-tags---------")
+ log.Printf(ctx, "-------update-account-tags---------")
responses := make([]interface{}, len(ins))
var wg sync.WaitGroup
wg.Add(len(responses))
import (
"context"
-// "encoding/json"
+ // "encoding/json"
//"github.com/lib/pq"
aid := assetID
assetIDs = append(assetIDs, aid.Bytes())
}
-/* var (
- tagsByAssetID = make(map[bc.AssetID]*json.RawMessage, len(assetIDs))
- defsByAssetID = make(map[bc.AssetID]*json.RawMessage, len(assetIDs))
- aliasesByAssetID = make(map[bc.AssetID]string, len(assetIDs))
- localByAssetID = make(map[bc.AssetID]bool, len(assetIDs))
- )
+ /* var (
+ tagsByAssetID = make(map[bc.AssetID]*json.RawMessage, len(assetIDs))
+ defsByAssetID = make(map[bc.AssetID]*json.RawMessage, len(assetIDs))
+ aliasesByAssetID = make(map[bc.AssetID]string, len(assetIDs))
+ localByAssetID = make(map[bc.AssetID]bool, len(assetIDs))
+ )
- const q = `
- SELECT id, COALESCE(alias, ''), signer_id IS NOT NULL, tags, definition
- FROM assets
- LEFT JOIN asset_tags ON asset_id=id
- WHERE id IN (SELECT unnest($1::bytea[]))
- `
- err := pg.ForQueryRows(ctx, reg.db, q, pq.ByteaArray(assetIDs),
- func(assetID bc.AssetID, alias string, local bool, tagsBlob, defBlob []byte) error {
- if alias != "" {
- aliasesByAssetID[assetID] = alias
- }
- localByAssetID[assetID] = local
+ const q = `
+ SELECT id, COALESCE(alias, ''), signer_id IS NOT NULL, tags, definition
+ FROM assets
+ LEFT JOIN asset_tags ON asset_id=id
+ WHERE id IN (SELECT unnest($1::bytea[]))
+ `
+ err := pg.ForQueryRows(ctx, reg.db, q, pq.ByteaArray(assetIDs),
+ func(assetID bc.AssetID, alias string, local bool, tagsBlob, defBlob []byte) error {
+ if alias != "" {
+ aliasesByAssetID[assetID] = alias
+ }
+ localByAssetID[assetID] = local
- jsonTags := json.RawMessage(tagsBlob)
- jsonDef := json.RawMessage(defBlob)
- if len(tagsBlob) > 0 {
- var v interface{}
- err := json.Unmarshal(tagsBlob, &v)
- if err == nil {
- tagsByAssetID[assetID] = &jsonTags
+ jsonTags := json.RawMessage(tagsBlob)
+ jsonDef := json.RawMessage(defBlob)
+ if len(tagsBlob) > 0 {
+ var v interface{}
+ err := json.Unmarshal(tagsBlob, &v)
+ if err == nil {
+ tagsByAssetID[assetID] = &jsonTags
+ }
}
- }
- if len(defBlob) > 0 {
- var v interface{}
- err := json.Unmarshal(defBlob, &v)
- if err == nil {
- defsByAssetID[assetID] = &jsonDef
+ if len(defBlob) > 0 {
+ var v interface{}
+ err := json.Unmarshal(defBlob, &v)
+ if err == nil {
+ defsByAssetID[assetID] = &jsonDef
+ }
}
- }
- return nil
- },
- )
- if err != nil {
- return errors.Wrap(err, "querying assets")
- }
-
- empty := json.RawMessage(`{}`)
- for _, tx := range txs {
- for _, in := range tx.Inputs {
- if alias, ok := aliasesByAssetID[in.AssetID]; ok {
- in.AssetAlias = alias
- }
- if localByAssetID[in.AssetID] {
- in.AssetIsLocal = true
- }
- tags := tagsByAssetID[in.AssetID]
- def := defsByAssetID[in.AssetID]
- in.AssetTags = &empty
- in.AssetDefinition = &empty
- if tags != nil {
- in.AssetTags = tags
- }
- if def != nil {
- in.AssetDefinition = def
- }
+ return nil
+ },
+ )
+ if err != nil {
+ return errors.Wrap(err, "querying assets")
}
- for _, out := range tx.Outputs {
- if alias, ok := aliasesByAssetID[out.AssetID]; ok {
- out.AssetAlias = alias
- }
- if localByAssetID[out.AssetID] {
- out.AssetIsLocal = true
- }
- tags := tagsByAssetID[out.AssetID]
- def := defsByAssetID[out.AssetID]
- out.AssetTags = &empty
- out.AssetDefinition = &empty
- if tags != nil {
- out.AssetTags = tags
+ empty := json.RawMessage(`{}`)
+ for _, tx := range txs {
+ for _, in := range tx.Inputs {
+ if alias, ok := aliasesByAssetID[in.AssetID]; ok {
+ in.AssetAlias = alias
+ }
+ if localByAssetID[in.AssetID] {
+ in.AssetIsLocal = true
+ }
+ tags := tagsByAssetID[in.AssetID]
+ def := defsByAssetID[in.AssetID]
+ in.AssetTags = &empty
+ in.AssetDefinition = &empty
+ if tags != nil {
+ in.AssetTags = tags
+ }
+ if def != nil {
+ in.AssetDefinition = def
+ }
}
- if def != nil {
- out.AssetDefinition = def
+
+ for _, out := range tx.Outputs {
+ if alias, ok := aliasesByAssetID[out.AssetID]; ok {
+ out.AssetAlias = alias
+ }
+ if localByAssetID[out.AssetID] {
+ out.AssetIsLocal = true
+ }
+ tags := tagsByAssetID[out.AssetID]
+ def := defsByAssetID[out.AssetID]
+ out.AssetTags = &empty
+ out.AssetDefinition = &empty
+ if tags != nil {
+ out.AssetTags = tags
+ }
+ if def != nil {
+ out.AssetDefinition = def
+ }
}
}
- }
-*/
+ */
return errors.Wrap(nil, "annotating with asset data")
}
-
IssuanceProgram []byte
InitialBlockHash bc.Hash
*signers.Signer
- Tags map[string]interface{}
- RawDefinition1 []byte
- definition map[string]interface{}
- sortID string
+ Tags map[string]interface{}
+ RawDefinition1 []byte
+ definition map[string]interface{}
+ sortID string
}
func (asset *Asset) Definition() (map[string]interface{}, error) {
return nil, errors.Wrap(err, "failed marshal asset")
}
if len(ass) > 0 {
- reg.db.Set(asset_id,json.RawMessage(ass))
+ reg.db.Set(asset_id, json.RawMessage(ass))
}
-/* asset, err = reg.insertAsset(ctx, asset, clientToken)
- if err != nil {
- return nil, errors.Wrap(err, "inserting asset")
- }
+ /* asset, err = reg.insertAsset(ctx, asset, clientToken)
+ if err != nil {
+ return nil, errors.Wrap(err, "inserting asset")
+ }
- err = insertAssetTags(ctx, reg.db, asset.AssetID, tags)
- if err != nil {
- return nil, errors.Wrap(err, "inserting asset tags")
- }
-*/
+ err = insertAssetTags(ctx, reg.db, asset.AssetID, tags)
+ if err != nil {
+ return nil, errors.Wrap(err, "inserting asset tags")
+ }
+ */
err = reg.indexAnnotatedAsset(ctx, asset)
if err != nil {
return nil, errors.Wrap(err, "indexing annotated asset")
asset.Tags = tags
// Perform persistent updates
-/*
- err = insertAssetTags(ctx, reg.db, asset.AssetID, asset.Tags)
- if err != nil {
- return errors.Wrap(err, "inserting asset tags")
- }
+ /*
+ err = insertAssetTags(ctx, reg.db, asset.AssetID, asset.Tags)
+ if err != nil {
+ return errors.Wrap(err, "inserting asset tags")
+ }
- err = reg.indexAnnotatedAsset(ctx, asset)
- if err != nil {
- return errors.Wrap(err, "update asset index")
- }
-*/
+ err = reg.indexAnnotatedAsset(ctx, asset)
+ if err != nil {
+ return errors.Wrap(err, "update asset index")
+ }
+ */
// Revise cache
reg.cacheMu.Lock()
var asset Asset
err := json.Unmarshal(bytes, &asset)
- if err !=nil {
+ if err != nil {
return nil, errors.New("this asset can't be unmarshal.")
}
}
untypedAsset, err := reg.aliasGroup.Do(alias, func() (interface{}, error) {
-// asset, err := assetQuery(ctx, reg.db, "assets.alias=$1", alias)
-// return asset, err
- return nil,nil
+ // asset, err := assetQuery(ctx, reg.db, "assets.alias=$1", alias)
+ // return asset, err
+ return nil, nil
})
if err != nil {
}
-func (reg *Registry) QueryAll(ctx context.Context) (interface{}, error){
- ret := make([]interface{},0)
+func (reg *Registry) QueryAll(ctx context.Context) (interface{}, error) {
+ ret := make([]interface{}, 0)
iter := reg.db.Iterator()
for iter.Next() {
value := string(iter.Value())
- ret = append(ret,value)
+ ret = append(ret, value)
//log.Printf(ctx,"%s\t", value)
}
- return ret,nil
+ return ret, nil
}
// insertAsset adds the asset to the database. If the asset has a client token,
keyIndex uint64
xpubs [][]byte
tags []byte
- )
+ )
err := db.QueryRowContext(ctx, fmt.Sprintf(baseQ, pred), args...).Scan(
&a.AssetID,
&a.Alias,
prog, err := builder.Build()
return prog, 1, err
}
+
/*
func mapToNullString(in map[string]interface{}) (*sql.NullString, error) {
var mapJSON []byte
"context"
"encoding/json"
-// "github.com/lib/pq"
+ // "github.com/lib/pq"
"github.com/bytom/blockchain/query"
"github.com/bytom/blockchain/signers"
-// "chain/database/pg"
+ // "chain/database/pg"
chainjson "github.com/bytom/encoding/json"
-// "github.com/bytom/errors"
-// "github.com/bytom/protocol/bc"
-// "github.com/bytom/protocol/bc/legacy"
+ // "github.com/bytom/errors"
+ // "github.com/bytom/protocol/bc"
+ // "github.com/bytom/protocol/bc/legacy"
"github.com/bytom/protocol/vm/vmutil"
)
// a.RawDefinition is the asset definition as it appears on the
// blockchain, so it's untrusted and may not be valid json.
-/* if pg.IsValidJSONB(a.RawDefinition()) {
- jsonDefinition = json.RawMessage(a.RawDefinition())
- }
-*/
+ /* if pg.IsValidJSONB(a.RawDefinition()) {
+ jsonDefinition = json.RawMessage(a.RawDefinition())
+ }
+ */
if a.Tags != nil {
b, err := json.Marshal(a.Tags)
if err != nil {
}
return reg.indexer.SaveAnnotatedAsset(ctx, aa, a.sortID)
}
+
/*
func (reg *Registry) ProcessBlocks(ctx context.Context) {
if reg.pinStore == nil {
txin := legacy.NewIssuanceInput(nonce[:], a.Amount, a.ReferenceData, asset.InitialBlockHash, asset.IssuanceProgram, nil, assetdef)
-
tplIn := &txbuilder.SigningInstruction{}
path := signers.Path(asset.Signer, signers.AssetKeySpace)
tplIn.AddWitnessKeys(asset.Signer.XPubs, path, asset.Signer.Quorum)
builder.RestrictMinTime(time.Now())
return builder.AddInput(txin, tplIn)
}
-
"github.com/bytom/blockchain/asset"
"github.com/bytom/crypto/ed25519/chainkd"
+ "github.com/bytom/log"
"github.com/bytom/net/http/httpjson"
"github.com/bytom/net/http/reqid"
- "github.com/bytom/log"
)
// POST /create-asset
Alias *string
Tags map[string]interface{} `json:"tags"`
}) interface{} {
- log.Printf(ctx,"------updateAssetTags-----")
+ log.Printf(ctx, "------updateAssetTags-----")
responses := make([]interface{}, len(ins))
var wg sync.WaitGroup
wg.Add(len(responses))
wg.Wait()
return responses
}
-
"github.com/bytom/encoding/json"
"github.com/bytom/errors"
+ "github.com/bytom/log"
"github.com/bytom/net/http/httpjson"
"github.com/bytom/net/http/reqid"
- "github.com/bytom/log"
)
// POST /create-control-program
import (
"context"
-// "github.com/bytom/blockchain/accesstoken"
+ // "github.com/bytom/blockchain/accesstoken"
"github.com/bytom/blockchain/account"
-// "github.com/bytom/blockchain/asset"
-// "github.com/bytom/blockchain/blocksigner"
-// "github.com/bytom/blockchain/config"
+ // "github.com/bytom/blockchain/asset"
+ // "github.com/bytom/blockchain/blocksigner"
+ // "github.com/bytom/blockchain/config"
"github.com/bytom/blockchain/query"
"github.com/bytom/blockchain/query/filter"
"github.com/bytom/blockchain/rpc"
"github.com/bytom/blockchain/signers"
"github.com/bytom/blockchain/txbuilder"
-// "github.com/bytom/blockchain/txfeed"
+ // "github.com/bytom/blockchain/txfeed"
"github.com/bytom/errors"
-// "github.com/bytom/net/http/authz"
+ // "github.com/bytom/net/http/authz"
"github.com/bytom/net/http/httperror"
"github.com/bytom/net/http/httpjson"
"github.com/bytom/protocol"
IsTemporary: isTemporary,
Errors: map[error]httperror.Info{
// General error namespace (0xx)
- context.DeadlineExceeded: {408, "CH001", "Request timed out"},
- httpjson.ErrBadRequest: {400, "CH003", "Invalid request body"},
+ context.DeadlineExceeded: {408, "CH001", "Request timed out"},
+ httpjson.ErrBadRequest: {400, "CH003", "Invalid request body"},
/*errNotFound: {404, "CH006", "Not found"},
errRateLimited: {429, "CH007", "Request limit exceeded"},
errNotAuthenticated: {401, "CH009", "Request could not be authenticated"},
txbuilder.ErrMissingFields: {400, "CH010", "One or more fields are missing"},
//authz.ErrNotAuthorized: {403, "CH011", "Request is unauthorized"},
//asset.ErrDuplicateAlias: {400, "CH050", "Alias already exists"},
- account.ErrDuplicateAlias: {400, "CH050", "Alias already exists"},
+ account.ErrDuplicateAlias: {400, "CH050", "Alias already exists"},
//txfeed.ErrDuplicateAlias: {400, "CH050", "Alias already exists"},
- account.ErrBadIdentifier: {40, "CH051", "Either an ID or alias must be provided, but not both"},
+ account.ErrBadIdentifier: {400, "CH051", "Either an ID or alias must be provided, but not both"},
//asset.ErrBadIdentifier: {40, "CH051", "Either an ID or alias must be provided, but not both"},
// Core error namespace
//errAlreadyConfigured: {400, "CH101", "This core has already been configured"},
//config.ErrBadGenerator: {400, "CH102", "Generator URL returned an invalid response"},
//errBadBlockPub: {400, "CH103", "Provided Block XPub is invalid"},
- rpc.ErrWrongNetwork: {502, "CH104", "A peer core is operating on a different blockchain network"},
- protocol.ErrTheDistantFuture: {400, "CH105", "Requested height is too far ahead"},
+ rpc.ErrWrongNetwork: {502, "CH104", "A peer core is operating on a different blockchain network"},
+ protocol.ErrTheDistantFuture: {400, "CH105", "Requested height is too far ahead"},
//config.ErrBadSignerURL: {400, "CH106", "Block signer URL is invalid"},
//config.ErrBadSignerPubkey: {400, "CH107", "Block signer pubkey is invalid"},
//config.ErrBadQuorum: {400, "CH108", "Quorum must be greater than 0 if there are signers"},
package blockchain
-
import (
"context"
"github.com/bytom/blockchain/pseudohsm"
}
*/
-
-func (a *BlockchainReactor) pseudohsmCreateKey(ctx context.Context, in struct{ Alias, Password string }) (result *pseudohsm.XPub, err error) {
+func (a *BlockchainReactor) pseudohsmCreateKey(ctx context.Context, in struct{ Alias, Password string }) (result *pseudohsm.XPub, err error) {
return a.hsm.XCreate(in.Password, in.Alias)
}
-
func (a *BlockchainReactor) pseudohsmListKeys(ctx context.Context, query requestQuery) (page, error) {
limit := query.PageSize
if limit == 0 {
- limit = defGenericPageSize // defGenericPageSize = 100
+ limit = defGenericPageSize // defGenericPageSize = 100
}
xpubs, after, err := a.hsm.ListKeys(query.After, limit)
}, nil
}
-func (a *BlockchainReactor) pseudohsmDeleteKey(ctx context.Context, x struct {
- Password string
- XPub chainkd.XPub `json:"xpubs"`
+func (a *BlockchainReactor) pseudohsmDeleteKey(ctx context.Context, x struct {
+ Password string
+ XPub chainkd.XPub `json:"xpubs"`
}) error {
return a.hsm.XDelete(x.XPub, x.Password)
}
-func (a *BlockchainReactor) pseudohsmSignTemplates(ctx context.Context, x struct {
+func (a *BlockchainReactor) pseudohsmSignTemplates(ctx context.Context, x struct {
Auth string
Txs []*txbuilder.Template `json:"transactions"`
XPubs []chainkd.XPub `json:"xpubs"`
return sigBytes, err
}
-func (a *BlockchainReactor) pseudohsmResetPassword(ctx context.Context, x struct {
- OldPassword string
- NewPassword string
- XPub chainkd.XPub `json:"xpubs"`
+func (a *BlockchainReactor) pseudohsmResetPassword(ctx context.Context, x struct {
+ OldPassword string
+ NewPassword string
+ XPub chainkd.XPub `json:"xpubs"`
}) error {
return a.hsm.ResetPassword(x.XPub, x.OldPassword, x.NewPassword)
}
-func (a *BlockchainReactor) pseudohsmUpdateAlias(ctx context.Context, x struct {
- Password string
- NewAlias string
- XPub chainkd.XPub `json:"xpubs"`
+func (a *BlockchainReactor) pseudohsmUpdateAlias(ctx context.Context, x struct {
+ Password string
+ NewAlias string
+ XPub chainkd.XPub `json:"xpubs"`
}) error {
return a.hsm.UpdateAlias(x.XPub, x.Password, x.NewAlias)
}
-
"sync"
"github.com/bytom/protocol/bc/legacy"
-// "github.com/blockchain/protocol/state"
+ // "github.com/blockchain/protocol/state"
)
// MemStore satisfies the Store interface.
type MemStore struct {
- mu sync.Mutex
- Blocks map[uint64]*legacy.Block
-// State *state.Snapshot
-// StateHeight uint64
+ mu sync.Mutex
+ Blocks map[uint64]*legacy.Block
+ // State *state.Snapshot
+ // StateHeight uint64
}
// New returns a new MemStore
"time"
"github.com/bytom/common"
- _"github.com/bytom/errors"
+ _ "github.com/bytom/errors"
)
// Minimum amount of time between cache reloads. This limit applies if the platform does
const minReloadInterval = 2 * time.Second
type keysByFile []XPub
+
func (s keysByFile) Len() int { return len(s) }
func (s keysByFile) Less(i, j int) bool { return s[i].File < s[j].File }
func (s keysByFile) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
-
// AmbiguousAddrError is returned when attempting to unlock
// an address for which more than one file exists.
type AmbiguousAddrError struct {
func (ac *addrCache) maybeReload() {
ac.mu.Lock()
defer ac.mu.Unlock()
-
+
if ac.watcher.running {
return // A watcher is running and will keep the cache up-to-date.
}
keys []XPub
keyJSON struct {
Address common.Address `json:"address"`
- Alias string `json:"alias"`
+ Alias string `json:"alias"`
}
)
for _, fi := range files {
import (
"fmt"
+ "io/ioutil"
"math/rand"
"os"
"path/filepath"
- "io/ioutil"
"reflect"
"sort"
"testing"
"github.com/davecgh/go-spew/spew"
)
-
var (
- cachetestDir, _ = filepath.Abs(filepath.Join("testdata", "keystore"))
- cachetestKeys = []XPub{
+ cachetestDir, _ = filepath.Abs(filepath.Join("testdata", "keystore"))
+ cachetestKeys = []XPub{
{
Alias: "langyu",
Address: common.StringToAddress("bm1pktmny6q69dlqulja2p2ja28k2vd6wvqpk5r76a"),
ac.keys()
time.Sleep(200 * time.Millisecond)
// Move in the files.
- wantKeystores:= make([]XPub, len(cachetestKeys))
+ wantKeystores := make([]XPub, len(cachetestKeys))
for i := range cachetestKeys {
a := cachetestKeys[i]
a.File = filepath.Join(dir, filepath.Base(a.File))
t.Errorf("got %s, want %s", spew.Sdump(list), spew.Sdump(wantKeystores))
}
-
func TestWatchNoDir(t *testing.T) {
t.Parallel()
}
}
-
func TestCacheAddDeleteOrder(t *testing.T) {
cache := newAddrCache("testdata/no-such-dir")
cache.watcher.running = true // prevent unexpected reloads
// Check that the account list is sorted by filename.
wantKeys := make([]XPub, len(keys))
- copy(wantKeys , keys)
+ copy(wantKeys, keys)
sort.Sort(keysByFile(wantKeys))
list := cache.keys()
-
+
if !reflect.DeepEqual(list, wantKeys) {
t.Fatalf("got keys: %s\nwant %s", spew.Sdump(keys), spew.Sdump(wantKeys))
}
-
+
for _, a := range keys {
if !cache.hasAddress(a.Address) {
t.Errorf("expected hasAccount(%x) to return true", a.Address)
}
}
-
func TestCacheFind(t *testing.T) {
dir := filepath.Join("testdata", "dir")
cache := newAddrCache(dir)
package pseudohsm
import (
- _"encoding/hex"
+ _ "encoding/hex"
//"encoding/json"
"fmt"
"io/ioutil"
"github.com/bytom/crypto/ed25519/chainkd"
"github.com/pborman/uuid"
-
)
const (
type XKey struct {
Id uuid.UUID
KeyType string
- Alias string
+ Alias string
Address common.Address
XPrv chainkd.XPrv
XPub chainkd.XPub
Address string `json:"address"`
Crypto cryptoJSON `json:"crypto"`
Id string `json:"id"`
- Type string `json:"type"`
+ Type string `json:"type"`
Version int `json:"version"`
- Alias string `json:"alias"`
+ Alias string `json:"alias"`
}
type cryptoJSON struct {
if k.KeyType != keytype {
return ErrInvalidKeyType
}
-
+
k.Address = common.BytesToAddress(addr)
copy(k.XPrv[:], privkey)
"github.com/bytom/common"
"github.com/bytom/crypto"
- "github.com/bytom/crypto/randentropy"
"github.com/bytom/crypto/ed25519/chainkd"
+ "github.com/bytom/crypto/randentropy"
"github.com/pborman/uuid"
"golang.org/x/crypto/pbkdf2"
"golang.org/x/crypto/scrypt"
if err != nil {
return nil, err
}
- var xprv chainkd.XPrv
+ var xprv chainkd.XPrv
copy(xprv[:], keyBytes[:])
xpub := xprv.XPub()
//key := crypto.ToECDSA(keyBytes)
return &XKey{
- Id: uuid.UUID(keyId),
- Address: crypto.PubkeyToAddress(xpub[:]),
- XPrv: xprv,
- XPub: xpub,
- KeyType: k.Type,
- Alias: k.Alias,
+ Id: uuid.UUID(keyId),
+ Address: crypto.PubkeyToAddress(xpub[:]),
+ XPrv: xprv,
+ XPub: xpub,
+ KeyType: k.Type,
+ Alias: k.Alias,
}, nil
}
return plainText, keyId, err
}
-
func getKDFKey(cryptoJSON cryptoJSON, auth string) ([]byte, error) {
authArray := []byte(auth)
salt, err := hex.DecodeString(cryptoJSON.KDFParams["salt"].(string))
"github.com/bytom/crypto/ed25519/chainkd"
"github.com/pborman/uuid"
-
)
const (
veryLightScryptP = 1
)
-
// Tests that a json key file can be decrypted and encrypted in multiple rounds.
func TestKeyEncryptDecrypt(t *testing.T) {
keyjson, err := ioutil.ReadFile("testdata/bytom-very-light-scrypt.json")
}
password := "bytomtest"
address := common.StringToAddress("bm1pcwfm9xnkrf62pg405tcgjzzk7ur670jqhtm3cq")
-
+
// Do a few rounds of decryption and encryption
for i := 0; i < 3; i++ {
// Try a bad password first
-
+
if _, err := DecryptKey(keyjson, password+"bad"); err == nil {
t.Errorf("test %d: json key decrypted with bad password", i)
}
-
+
// Decrypt with the correct password
key, err := DecryptKey(keyjson, password)
if err != nil {
if key.Address != address {
t.Errorf("test %d: key address mismatch: have %x, want %x", i, key.Address, address)
}
-
+
// Recrypt with a new password and start over
//password += "new data appended"
if _, err = EncryptKey(key, password, veryLightScryptN, veryLightScryptP); err != nil {
}
}
-
func TestGenerateFile(t *testing.T) {
xprv, xpub, err := chainkd.NewXKeys(nil)
if err != nil {
}
id := uuid.NewRandom()
key := &XKey{
- Id: id,
- KeyType: "bytom_kd",
+ Id: id,
+ KeyType: "bytom_kd",
Address: crypto.PubkeyToAddress(xpub[:]),
- XPub: xpub,
- XPrv: xprv,
+ XPub: xpub,
+ XPrv: xprv,
}
t.Log(key)
password := "bytomtest"
import (
//"context"
- _"fmt"
- "strconv"
+ _ "fmt"
+ "os"
"path/filepath"
+ "strconv"
"sync"
- "os"
- "github.com/bytom/crypto/ed25519/chainkd"
"github.com/bytom/common"
- "github.com/bytom/errors"
"github.com/bytom/crypto"
+ "github.com/bytom/crypto/ed25519/chainkd"
+ "github.com/bytom/errors"
//"bytom/protocol/bc/legacy"
"github.com/pborman/uuid"
)
ErrNoKey = errors.New("key not found")
ErrInvalidKeySize = errors.New("key invalid size")
ErrTooManyAliasesToList = errors.New("requested aliases exceeds limit")
- ErrAmbiguousAddr = errors.New("multiple keys match address")
- ErrDecrypt = errors.New("could not decrypt key with given passphrase")
- ErrInvalidKeyType = errors.New("key type stored invalid")
+ ErrAmbiguousAddr = errors.New("multiple keys match address")
+ ErrDecrypt = errors.New("could not decrypt key with given passphrase")
+ ErrInvalidKeyType = errors.New("key type stored invalid")
)
type HSM struct {
}
type XPub struct {
- Alias string `json:"alias"`
+ Alias string `json:"alias"`
Address common.Address `json:"address"`
- XPub chainkd.XPub `json:"xpub"`
- File string `json:"file"`
+ XPub chainkd.XPub `json:"xpub"`
+ File string `json:"file"`
}
-
func New(keypath string) (*HSM, error) {
keydir, _ := filepath.Abs(keypath)
return &HSM{
- keyStore: &keyStorePassphrase{keydir, LightScryptN, LightScryptP},
- cache: newAddrCache(keydir),
- kdCache: make(map[chainkd.XPub]chainkd.XPrv),
+ keyStore: &keyStorePassphrase{keydir, LightScryptN, LightScryptP},
+ cache: newAddrCache(keydir),
+ kdCache: make(map[chainkd.XPub]chainkd.XPrv),
}, nil
}
}
id := uuid.NewRandom()
key := &XKey{
- Id: id,
- KeyType: "bytom_kd",
+ Id: id,
+ KeyType: "bytom_kd",
Address: crypto.PubkeyToAddress(xpub[:]),
- XPub: xpub,
- XPrv: xprv,
- Alias: alias,
+ XPub: xpub,
+ XPrv: xprv,
+ Alias: alias,
}
file := h.keyStore.JoinPath(keyFileName(key.Address))
if err := h.keyStore.StoreKey(file, key, auth); err != nil {
return &XPub{XPub: xpub, Address: key.Address, Alias: alias, File: file}, true, nil
}
-
// ListKeys returns a list of all xpubs from the store
-func (h *HSM) ListKeys(after string , limit int) ([]XPub, string, error) {
+func (h *HSM) ListKeys(after string, limit int) ([]XPub, string, error) {
xpubs := h.cache.keys()
start, end := 0, len(xpubs)
)
if after != "" {
- zafter,err = strconv.Atoi(after)
+ zafter, err = strconv.Atoi(after)
if err != nil {
return nil, "", errors.WithDetailf(ErrInvalidAfter, "value: %q", zafter)
}
return nil, "", errors.WithDetailf(ErrInvalidAfter, "value: %v", zafter)
}
if len(xpubs) > zafter+limit {
- end = zafter+limit
+ end = zafter + limit
}
return xpubs[start:end], strconv.Itoa(start), nil
}
return xkey.XPrv, nil
}
-
// XDelete deletes the key matched by xpub if the passphrase is correct.
// If a contains no filename, the address must match a unique key.
func (h *HSM) XDelete(xpub chainkd.XPub, auth string) error {
// Decrypting the key isn't really necessary, but we do
// it anyway to check the password and zero out the key
// immediately afterwards.
-
+
xpb, xkey, err := h.loadDecryptedKey(xpub, auth)
if xkey != nil {
zeroKey(xkey)
package pseudohsm
import (
- "time"
- "fmt"
- "github.com/rjeczalik/notify"
+ "fmt"
+ "github.com/rjeczalik/notify"
+ "time"
)
type watcher struct {
- ac *addrCache
- starting bool
- running bool
- ev chan notify.EventInfo
- quit chan struct{}
+ ac *addrCache
+ starting bool
+ running bool
+ ev chan notify.EventInfo
+ quit chan struct{}
}
func newWatcher(ac *addrCache) *watcher {
- return &watcher{
- ac: ac,
- ev: make(chan notify.EventInfo, 10),
- quit: make(chan struct{}),
- }
+ return &watcher{
+ ac: ac,
+ ev: make(chan notify.EventInfo, 10),
+ quit: make(chan struct{}),
+ }
}
// starts the watcher loop in the background.
// Start a watcher in the background if that's not already in progress.
// The caller must hold w.ac.mu.
func (w *watcher) start() {
- if w.starting || w.running {
- return
- }
- w.starting = true
- go w.loop()
+ if w.starting || w.running {
+ return
+ }
+ w.starting = true
+ go w.loop()
}
func (w *watcher) close() {
- close(w.quit)
+ close(w.quit)
}
func (w *watcher) loop() {
- defer func() {
- w.ac.mu.Lock()
- w.running = false
- w.starting = false
- w.ac.mu.Unlock()
- }()
+ defer func() {
+ w.ac.mu.Lock()
+ w.running = false
+ w.starting = false
+ w.ac.mu.Unlock()
+ }()
- err := notify.Watch(w.ac.keydir, w.ev, notify.All)
- if err != nil {
- fmt.Printf("can't watch %s: %v", w.ac.keydir, err)
- return
- }
- defer notify.Stop(w.ev)
- fmt.Printf("now watching %s", w.ac.keydir)
- defer fmt.Printf("no longer watching %s", w.ac.keydir)
+ err := notify.Watch(w.ac.keydir, w.ev, notify.All)
+ if err != nil {
+ fmt.Printf("can't watch %s: %v", w.ac.keydir, err)
+ return
+ }
+ defer notify.Stop(w.ev)
+ fmt.Printf("now watching %s", w.ac.keydir)
+ defer fmt.Printf("no longer watching %s", w.ac.keydir)
- w.ac.mu.Lock()
- w.running = true
- w.ac.mu.Unlock()
+ w.ac.mu.Lock()
+ w.running = true
+ w.ac.mu.Unlock()
- // Wait for file system events and reload.
- // When an event occurs, the reload call is delayed a bit so that
- // multiple events arriving quickly only cause a single reload.
- var (
- debounce = time.NewTimer(0)
- debounceDuration = 500 * time.Millisecond
- inCycle, hadEvent bool
- )
- defer debounce.Stop()
- for {
- select {
- case <-w.quit:
- return
- case <-w.ev:
- if !inCycle {
- debounce.Reset(debounceDuration)
- inCycle = true
- } else {
- hadEvent = true
- }
- case <-debounce.C:
- w.ac.mu.Lock()
- w.ac.reload()
- w.ac.mu.Unlock()
- if hadEvent {
- debounce.Reset(debounceDuration)
- inCycle, hadEvent = true, false
- } else {
- inCycle, hadEvent = false, false
- }
- }
- }
+ // Wait for file system events and reload.
+ // When an event occurs, the reload call is delayed a bit so that
+ // multiple events arriving quickly only cause a single reload.
+ var (
+ debounce = time.NewTimer(0)
+ debounceDuration = 500 * time.Millisecond
+ inCycle, hadEvent bool
+ )
+ defer debounce.Stop()
+ for {
+ select {
+ case <-w.quit:
+ return
+ case <-w.ev:
+ if !inCycle {
+ debounce.Reset(debounceDuration)
+ inCycle = true
+ } else {
+ hadEvent = true
+ }
+ case <-debounce.C:
+ w.ac.mu.Lock()
+ w.ac.reload()
+ w.ac.mu.Unlock()
+ if hadEvent {
+ debounce.Reset(debounceDuration)
+ inCycle, hadEvent = true, false
+ } else {
+ inCycle, hadEvent = false, false
+ }
+ }
+ }
}
defGenericPageSize = 100
)
-
//
// POST /list-accounts
func (bcr *BlockchainReactor) listAccounts(ctx context.Context, in requestQuery) interface{} {
- response,_ := bcr.accounts.QueryAll(ctx)
+ response, _ := bcr.accounts.QueryAll(ctx)
return response
}
-
//
// POST /list-assets
func (bcr *BlockchainReactor) listAssets(ctx context.Context, in requestQuery) interface{} {
- response,_ := bcr.assets.QueryAll(ctx)
+ response, _ := bcr.assets.QueryAll(ctx)
return response
}
response := bcr.chain.GetAssetsAmount()
if len(response) == 0 {
return nil
- }else{
+ } else {
return response
}
}
// Either parse the provided `after` or look one up for the time range.
-// var after query.TxAfter
+ // var after query.TxAfter
if in.After != "" {
_, err = query.DecodeTxAfter(in.After)
if err != nil {
return result, errors.Wrap(err, "decoding `after`")
}
} else {
-/* after, err = bcr.indexer.LookupTxAfter(ctx, in.StartTimeMS, endTimeMS)
- if err != nil {
- return result, err
- }
-*/
+ /* after, err = bcr.indexer.LookupTxAfter(ctx, in.StartTimeMS, endTimeMS)
+ if err != nil {
+ return result, err
+ }
+ */
}
-/* txns, nextAfter, err := bcr.indexer.Transactions(ctx, in.Filter, in.FilterParams, after, limit, in.AscLongPoll)
- if err != nil {
- return result, errors.Wrap(err, "running tx query")
- }
-*/
+ /* txns, nextAfter, err := bcr.indexer.Transactions(ctx, in.Filter, in.FilterParams, after, limit, in.AscLongPoll)
+ if err != nil {
+ return result, errors.Wrap(err, "running tx query")
+ }
+ */
out := in
-// out.After = nextAfter.String()
+ // out.After = nextAfter.String()
return page{
-// Items: httpjson.Array(txns),
-// LastPage: len(txns) < limit,
- Next: out,
+ // Items: httpjson.Array(txns),
+ // LastPage: len(txns) < limit,
+ Next: out,
}, nil
}
after := in.After
-/* txfeeds, after, err := bcr.txFeeds.Query(ctx, after, limit)
- if err != nil {
- return page{}, errors.Wrap(err, "running txfeed query")
- }
-*/
+ /* txfeeds, after, err := bcr.txFeeds.Query(ctx, after, limit)
+ if err != nil {
+ return page{}, errors.Wrap(err, "running txfeed query")
+ }
+ */
out := in
out.After = after
return page{
-// Items: httpjson.Array(txfeeds),
-// LastPage: len(txfeeds) < limit,
- Next: out,
+ // Items: httpjson.Array(txfeeds),
+ // LastPage: len(txfeeds) < limit,
+ Next: out,
}, nil
}
limit = defGenericPageSize
}
-// var after *query.OutputsAfter
+ // var after *query.OutputsAfter
if in.After != "" {
_, err = query.DecodeOutputsAfter(in.After)
if err != nil {
} else if timestampMS > math.MaxInt64 {
return result, errors.WithDetail(httpjson.ErrBadRequest, "timestamp is too large")
}
-/* outputs, nextAfter, err := bcr.indexer.Outputs(ctx, in.Filter, in.FilterParams, timestampMS, after, limit)
- if err != nil {
- return result, errors.Wrap(err, "querying outputs")
- }
-*/
+ /* outputs, nextAfter, err := bcr.indexer.Outputs(ctx, in.Filter, in.FilterParams, timestampMS, after, limit)
+ if err != nil {
+ return result, errors.Wrap(err, "querying outputs")
+ }
+ */
outQuery := in
-// outQuery.After = nextAfter.String()
+ // outQuery.After = nextAfter.String()
return page{
-// Items: httpjson.Array(outputs),
-// LastPage: len(outputs) < limit,
- Next: outQuery,
+ // Items: httpjson.Array(outputs),
+ // LastPage: len(outputs) < limit,
+ Next: outQuery,
}, nil
}
package query
- /*
+/*
import (
"bytes"
"context"
"time"
"github.com/bytom/crypto/ed25519/chainkd"
-// "github.com/blockchain/database/pg"
+ // "github.com/blockchain/database/pg"
chainjson "github.com/bytom/encoding/json"
"github.com/bytom/protocol/bc"
"github.com/bytom/protocol/bc/legacy"
import (
"context"
-// "database/sql"
-// "encoding/json"
+ // "database/sql"
+ // "encoding/json"
//"github.com/lib/pq"
//"chain/core/pin"
//"github.com/blockchain/database/pg"
-// "github.com/blockchain/errors"
+ // "github.com/blockchain/errors"
"github.com/bytom/protocol"
//"github.com/blockchain/protocol/bc/legacy"
- dbm "github.com/tendermint/tmlibs/db"
+ dbm "github.com/tendermint/tmlibs/db"
)
const (
)
// NewIndexer constructs a new indexer for indexing transactions.
-func NewIndexer(db dbm.DB, c *protocol.Chain/*, pinStore *pin.Store*/) *Indexer {
+func NewIndexer(db dbm.DB, c *protocol.Chain /*, pinStore *pin.Store*/) *Indexer {
indexer := &Indexer{
- db: db,
- c: c,
- // pinStore: pinStore,
+ db: db,
+ c: c,
+ // pinStore: pinStore,
}
return indexer
}
// Indexer creates, updates and queries against indexes.
type Indexer struct {
- db dbm.DB
- c *protocol.Chain
+ db dbm.DB
+ c *protocol.Chain
//pinStore *pin.Store
annotators []Annotator
}
package query
import (
-// "bytes"
-// "context"
+ // "bytes"
+ // "context"
"fmt"
"math"
//"github.com/lib/pq"
-// "github.com/blockchain/blockchain/query/filter"
+ // "github.com/blockchain/blockchain/query/filter"
"github.com/bytom/errors"
-// "github.com/blockchain/protocol/bc"
+ // "github.com/blockchain/protocol/bc"
)
var defaultOutputsAfter = OutputsAfter{
package query
import (
-// "bytes"
-// "context"
-// "encoding/json"
+ // "bytes"
+ // "context"
+ // "encoding/json"
"fmt"
"math"
-// "strconv"
+ // "strconv"
-// "github.com/blockchain/blockchain/query/filter"
+ // "github.com/blockchain/blockchain/query/filter"
"github.com/bytom/errors"
)
"sync"
"time"
- "github.com/bytom/net/http/reqid"
"github.com/bytom/log"
+ "github.com/bytom/net/http/reqid"
)
// POST /create-account-receiver
AccountAlias string `json:"account_alias"`
ExpiresAt time.Time `json:"expires_at"`
}) []interface{} {
- log.Printf(ctx,"-------create-Account-Receiver-------")
+ log.Printf(ctx, "-------create-Account-Receiver-------")
responses := make([]interface{}, len(ins))
var wg sync.WaitGroup
wg.Add(len(responses))
import (
"context"
-// "encoding/json"
-// "net/http"
+ // "encoding/json"
+ // "net/http"
chainjson "github.com/bytom/encoding/json"
"github.com/bytom/errors"
-// "github.com/bytom/net/http/httpjson"
+ // "github.com/bytom/net/http/httpjson"
"github.com/bytom/protocol/bc"
)
package signers
import (
- "time"
- "github.com/bytom/encoding/base32"
"encoding/binary"
+ "github.com/bytom/encoding/base32"
"sync/atomic"
+ "time"
)
//1<seq_id ,increase by 1
var seq_id uint32 = 1
-func next_seq_id() uint32 {
+func next_seq_id() uint32 {
- atomic.AddUint32(&seq_id,1)
+ atomic.AddUint32(&seq_id, 1)
return (seq_id)
}
// see the SQL function next_cahin_id in schema.sql on https://github.com/chain/chain
-func Idgenerate (prefix string) (string,uint64){
+func Idgenerate(prefix string) (string, uint64) {
var our_epoch_ms uint64 = 1496635208000
var n uint64
- now_ms := uint64(time.Now().UnixNano()/1e6)
+ now_ms := uint64(time.Now().UnixNano() / 1e6)
seq_index := uint64(next_seq_id())
- seq_id := uint64(seq_index%1024)
- shard_id := uint64(5)
+ seq_id := uint64(seq_index % 1024)
+ shard_id := uint64(5)
n = (now_ms - our_epoch_ms) << 23
n = n | (shard_id << 10)
n = n | seq_id
- bin := make([]byte,8)
- binary.BigEndian.PutUint64(bin,n)
+ bin := make([]byte, 8)
+ binary.BigEndian.PutUint64(bin, n)
encodestring := base32.HexEncoding.WithPadding(base32.NoPadding).EncodeToString(bin)
- return prefix+encodestring,seq_index
+ return prefix + encodestring, seq_index
}
import (
"bytes"
"context"
-// "database/sql"
+ // "database/sql"
"encoding/binary"
"sort"
//"github.com/lib/pq"
"github.com/bytom/crypto/ed25519/chainkd"
-// "github.com/blockchain/database/pg"
+ // "github.com/blockchain/database/pg"
"github.com/bytom/errors"
- dbm "github.com/tendermint/tmlibs/db"
+ dbm "github.com/tendermint/tmlibs/db"
)
type keySpace byte
key := key
xpubBytes = append(xpubBytes, key[:])
}
- /*
+ /*
- nullToken := sql.NullString{
- String: clientToken,
- Valid: clientToken != "",
- }
+ nullToken := sql.NullString{
+ String: clientToken,
+ Valid: clientToken != "",
+ }
- const q = `
- INSERT INTO signers (id, type, xpubs, quorum, client_token)
- VALUES (next_chain_id($1::text), $2, $3, $4, $5)
- ON CONFLICT (client_token) DO NOTHING
- RETURNING id, key_index
- `
- var (
- id string
- keyIndex uint64
- )
- err := db.QueryRowContext(ctx, q, typeIDMap[typ], typ, pq.ByteaArray(xpubBytes), quorum, nullToken).
- Scan(&id, &keyIndex)
- if err == sql.ErrNoRows && clientToken != "" {
- return findByClientToken(ctx, db, clientToken)
- }
- if err != nil && err != sql.ErrNoRows {
- return nil, errors.Wrap(err)
- }
- */
+ const q = `
+ INSERT INTO signers (id, type, xpubs, quorum, client_token)
+ VALUES (next_chain_id($1::text), $2, $3, $4, $5)
+ ON CONFLICT (client_token) DO NOTHING
+ RETURNING id, key_index
+ `
+ var (
+ id string
+ keyIndex uint64
+ )
+ err := db.QueryRowContext(ctx, q, typeIDMap[typ], typ, pq.ByteaArray(xpubBytes), quorum, nullToken).
+ Scan(&id, &keyIndex)
+ if err == sql.ErrNoRows && clientToken != "" {
+ return findByClientToken(ctx, db, clientToken)
+ }
+ if err != nil && err != sql.ErrNoRows {
+ return nil, errors.Wrap(err)
+ }
+ */
var (
id string
keyIndex uint64
SELECT id, type, xpubs, quorum, key_index
FROM signers WHERE id=$1
`
- */
+ */
var (
s Signer
xpubBytes [][]byte
)
- /*
- err := db.QueryRowContext(ctx, q, id).Scan(
- &s.ID,
- &s.Type,
- (*pq.ByteaArray)(&xpubBytes),
- &s.Quorum,
- &s.KeyIndex,
- )
- if err == sql.ErrNoRows {
- return nil, errors.Wrap(pg.ErrUserInputNotFound)
- }
- if err != nil {
- return nil, errors.Wrap(err)
- }
+ /*
+ err := db.QueryRowContext(ctx, q, id).Scan(
+ &s.ID,
+ &s.Type,
+ (*pq.ByteaArray)(&xpubBytes),
+ &s.Quorum,
+ &s.KeyIndex,
+ )
+ if err == sql.ErrNoRows {
+ return nil, errors.Wrap(pg.ErrUserInputNotFound)
+ }
+ if err != nil {
+ return nil, errors.Wrap(err)
+ }
- if s.Type != typ {
- return nil, errors.Wrap(ErrBadType)
- }*/
+ if s.Type != typ {
+ return nil, errors.Wrap(ErrBadType)
+ }*/
keys, err := ConvertKeys(xpubBytes)
if err != nil {
"github.com/bytom/blockchain/txbuilder"
chainjson "github.com/bytom/encoding/json"
"github.com/bytom/errors"
+ "github.com/bytom/log"
"github.com/bytom/net/http/httperror"
"github.com/bytom/net/http/reqid"
"github.com/bytom/protocol/bc/legacy"
- "github.com/bytom/log"
)
var defaultTxTTL = 5 * time.Minute
package txdb
import (
- "strconv"
- "sync"
"errors"
"fmt"
+ "strconv"
+ "sync"
"github.com/bytom/protocol/bc/legacy"
mu sync.Mutex
lru *lru.Cache
- fillFn func(height uint64) (*legacy.Block)
+ fillFn func(height uint64) *legacy.Block
single singleflight.Group // for cache misses
}
import (
"context"
- "fmt"
"encoding/json"
+ "fmt"
"github.com/golang/protobuf/proto"
"github.com/bytom/blockchain/txdb/internal/storage"
"github.com/bytom/errors"
+ "github.com/bytom/protocol/bc"
"github.com/bytom/protocol/patricia"
"github.com/bytom/protocol/state"
- "github.com/bytom/protocol/bc"
- dbm "github.com/tendermint/tmlibs/db"
. "github.com/tendermint/tmlibs/common"
+ dbm "github.com/tendermint/tmlibs/db"
)
func calcSnapshotKey(height uint64) []byte {
- return []byte(fmt.Sprintf("S:%v", height))
+ return []byte(fmt.Sprintf("S:%v", height))
}
func calcLatestSnapshotHeight() []byte {
return []byte("LatestSnapshotHeight")
}
+
// DecodeSnapshot decodes a snapshot from the Chain Core's binary,
// protobuf representation of the snapshot.
func DecodeSnapshot(data []byte) (*state.Snapshot, error) {
var latestSnapshotHeight = []byte("latestSnapshotHeight")
type SnapshotHeightJSON struct {
- Height uint64
+ Height uint64
}
func (bsj SnapshotHeightJSON) Save(db dbm.DB) {
return bsj
}
-
func storeStateSnapshot(ctx context.Context, db dbm.DB, snapshot *state.Snapshot, blockHeight uint64) error {
var storedSnapshot storage.Snapshot
err := patricia.Walk(snapshot.Tree, func(key []byte) error {
import (
"context"
//"database/sql"
-// "fmt"
-
-// "github.com/bytom/errors"
+ // "fmt"
+ // "github.com/bytom/errors"
)
// Query queries the Chain Core for txfeeds matching the query.
-func/* (t *Tracker)*/ Query(ctx context.Context, after string, limit int) ([]*TxFeed, string, error) {
-/* const baseQ = `
- SELECT id, alias, filter, after FROM txfeeds
- WHERE ($1='' OR id < $1) ORDER BY id DESC LIMIT %d
- `
- rows, err := t.DB.QueryContext(ctx, fmt.Sprintf(baseQ, limit), after)
- if err != nil {
- return nil, "", errors.Wrap(err, "executing txfeeds query")
- }
- defer rows.Close()
-
- txfeeds := make([]*TxFeed, 0, limit)
- for rows.Next() {
- var (
- feed TxFeed
- alias sql.NullString
- )
- err := rows.Scan(&feed.ID, &alias, &feed.Filter, &feed.After)
+func /* (t *Tracker)*/ Query(ctx context.Context, after string, limit int) ([]*TxFeed, string, error) {
+ /* const baseQ = `
+ SELECT id, alias, filter, after FROM txfeeds
+ WHERE ($1='' OR id < $1) ORDER BY id DESC LIMIT %d
+ `
+ rows, err := t.DB.QueryContext(ctx, fmt.Sprintf(baseQ, limit), after)
if err != nil {
- return nil, "", errors.Wrap(err, "scanning txfeed row")
+ return nil, "", errors.Wrap(err, "executing txfeeds query")
}
+ defer rows.Close()
+
+ txfeeds := make([]*TxFeed, 0, limit)
+ for rows.Next() {
+ var (
+ feed TxFeed
+ alias sql.NullString
+ )
+ err := rows.Scan(&feed.ID, &alias, &feed.Filter, &feed.After)
+ if err != nil {
+ return nil, "", errors.Wrap(err, "scanning txfeed row")
+ }
- if alias.Valid {
- feed.Alias = &alias.String
+ if alias.Valid {
+ feed.Alias = &alias.String
+ }
+ after = feed.ID
+ txfeeds = append(txfeeds, &feed)
}
- after = feed.ID
- txfeeds = append(txfeeds, &feed)
- }
- err = rows.Err()
- if err != nil {
- return nil, "", errors.Wrap(err)
- }
- return txfeeds, after, nil
-*/
+ err = rows.Err()
+ if err != nil {
+ return nil, "", errors.Wrap(err)
+ }
+ return txfeeds, after, nil
+ */
after = "qwertyu"
- return nil,after,nil
+ return nil, after, nil
}
-
package txfeed
import (
-// "bytes"
+ // "bytes"
"context"
//"database/sql"
-// "github.com/bytom/blockchain/query"
-// "github.com/bytom/database/pg"
+ // "github.com/bytom/blockchain/query"
+ // "github.com/bytom/database/pg"
"github.com/bytom/errors"
)
var ErrDuplicateAlias = errors.New("duplicate feed alias")
type Tracker struct {
- DB string
+ DB string
}
type TxFeed struct {
func (t *Tracker) Create(ctx context.Context, alias, fil, after string, clientToken string) (*TxFeed, error) {
// Validate the filter.
-/* err := query.ValidateTransactionFilter(fil)
- if err != nil {
- return nil, err
- }
-
- var ptrAlias *string
- if alias != "" {
- ptrAlias = &alias
- }
-
- feed := &TxFeed{
- Alias: ptrAlias,
- Filter: fil,
- After: after,
- }
- return insertTxFeed(ctx, t.DB, feed, clientToken)
-
-*/
- return nil,nil
-}
+ /* err := query.ValidateTransactionFilter(fil)
+ if err != nil {
+ return nil, err
+	}
+
+ var ptrAlias *string
+ if alias != "" {
+ ptrAlias = &alias
+ }
+
+ feed := &TxFeed{
+ Alias: ptrAlias,
+ Filter: fil,
+ After: after,
+ }
+ return insertTxFeed(ctx, t.DB, feed, clientToken)
+
+ */
+ return nil, nil
+}
// insertTxFeed adds the txfeed to the database. If the txfeed has a client token,
// and there already exists a txfeed with that client token, insertTxFeed will
// lookup and return the existing txfeed instead.
-func insertTxFeed(ctx context.Context,/* db pg.DB,*/ feed *TxFeed, clientToken string) (*TxFeed, error) {
-/* const q = `
- INSERT INTO txfeeds (alias, filter, after, client_token)
- VALUES ($1, $2, $3, $4)
- ON CONFLICT (client_token) DO NOTHING
- RETURNING id
- `
-
- var alias sql.NullString
- if feed.Alias != nil {
- alias = sql.NullString{Valid: true, String: *feed.Alias}
- }
-
- nullToken := sql.NullString{
- String: clientToken,
- Valid: clientToken != "",
- }
-
- err := db.QueryRowContext(
- ctx, q, alias, feed.Filter, feed.After,
- nullToken).Scan(&feed.ID)
-
- if pg.IsUniqueViolation(err) {
- return nil, errors.WithDetail(ErrDuplicateAlias, "a transaction feed with the provided alias already exists")
- } else if err == sql.ErrNoRows && clientToken != "" {
- // There is already a txfeed with the provided client
- // token. We should return the existing txfeed
- feed, err = txfeedByClientToken(ctx, db, clientToken)
- if err != nil {
- return nil, errors.Wrap(err, "retrieving existing txfeed")
- }
- } else if err != nil {
- return nil, err
- }
-*/
-// return feed, nil
- return nil,nil
+func insertTxFeed(ctx context.Context /* db pg.DB,*/, feed *TxFeed, clientToken string) (*TxFeed, error) {
+ /* const q = `
+ INSERT INTO txfeeds (alias, filter, after, client_token)
+ VALUES ($1, $2, $3, $4)
+ ON CONFLICT (client_token) DO NOTHING
+ RETURNING id
+ `
+
+ var alias sql.NullString
+ if feed.Alias != nil {
+ alias = sql.NullString{Valid: true, String: *feed.Alias}
+ }
+
+ nullToken := sql.NullString{
+ String: clientToken,
+ Valid: clientToken != "",
+ }
+
+ err := db.QueryRowContext(
+ ctx, q, alias, feed.Filter, feed.After,
+ nullToken).Scan(&feed.ID)
+
+ if pg.IsUniqueViolation(err) {
+ return nil, errors.WithDetail(ErrDuplicateAlias, "a transaction feed with the provided alias already exists")
+ } else if err == sql.ErrNoRows && clientToken != "" {
+ // There is already a txfeed with the provided client
+ // token. We should return the existing txfeed
+ feed, err = txfeedByClientToken(ctx, db, clientToken)
+ if err != nil {
+ return nil, errors.Wrap(err, "retrieving existing txfeed")
+ }
+ } else if err != nil {
+ return nil, err
+ }
+ */
+ // return feed, nil
+ return nil, nil
}
+func txfeedByClientToken(ctx context.Context /* db pg.DB,*/, clientToken string) (*TxFeed, error) {
+ /* const q = `
+ SELECT id, alias, filter, after
+ FROM txfeeds
+ WHERE client_token=$1
+ `
+
+ var (
+ feed TxFeed
+ alias sql.NullString
+ )
+ err := db.QueryRowContext(ctx, q, clientToken).Scan(&feed.ID, &alias, &feed.Filter, &feed.After)
+ if err != nil {
+ return nil, err
+ }
-func txfeedByClientToken(ctx context.Context,/* db pg.DB,*/ clientToken string) (*TxFeed, error) {
-/* const q = `
- SELECT id, alias, filter, after
- FROM txfeeds
- WHERE client_token=$1
- `
-
- var (
- feed TxFeed
- alias sql.NullString
- )
- err := db.QueryRowContext(ctx, q, clientToken).Scan(&feed.ID, &alias, &feed.Filter, &feed.After)
- if err != nil {
- return nil, err
- }
-
- if alias.Valid {
- feed.Alias = &alias.String
- }
-*/
-// return &feed, nil
- return nil,nil
+ if alias.Valid {
+ feed.Alias = &alias.String
+ }
+ */
+ // return &feed, nil
+ return nil, nil
}
func (t *Tracker) Find(ctx context.Context, id, alias string) (*TxFeed, error) {
-/* var q bytes.Buffer
-
- q.WriteString(`
- SELECT id, alias, filter, after
- FROM txfeeds
- WHERE
- `)
-
- if id != "" {
- q.WriteString(`id=$1`)
- } else {
- q.WriteString(`alias=$1`)
- id = alias
- }
-
- var (
- feed TxFeed
- sqlAlias sql.NullString
- )
-
- err := t.DB.QueryRowContext(ctx, q.String(), id).Scan(&feed.ID, &sqlAlias, &feed.Filter, &feed.After)
- if err == sql.ErrNoRows {
- err = errors.Sub(pg.ErrUserInputNotFound, err)
- err = errors.WithDetailf(err, "alias: %s", alias)
- return nil, err
- }
- if err != nil {
- return nil, err
- }
-
- if sqlAlias.Valid {
- feed.Alias = &sqlAlias.String
- }
-*/
-// return &feed, nil
- return nil,nil
+ /* var q bytes.Buffer
+
+ q.WriteString(`
+ SELECT id, alias, filter, after
+ FROM txfeeds
+ WHERE
+ `)
+
+ if id != "" {
+ q.WriteString(`id=$1`)
+ } else {
+ q.WriteString(`alias=$1`)
+ id = alias
+ }
+
+ var (
+ feed TxFeed
+ sqlAlias sql.NullString
+ )
+
+ err := t.DB.QueryRowContext(ctx, q.String(), id).Scan(&feed.ID, &sqlAlias, &feed.Filter, &feed.After)
+ if err == sql.ErrNoRows {
+ err = errors.Sub(pg.ErrUserInputNotFound, err)
+ err = errors.WithDetailf(err, "alias: %s", alias)
+ return nil, err
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ if sqlAlias.Valid {
+ feed.Alias = &sqlAlias.String
+ }
+ */
+ // return &feed, nil
+ return nil, nil
}
func (t *Tracker) Delete(ctx context.Context, id, alias string) error {
-/* var q bytes.Buffer
-
- q.WriteString(`DELETE FROM txfeeds WHERE `)
-
- if id != "" {
- q.WriteString(`id=$1`)
- } else {
- q.WriteString(`alias=$1`)
- id = alias
- }
-
- res, err := t.DB.ExecContext(ctx, q.String(), id)
- if err != nil {
- return err
- }
-
- affected, err := res.RowsAffected()
- if err != nil {
- return err
- }
-
- if affected == 0 {
- return errors.WithDetailf(pg.ErrUserInputNotFound, "could not find and delete txfeed with id/alias=%s", id)
- }
-*/
+ /* var q bytes.Buffer
+
+ q.WriteString(`DELETE FROM txfeeds WHERE `)
+
+ if id != "" {
+ q.WriteString(`id=$1`)
+ } else {
+ q.WriteString(`alias=$1`)
+ id = alias
+ }
+
+ res, err := t.DB.ExecContext(ctx, q.String(), id)
+ if err != nil {
+ return err
+ }
+
+ affected, err := res.RowsAffected()
+ if err != nil {
+ return err
+ }
+
+ if affected == 0 {
+ return errors.WithDetailf(pg.ErrUserInputNotFound, "could not find and delete txfeed with id/alias=%s", id)
+ }
+ */
return nil
}
func (t *Tracker) Update(ctx context.Context, id, alias, after, prev string) (*TxFeed, error) {
-/* var q bytes.Buffer
-
- q.WriteString(`UPDATE txfeeds SET after=$1 WHERE `)
-
- if id != "" {
- q.WriteString(`id=$2`)
- } else {
- q.WriteString(`alias=$2`)
- id = alias
- }
-
- q.WriteString(` AND after=$3`)
-
- res, err := t.DB.ExecContext(ctx, q.String(), after, id, prev)
- if err != nil {
- return nil, err
- }
-
- affected, err := res.RowsAffected()
- if err != nil {
- return nil, err
- }
-
- if affected == 0 {
- return nil, errors.WithDetailf(pg.ErrUserInputNotFound, "could not find txfeed with id/alias=%s and prev=%s", id, prev)
- }
-
- return &TxFeed{
- ID: id,
- Alias: &alias,
- After: after,
- }, nil
-*/
-/* return &TxFeed{
- ID: nil,
- Alias nil,
- After nil,
- }
-*/ return nil,nil
-}
+ /* var q bytes.Buffer
+
+	q.WriteString(`UPDATE txfeeds SET after=$1 WHERE `)
+
+ if id != "" {
+ q.WriteString(`id=$2`)
+ } else {
+ q.WriteString(`alias=$2`)
+ id = alias
+ }
+
+ q.WriteString(` AND after=$3`)
+
+ res, err := t.DB.ExecContext(ctx, q.String(), after, id, prev)
+ if err != nil {
+ return nil, err
+ }
+
+ affected, err := res.RowsAffected()
+ if err != nil {
+ return nil, err
+ }
+
+ if affected == 0 {
+ return nil, errors.WithDetailf(pg.ErrUserInputNotFound, "could not find txfeed with id/alias=%s and prev=%s", id, prev)
+ }
+
+ return &TxFeed{
+ ID: id,
+ Alias: &alias,
+ After: after,
+ }, nil
+ */
+ /* return &TxFeed{
+ ID: nil,
+ Alias nil,
+ After nil,
+ }
+	*/
+	return nil, nil
+}
import (
"context"
-// "fmt"
-// "math"
+ // "fmt"
+ // "math"
"github.com/bytom/blockchain/query"
"github.com/bytom/blockchain/txfeed"
// with the same client_token will only create one txfeed.
ClientToken string `json:"client_token"`
}) (*txfeed.TxFeed, error) {
- log.Printf(ctx,"-------createTxFeed-------")
-// after := fmt.Sprintf("%d:%d-%d", a.chain.Height(), math.MaxInt32, uint64(math.MaxInt64))
-// return a.txFeeds.Create(ctx, in.Alias, in.Filter, after, in.ClientToken)
- return nil,nil
+ log.Printf(ctx, "-------createTxFeed-------")
+ // after := fmt.Sprintf("%d:%d-%d", a.chain.Height(), math.MaxInt32, uint64(math.MaxInt64))
+ // return a.txFeeds.Create(ctx, in.Alias, in.Filter, after, in.ClientToken)
+ return nil, nil
}
// POST /get-transaction-feed
ID string `json:"id,omitempty"`
Alias string `json:"alias,omitempty"`
}) (*txfeed.TxFeed, error) {
- log.Printf(ctx,"-------getTxFeed-------")
-// return a.txFeeds.Find(ctx, in.ID, in.Alias)
- return nil,nil
+ log.Printf(ctx, "-------getTxFeed-------")
+ // return a.txFeeds.Find(ctx, in.ID, in.Alias)
+ return nil, nil
}
// POST /delete-transaction-feed
ID string `json:"id,omitempty"`
Alias string `json:"alias,omitempty"`
}) error {
- log.Printf(ctx,"-------deleteTxFeed-------")
-// return a.txFeeds.Delete(ctx, in.ID, in.Alias)
+ log.Printf(ctx, "-------deleteTxFeed-------")
+ // return a.txFeeds.Delete(ctx, in.ID, in.Alias)
return nil
}
Prev string `json:"previous_after"`
After string `json:"after"`
}) (*txfeed.TxFeed, error) {
- log.Printf(ctx,"-------updateTxFeed-------")
+ log.Printf(ctx, "-------updateTxFeed-------")
// TODO(tessr): Consider moving this function into the txfeed package.
// (It's currently outside the txfeed package to avoid a dependecy cycle
// between txfeed and query.)
if bad {
return nil, errors.WithDetail(httpjson.ErrBadRequest, "new After cannot be before Prev")
}
-// return a.txFeeds.Update(ctx, in.ID, in.Alias, in.After, in.Prev)
- return nil,nil
+ // return a.txFeeds.Update(ctx, in.ID, in.Alias, in.After, in.Prev)
+ return nil, nil
}
// txAfterIsBefore returns true if a is before b. It returns an error if either
(aAfter.FromBlockHeight == bAfter.FromBlockHeight &&
aAfter.FromPosition < bAfter.FromPosition), nil
}
-
cmn "github.com/tendermint/tmlibs/common"
)
-var initFilesCmd = &cobra.Command {
+var initFilesCmd = &cobra.Command{
Use: "init",
Short: "Initialize blockchain",
Run: initFiles,
if _, err := os.Stat(genFile); os.IsNotExist(err) {
genDoc := types.GenesisDoc{
- ChainID: cmn.Fmt("bytom"),
+ ChainID: cmn.Fmt("bytom"),
PrivateKey: "27F82582AEFAE7AB151CFB01C48BB6C1A0DA78F9BDDA979A9F70A84D074EB07D3B3069C422E19688B45CBFAE7BB009FC0FA1B1EA86593519318B7214853803C8",
}
genDoc.SaveAs(genFile)
package example
import (
- "context"
- stdjson "encoding/json"
- "encoding/hex"
- "fmt"
- _"time"
+ "context"
+ "encoding/hex"
+ stdjson "encoding/json"
+ "fmt"
+ _ "time"
- bc "github.com/bytom/blockchain"
- "github.com/bytom/blockchain/query"
- "github.com/bytom/blockchain/rpc"
- "github.com/bytom/blockchain/txbuilder"
+ bc "github.com/bytom/blockchain"
+ "github.com/bytom/blockchain/query"
+ "github.com/bytom/blockchain/rpc"
+ "github.com/bytom/blockchain/txbuilder"
- "github.com/bytom/crypto/ed25519/chainkd"
- _"github.com/bytom/encoding/json"
+ "github.com/bytom/crypto/ed25519/chainkd"
+ _ "github.com/bytom/encoding/json"
)
// TO DO: issue a asset to a account.
func WalletTest(client *rpc.Client, args []string) {
- // Create Account.
- /*
- fmt.Printf("To create Account:\n")
- xprv, _ := chainkd.NewXPrv(nil)
- xpub := xprv.XPub()
- fmt.Printf("xprv_account:%v\n", xprv)
- fmt.Printf("xpub_account:%v\n", xpub)
- */
- xpub_str := "7ed0a605bf967c6329c29dff17b924ddfb05e1c09d8dc993309919e82a817a87f11b4f2b3464eb7303b74ec0a41604f3691bfd39bfd59c8ab6f85333bc7d127f"
- xpub_account := new(chainkd.XPub)
- data, err := hex.DecodeString(xpub_str)
- if err != nil {
- fmt.Printf("error: %v", err)
- }
- copy(xpub_account[:], data)
+ // Create Account.
+ /*
+ fmt.Printf("To create Account:\n")
+ xprv, _ := chainkd.NewXPrv(nil)
+ xpub := xprv.XPub()
+ fmt.Printf("xprv_account:%v\n", xprv)
+ fmt.Printf("xpub_account:%v\n", xpub)
+ */
+ xpub_str := "7ed0a605bf967c6329c29dff17b924ddfb05e1c09d8dc993309919e82a817a87f11b4f2b3464eb7303b74ec0a41604f3691bfd39bfd59c8ab6f85333bc7d127f"
+ xpub_account := new(chainkd.XPub)
+ data, err := hex.DecodeString(xpub_str)
+ if err != nil {
+ fmt.Printf("error: %v", err)
+ }
+ copy(xpub_account[:], data)
- type Ins struct {
- RootXPubs []chainkd.XPub `json:"root_xpubs"`
- Quorum int
- Alias string
- Tags map[string]interface{}
- ClientToken string `json:"client_token"`
- }
- var ins Ins
- ins.RootXPubs = []chainkd.XPub{*xpub_account}
- ins.Quorum = 1
- ins.Alias = "alice"
- ins.Tags = map[string]interface{}{"test_tag": "v0"}
- ins.ClientToken = "account"
- account := make([]query.AnnotatedAccount, 1)
- client.Call(context.Background(), "/create-account", &[]Ins{ins}, &account)
- fmt.Printf("account:%v\n", account)
+ type Ins struct {
+ RootXPubs []chainkd.XPub `json:"root_xpubs"`
+ Quorum int
+ Alias string
+ Tags map[string]interface{}
+ ClientToken string `json:"client_token"`
+ }
+ var ins Ins
+ ins.RootXPubs = []chainkd.XPub{*xpub_account}
+ ins.Quorum = 1
+ ins.Alias = "alice"
+ ins.Tags = map[string]interface{}{"test_tag": "v0"}
+ ins.ClientToken = "account"
+ account := make([]query.AnnotatedAccount, 1)
+ client.Call(context.Background(), "/create-account", &[]Ins{ins}, &account)
+ fmt.Printf("account:%v\n", account)
- // Create Asset.
- fmt.Printf("To create Asset:\n")
- xprv_asset, _ := chainkd.NewXPrv(nil)
- xpub_asset := xprv_asset.XPub()
- fmt.Printf("xprv_asset:%v\n", xprv_asset)
- fmt.Printf("xpub_asset:%v\n", xpub_asset)
- type Ins_asset struct {
- RootXPubs []chainkd.XPub `json:"root_xpubs"`
- Quorum int
- Alias string
- Tags map[string]interface{}
- Definition map[string]interface{}
- ClientToken string `json:"client_token"`
- }
- var ins_asset Ins_asset
- ins_asset.RootXPubs = []chainkd.XPub{xpub_asset}
- ins_asset.Quorum = 1
- ins_asset.Alias = "gold"
- ins_asset.Tags = map[string]interface{}{"test_tag": "v0"}
- ins_asset.Definition = map[string]interface{}{"test_definition": "v0"}
- ins_asset.ClientToken = "asset"
- asset := make([]query.AnnotatedAsset, 1)
- client.Call(context.Background(), "/create-asset", &[]Ins_asset{ins_asset}, &asset)
- fmt.Printf("asset:%v\n", asset)
+ // Create Asset.
+ fmt.Printf("To create Asset:\n")
+ xprv_asset, _ := chainkd.NewXPrv(nil)
+ xpub_asset := xprv_asset.XPub()
+ fmt.Printf("xprv_asset:%v\n", xprv_asset)
+ fmt.Printf("xpub_asset:%v\n", xpub_asset)
+ type Ins_asset struct {
+ RootXPubs []chainkd.XPub `json:"root_xpubs"`
+ Quorum int
+ Alias string
+ Tags map[string]interface{}
+ Definition map[string]interface{}
+ ClientToken string `json:"client_token"`
+ }
+ var ins_asset Ins_asset
+ ins_asset.RootXPubs = []chainkd.XPub{xpub_asset}
+ ins_asset.Quorum = 1
+ ins_asset.Alias = "gold"
+ ins_asset.Tags = map[string]interface{}{"test_tag": "v0"}
+ ins_asset.Definition = map[string]interface{}{"test_definition": "v0"}
+ ins_asset.ClientToken = "asset"
+ asset := make([]query.AnnotatedAsset, 1)
+ client.Call(context.Background(), "/create-asset", &[]Ins_asset{ins_asset}, &asset)
+ fmt.Printf("asset:%v\n", asset)
- // Build Transaction.
- fmt.Printf("To build transaction:\n")
- // Now Issue actions
- buildReqFmt := `
+ // Build Transaction.
+ fmt.Printf("To build transaction:\n")
+ // Now Issue actions
+ buildReqFmt := `
{"actions": [
{"type": "issue", "asset_id": "%s", "amount": 100},
{"type": "control_account", "asset_id": "%s", "amount": 100, "account_id": "%s"}
]}`
- buildReqStr := fmt.Sprintf(buildReqFmt, asset[0].ID.String(), asset[0].ID.String(), account[0].ID)
- var buildReq bc.BuildRequest
- err = stdjson.Unmarshal([]byte(buildReqStr), &buildReq)
- if err != nil {
- fmt.Printf("json Unmarshal error.")
- }
+ buildReqStr := fmt.Sprintf(buildReqFmt, asset[0].ID.String(), asset[0].ID.String(), account[0].ID)
+ var buildReq bc.BuildRequest
+ err = stdjson.Unmarshal([]byte(buildReqStr), &buildReq)
+ if err != nil {
+ fmt.Printf("json Unmarshal error.")
+ }
- tpl := make([]txbuilder.Template, 1)
- client.Call(context.Background(), "/build-transaction", []*bc.BuildRequest{&buildReq}, &tpl)
- fmt.Printf("tpl:%v\n", tpl)
+ tpl := make([]txbuilder.Template, 1)
+ client.Call(context.Background(), "/build-transaction", []*bc.BuildRequest{&buildReq}, &tpl)
+ fmt.Printf("tpl:%v\n", tpl)
- // sign-transaction
- var response interface{}
- type Ins_tx struct {
- Auth string
- Txs []*txbuilder.Template `json:"transactions"`
- XPubs []chainkd.XPub `json:"xpubs"`
- }
- var tx Ins_tx
- tx.Auth = "langyu"
- tx.Txs = []*txbuilder.Template{&tpl[0]}
- tx.XPubs = []chainkd.XPub{*xpub_account}
+ // sign-transaction
+ var response interface{}
+ type Ins_tx struct {
+ Auth string
+ Txs []*txbuilder.Template `json:"transactions"`
+ XPubs []chainkd.XPub `json:"xpubs"`
+ }
+ var tx Ins_tx
+ tx.Auth = "langyu"
+ tx.Txs = []*txbuilder.Template{&tpl[0]}
+ tx.XPubs = []chainkd.XPub{*xpub_account}
- client.Call(context.Background(), "/sign-transactions", &tx, &response)
- /*err = txbuilder.Sign(context.Background(), &tpl[0], []chainkd.XPub{xprv_asset.XPub()}, func(_ context.Context, _ chainkd.XPub, path [][]byte, data [32]byte) ([]byte, error) {
- derived := xprv_asset.Derive(path)
- return derived.Sign(data[:]), nil
- })
- */
- fmt.Printf("response %v\n", response)
- if err != nil {
- fmt.Printf("sign-transaction error. err:%v\n", err)
- }
- fmt.Printf("sign tpl:%v\n", tpl[0])
- fmt.Printf("sign tpl's SigningInstructions:%v\n", tpl[0].SigningInstructions[0])
- fmt.Printf("SigningInstructions's SignatureWitnesses:%v\n", tpl[0].SigningInstructions[0].SignatureWitnesses[0])
+ client.Call(context.Background(), "/sign-transactions", &tx, &response)
+ /*err = txbuilder.Sign(context.Background(), &tpl[0], []chainkd.XPub{xprv_asset.XPub()}, func(_ context.Context, _ chainkd.XPub, path [][]byte, data [32]byte) ([]byte, error) {
+ derived := xprv_asset.Derive(path)
+ return derived.Sign(data[:]), nil
+ })
+ */
+ fmt.Printf("response %v\n", response)
+ if err != nil {
+ fmt.Printf("sign-transaction error. err:%v\n", err)
+ }
+ fmt.Printf("sign tpl:%v\n", tpl[0])
+ fmt.Printf("sign tpl's SigningInstructions:%v\n", tpl[0].SigningInstructions[0])
+ fmt.Printf("SigningInstructions's SignatureWitnesses:%v\n", tpl[0].SigningInstructions[0].SignatureWitnesses[0])
- // submit-transaction
- /*
- var submitResponse interface{}
- submitArg := bc.SubmitArg{tpl, json.Duration{time.Duration(1000000)}, "none"}
- client.Call(context.Background(), "/submit-transaction", submitArg, &submitResponse)
- fmt.Printf("submit transaction:%v\n", submitResponse)
- */
-}
\ No newline at end of file
+ // submit-transaction
+ /*
+ var submitResponse interface{}
+ submitArg := bc.SubmitArg{tpl, json.Duration{time.Duration(1000000)}, "none"}
+ client.Call(context.Background(), "/submit-transaction", submitArg, &submitResponse)
+ fmt.Printf("submit transaction:%v\n", submitResponse)
+ */
+}
import (
"bytes"
"context"
- "strconv"
"encoding/hex"
stdjson "encoding/json"
"flag"
"net/http"
"os"
"path/filepath"
+ "strconv"
"strings"
"time"
"github.com/bytom/blockchain"
"github.com/bytom/blockchain/query"
"github.com/bytom/blockchain/rpc"
+ "github.com/bytom/blockchain/txbuilder"
"github.com/bytom/cmd/bytomcli/example"
"github.com/bytom/crypto/ed25519"
"github.com/bytom/crypto/ed25519/chainkd"
"github.com/bytom/encoding/json"
- "github.com/bytom/blockchain/txbuilder"
"github.com/bytom/env"
"github.com/bytom/errors"
"github.com/bytom/log"
"delete-transaction-feed": {deleteTxFeed},
"issue-test": {example.IssueTest},
"spend-test": {example.SpendTest},
- "wallet-test": {example.WalletTest},
+ "wallet-test": {example.WalletTest},
"create-access-token": {createAccessToken},
"list-access-token": {listAccessTokens},
"delete-access-token": {deleteAccessToken},
- "create-key": {createKey},
- "list-keys": {listKeys},
- "delete-key": {deleteKey},
+ "create-key": {createKey},
+ "list-keys": {listKeys},
+ "delete-key": {deleteKey},
"sign-transactions": {signTransactions},
"sub-create-issue-tx": {submitCreateIssueTransaction},
- "reset-password": {resetPassword},
- "update-alias": {updateAlias},
+ "reset-password": {resetPassword},
+ "update-alias": {updateAlias},
}
func main() {
}
var in requestQuery
-
responses := make([]interface{}, 0)
client.Call(context.Background(), "/list-accounts", in, &responses)
- if len(responses)>0{
- for i,item := range responses{
- fmt.Println(i,"-----",item)
+ if len(responses) > 0 {
+ for i, item := range responses {
+ fmt.Println(i, "-----", item)
}
}
}
responses := make([]interface{}, 0)
client.Call(context.Background(), "/list-assets", in, &responses)
- if len(responses)>0{
- for i,item := range responses{
- fmt.Println(i,"-----",item)
+ if len(responses) > 0 {
+ for i, item := range responses {
+ fmt.Println(i, "-----", item)
}
}
}
responses := make([]interface{}, 0)
client.Call(context.Background(), "/list-balances", in, &responses)
- if len(responses)>0{
- for i,item := range responses{
- fmt.Println(i,"-----",item)
+ if len(responses) > 0 {
+ for i, item := range responses {
+ fmt.Println(i, "-----", item)
}
}
}
client.Call(context.Background(), "/delete-access-token", &[]Token{token}, nil)
}
-
func createKey(client *rpc.Client, args []string) {
if len(args) != 2 {
fatalln("error: createKey args not vaild")
}
type Key struct {
- Alias string
- Password string
+ Alias string
+ Password string
}
var key Key
var response map[string]interface{}
- key.Alias = args[0]
+ key.Alias = args[0]
key.Password = args[1]
client.Call(context.Background(), "/create-key", &key, &response)
fatalln("error: deleteKey args not vaild")
}
type Key struct {
- Password string
- XPub chainkd.XPub `json:"xpubs"`
+ Password string
+ XPub chainkd.XPub `json:"xpubs"`
}
var key Key
xpub := new(chainkd.XPub)
fatalln("error: deletKey %v", err)
}
copy(xpub[:], data)
- key.Password = args[0]
- key.XPub= *xpub
+ key.Password = args[0]
+ key.XPub = *xpub
client.Call(context.Background(), "/delete-key", &key, nil)
}
-
func listKeys(client *rpc.Client, args []string) {
if len(args) != 2 {
fatalln("error: listKeys args not vaild")
in.PageSize, _ = strconv.Atoi(args[1])
var response map[string][]interface{}
client.Call(context.Background(), "/list-keys", &in, &response)
- for i, item := range response["items"]{
+ for i, item := range response["items"] {
key := item.(map[string]interface{})
fmt.Printf("---No.%v Alias:%v Address:%v File:%v\n", i, key["alias"], key["address"], key["file"])
}
fatalln("error: resetpassword args not vaild")
}
type Key struct {
- OldPassword string
- NewPassword string
- XPub chainkd.XPub `json:"xpubs"`
+ OldPassword string
+ NewPassword string
+ XPub chainkd.XPub `json:"xpubs"`
}
var key Key
xpub := new(chainkd.XPub)
fatalln("error: resetPassword %v", err)
}
copy(xpub[:], data)
- key.OldPassword = args[0]
- key.NewPassword = args[1]
- key.XPub= *xpub
+ key.OldPassword = args[0]
+ key.NewPassword = args[1]
+ key.XPub = *xpub
client.Call(context.Background(), "/reset-password", &key, nil)
}
fatalln("error: resetpassword args not vaild")
}
type Key struct {
- Password string
- NewAlias string
- XPub chainkd.XPub `json:"xpubs"`
+ Password string
+ NewAlias string
+ XPub chainkd.XPub `json:"xpubs"`
}
var key Key
xpub := new(chainkd.XPub)
fatalln("error: resetPassword %v", err)
}
copy(xpub[:], data)
- key.Password = args[0]
- key.NewAlias = args[1]
- key.XPub= *xpub
+ key.Password = args[0]
+ key.NewAlias = args[1]
+ key.XPub = *xpub
client.Call(context.Background(), "/update-alias", &key, nil)
}
package common
import (
- "bytes"
- "fmt"
- "strings"
+ "bytes"
+ "fmt"
+ "strings"
)
var charset = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
var generator = []int{0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3}
func polymod(values []int) int {
- chk := 1
- for _, v := range values {
- top := chk >> 25
- chk = (chk&0x1ffffff)<<5 ^ v
- for i := 0; i < 5; i++ {
- if (top>>uint(i))&1 == 1 {
- chk ^= generator[i]
- }
- }
- }
- return chk
+ chk := 1
+ for _, v := range values {
+ top := chk >> 25
+ chk = (chk&0x1ffffff)<<5 ^ v
+ for i := 0; i < 5; i++ {
+ if (top>>uint(i))&1 == 1 {
+ chk ^= generator[i]
+ }
+ }
+ }
+ return chk
}
func hrpExpand(hrp string) []int {
- ret := []int{}
- for _, c := range hrp {
- ret = append(ret, int(c>>5))
- }
- ret = append(ret, 0)
- for _, c := range hrp {
- ret = append(ret, int(c&31))
- }
- return ret
+ ret := []int{}
+ for _, c := range hrp {
+ ret = append(ret, int(c>>5))
+ }
+ ret = append(ret, 0)
+ for _, c := range hrp {
+ ret = append(ret, int(c&31))
+ }
+ return ret
}
func verifyChecksum(hrp string, data []int) bool {
- return polymod(append(hrpExpand(hrp), data...)) == 1
+ return polymod(append(hrpExpand(hrp), data...)) == 1
}
func createChecksum(hrp string, data []int) []int {
- values := append(append(hrpExpand(hrp), data...), []int{0, 0, 0, 0, 0, 0}...)
- mod := polymod(values) ^ 1
- ret := make([]int, 6)
- for p := 0; p < len(ret); p++ {
- ret[p] = (mod >> uint(5*(5-p))) & 31
- }
- return ret
+ values := append(append(hrpExpand(hrp), data...), []int{0, 0, 0, 0, 0, 0}...)
+ mod := polymod(values) ^ 1
+ ret := make([]int, 6)
+ for p := 0; p < len(ret); p++ {
+ ret[p] = (mod >> uint(5*(5-p))) & 31
+ }
+ return ret
}
// Encode encodes hrp(human-readable part) and data(32bit data array), returns Bech32 / or error
// if hrp is uppercase, return uppercase Bech32
func Encode(hrp string, data []int) (string, error) {
- if (len(hrp) + len(data) + 7) > 90 {
- return "", fmt.Errorf("too long : hrp length=%d, data length=%d", len(hrp), len(data))
- }
- if len(hrp) < 1 {
- return "", fmt.Errorf("invalid hrp : hrp=%v", hrp)
- }
- for p, c := range hrp {
- if c < 33 || c > 126 {
- return "", fmt.Errorf("invalid character human-readable part : hrp[%d]=%d", p, c)
- }
- }
- if strings.ToUpper(hrp) != hrp && strings.ToLower(hrp) != hrp {
- return "", fmt.Errorf("mix case : hrp=%v", hrp)
- }
- lower := strings.ToLower(hrp) == hrp
- hrp = strings.ToLower(hrp)
- combined := append(data, createChecksum(hrp, data)...)
- var ret bytes.Buffer
- ret.WriteString(hrp)
- ret.WriteString("1")
- for idx, p := range combined {
- if p < 0 || p >= len(charset) {
- return "", fmt.Errorf("invalid data : data[%d]=%d", idx, p)
- }
- ret.WriteByte(charset[p])
- }
- if lower {
- return ret.String(), nil
- }
- return strings.ToUpper(ret.String()), nil
+ if (len(hrp) + len(data) + 7) > 90 {
+ return "", fmt.Errorf("too long : hrp length=%d, data length=%d", len(hrp), len(data))
+ }
+ if len(hrp) < 1 {
+ return "", fmt.Errorf("invalid hrp : hrp=%v", hrp)
+ }
+ for p, c := range hrp {
+ if c < 33 || c > 126 {
+ return "", fmt.Errorf("invalid character human-readable part : hrp[%d]=%d", p, c)
+ }
+ }
+ if strings.ToUpper(hrp) != hrp && strings.ToLower(hrp) != hrp {
+ return "", fmt.Errorf("mix case : hrp=%v", hrp)
+ }
+ lower := strings.ToLower(hrp) == hrp
+ hrp = strings.ToLower(hrp)
+ combined := append(data, createChecksum(hrp, data)...)
+ var ret bytes.Buffer
+ ret.WriteString(hrp)
+ ret.WriteString("1")
+ for idx, p := range combined {
+ if p < 0 || p >= len(charset) {
+ return "", fmt.Errorf("invalid data : data[%d]=%d", idx, p)
+ }
+ ret.WriteByte(charset[p])
+ }
+ if lower {
+ return ret.String(), nil
+ }
+ return strings.ToUpper(ret.String()), nil
}
// Decode decodes bechString(Bech32) returns hrp(human-readable part) and data(32bit data array) / or error
func Decode(bechString string) (string, []int, error) {
- if len(bechString) > 90 {
- return "", nil, fmt.Errorf("too long : len=%d", len(bechString))
- }
- if strings.ToLower(bechString) != bechString && strings.ToUpper(bechString) != bechString {
- return "", nil, fmt.Errorf("mixed case")
- }
- bechString = strings.ToLower(bechString)
- pos := strings.LastIndex(bechString, "1")
- if pos < 1 || pos+7 > len(bechString) {
- return "", nil, fmt.Errorf("separator '1' at invalid position : pos=%d , len=%d", pos, len(bechString))
- }
- hrp := bechString[0:pos]
- for p, c := range hrp {
- if c < 33 || c > 126 {
- return "", nil, fmt.Errorf("invalid character human-readable part : bechString[%d]=%d", p, c)
- }
- }
- data := []int{}
- for p := pos + 1; p < len(bechString); p++ {
- d := strings.Index(charset, fmt.Sprintf("%c", bechString[p]))
- if d == -1 {
- return "", nil, fmt.Errorf("invalid character data part : bechString[%d]=%d", p, bechString[p])
- }
- data = append(data, d)
- }
- if !verifyChecksum(hrp, data) {
- return "", nil, fmt.Errorf("invalid checksum")
- }
- return hrp, data[:len(data)-6], nil
+ if len(bechString) > 90 {
+ return "", nil, fmt.Errorf("too long : len=%d", len(bechString))
+ }
+ if strings.ToLower(bechString) != bechString && strings.ToUpper(bechString) != bechString {
+ return "", nil, fmt.Errorf("mixed case")
+ }
+ bechString = strings.ToLower(bechString)
+ pos := strings.LastIndex(bechString, "1")
+ if pos < 1 || pos+7 > len(bechString) {
+ return "", nil, fmt.Errorf("separator '1' at invalid position : pos=%d , len=%d", pos, len(bechString))
+ }
+ hrp := bechString[0:pos]
+ for p, c := range hrp {
+ if c < 33 || c > 126 {
+ return "", nil, fmt.Errorf("invalid character human-readable part : bechString[%d]=%d", p, c)
+ }
+ }
+ data := []int{}
+ for p := pos + 1; p < len(bechString); p++ {
+ d := strings.Index(charset, fmt.Sprintf("%c", bechString[p]))
+ if d == -1 {
+ return "", nil, fmt.Errorf("invalid character data part : bechString[%d]=%d", p, bechString[p])
+ }
+ data = append(data, d)
+ }
+ if !verifyChecksum(hrp, data) {
+ return "", nil, fmt.Errorf("invalid checksum")
+ }
+ return hrp, data[:len(data)-6], nil
}
func convertbits(data []int, frombits, tobits uint, pad bool) ([]int, error) {
- acc := 0
- bits := uint(0)
- ret := []int{}
- maxv := (1 << tobits) - 1
- for idx, value := range data {
- if value < 0 || (value>>frombits) != 0 {
- return nil, fmt.Errorf("invalid data range : data[%d]=%d (frombits=%d)", idx, value, frombits)
- }
- acc = (acc << frombits) | value
- bits += frombits
- for bits >= tobits {
- bits -= tobits
- ret = append(ret, (acc>>bits)&maxv)
- }
- }
- if pad {
- if bits > 0 {
- ret = append(ret, (acc<<(tobits-bits))&maxv)
- }
- } else if bits >= frombits {
- return nil, fmt.Errorf("illegal zero padding")
- } else if ((acc << (tobits - bits)) & maxv) != 0 {
- return nil, fmt.Errorf("non-zero padding")
- }
- return ret, nil
+ acc := 0
+ bits := uint(0)
+ ret := []int{}
+ maxv := (1 << tobits) - 1
+ for idx, value := range data {
+ if value < 0 || (value>>frombits) != 0 {
+ return nil, fmt.Errorf("invalid data range : data[%d]=%d (frombits=%d)", idx, value, frombits)
+ }
+ acc = (acc << frombits) | value
+ bits += frombits
+ for bits >= tobits {
+ bits -= tobits
+ ret = append(ret, (acc>>bits)&maxv)
+ }
+ }
+ if pad {
+ if bits > 0 {
+ ret = append(ret, (acc<<(tobits-bits))&maxv)
+ }
+ } else if bits >= frombits {
+ return nil, fmt.Errorf("illegal zero padding")
+ } else if ((acc << (tobits - bits)) & maxv) != 0 {
+ return nil, fmt.Errorf("non-zero padding")
+ }
+ return ret, nil
}
// AddressDecode decodes hrp(human-readable part) Address(string), returns version(int) and data(bytes array) / or error
func AddressDecode(hrp, addr string) (int, []int, error) {
- dechrp, data, err := Decode(addr)
- if err != nil {
- return -1, nil, err
- }
- if dechrp != hrp {
- return -1, nil, fmt.Errorf("invalid human-readable part : %s != %s", hrp, dechrp)
- }
- if len(data) < 1 {
- return -1, nil, fmt.Errorf("invalid decode data length : %d", len(data))
- }
- if data[0] > 16 {
- return -1, nil, fmt.Errorf("invalid address version : %d", data[0])
- }
- res, err := convertbits(data[1:], 5, 8, false)
- if err != nil {
- return -1, nil, err
- }
- if len(res) < 2 || len(res) > 40 {
- return -1, nil, fmt.Errorf("invalid convertbits length : %d", len(res))
- }
- if data[0] == 0 && len(res) != 20 && len(res) != 32 {
- return -1, nil, fmt.Errorf("invalid program length for witness version 0 (per BIP141) : %d", len(res))
- }
- return data[0], res, nil
+ dechrp, data, err := Decode(addr)
+ if err != nil {
+ return -1, nil, err
+ }
+ if dechrp != hrp {
+ return -1, nil, fmt.Errorf("invalid human-readable part : %s != %s", hrp, dechrp)
+ }
+ if len(data) < 1 {
+ return -1, nil, fmt.Errorf("invalid decode data length : %d", len(data))
+ }
+ if data[0] > 16 {
+ return -1, nil, fmt.Errorf("invalid address version : %d", data[0])
+ }
+ res, err := convertbits(data[1:], 5, 8, false)
+ if err != nil {
+ return -1, nil, err
+ }
+ if len(res) < 2 || len(res) > 40 {
+ return -1, nil, fmt.Errorf("invalid convertbits length : %d", len(res))
+ }
+ if data[0] == 0 && len(res) != 20 && len(res) != 32 {
+ return -1, nil, fmt.Errorf("invalid program length for witness version 0 (per BIP141) : %d", len(res))
+ }
+ return data[0], res, nil
}
// AddressEncode encodes hrp(human-readable part) , version(int) and data(bytes array), returns Address / or error
func AddressEncode(hrp string, version int, pubkey []int) (string, error) {
- if version < 0 || version > 16 {
- return "", fmt.Errorf("invalid version : %d", version)
- }
- if len(pubkey) < 2 || len(pubkey) > 40 {
- return "", fmt.Errorf("invalid pubkey hash length : %d", len(pubkey))
- }
- if version == 0 && len(pubkey) != 20 && len(pubkey) != 32 {
- return "", fmt.Errorf("invalid program length for witness version 0 (per BIP141) : %d", len(pubkey))
- }
- data, err := convertbits(pubkey, 8, 5, true)
- if err != nil {
- return "", err
- }
- ret, err := Encode(hrp, append([]int{version}, data...))
- if err != nil {
- return "", err
- }
- return ret, nil
-}
\ No newline at end of file
+ if version < 0 || version > 16 {
+ return "", fmt.Errorf("invalid version : %d", version)
+ }
+ if len(pubkey) < 2 || len(pubkey) > 40 {
+ return "", fmt.Errorf("invalid pubkey hash length : %d", len(pubkey))
+ }
+ if version == 0 && len(pubkey) != 20 && len(pubkey) != 32 {
+ return "", fmt.Errorf("invalid program length for witness version 0 (per BIP141) : %d", len(pubkey))
+ }
+ data, err := convertbits(pubkey, 8, 5, true)
+ if err != nil {
+ return "", err
+ }
+ ret, err := Encode(hrp, append([]int{version}, data...))
+ if err != nil {
+ return "", err
+ }
+ return ret, nil
+}
package common
import (
- "reflect"
- "strings"
- "testing"
+ "reflect"
+ "strings"
+ "testing"
)
func ScriptPubkey(version int, program []int) []int {
- if version != 0 {
- version += 0x50
- }
- return append(append([]int{version}, len(program)), program...)
+ if version != 0 {
+ version += 0x50
+ }
+ return append(append([]int{version}, len(program)), program...)
}
var validChecksum = []string{
- "A12UEL5L",
- "an83characterlonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio1tt5tgs",
- "abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
- "11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
- "split1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
+ "A12UEL5L",
+ "an83characterlonghumanreadablepartthatcontainsthenumber1andtheexcludedcharactersbio1tt5tgs",
+ "abcdef1qpzry9x8gf2tvdw0s3jn54khce6mua7lmqqqxw",
+ "11qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc8247j",
+ "split1checkupstagehandshakeupstreamerranterredcaperred2y9e3w",
}
type item struct {
- address string
- scriptpubkey []int
+ address string
+ scriptpubkey []int
}
var validAddress = []item{
- item{"BC1QW508D6QEJXTDG4Y5R3ZARVARY0C5XW7KV8F3T4",
- []int{
- 0x00, 0x14, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,
- 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,
- },
- },
- item{"tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7",
- []int{
- 0x00, 0x20, 0x18, 0x63, 0x14, 0x3c, 0x14, 0xc5, 0x16, 0x68, 0x04,
- 0xbd, 0x19, 0x20, 0x33, 0x56, 0xda, 0x13, 0x6c, 0x98, 0x56, 0x78,
- 0xcd, 0x4d, 0x27, 0xa1, 0xb8, 0xc6, 0x32, 0x96, 0x04, 0x90, 0x32,
- 0x62,
- },
- },
- item{"bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx",
- []int{
- 0x51, 0x28, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,
- 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,
- 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c,
- 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,
- },
- },
- item{"BC1SW50QA3JX3S",
- []int{
- 0x60, 0x02, 0x75, 0x1e,
- },
- },
- item{"bc1zw508d6qejxtdg4y5r3zarvaryvg6kdaj",
- []int{
- 0x52, 0x10, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,
- 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23,
- },
- },
- item{"tb1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesrxh6hy",
- []int{
- 0x00, 0x20, 0x00, 0x00, 0x00, 0xc4, 0xa5, 0xca, 0xd4, 0x62, 0x21,
- 0xb2, 0xa1, 0x87, 0x90, 0x5e, 0x52, 0x66, 0x36, 0x2b, 0x99, 0xd5,
- 0xe9, 0x1c, 0x6c, 0xe2, 0x4d, 0x16, 0x5d, 0xab, 0x93, 0xe8, 0x64,
- 0x33,
- },
- },
+ item{"BC1QW508D6QEJXTDG4Y5R3ZARVARY0C5XW7KV8F3T4",
+ []int{
+ 0x00, 0x14, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,
+ 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,
+ },
+ },
+ item{"tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7",
+ []int{
+ 0x00, 0x20, 0x18, 0x63, 0x14, 0x3c, 0x14, 0xc5, 0x16, 0x68, 0x04,
+ 0xbd, 0x19, 0x20, 0x33, 0x56, 0xda, 0x13, 0x6c, 0x98, 0x56, 0x78,
+ 0xcd, 0x4d, 0x27, 0xa1, 0xb8, 0xc6, 0x32, 0x96, 0x04, 0x90, 0x32,
+ 0x62,
+ },
+ },
+ item{"bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx",
+ []int{
+ 0x51, 0x28, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,
+ 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,
+ 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c,
+ 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6,
+ },
+ },
+ item{"BC1SW50QA3JX3S",
+ []int{
+ 0x60, 0x02, 0x75, 0x1e,
+ },
+ },
+ item{"bc1zw508d6qejxtdg4y5r3zarvaryvg6kdaj",
+ []int{
+ 0x52, 0x10, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54,
+ 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23,
+ },
+ },
+ item{"tb1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesrxh6hy",
+ []int{
+ 0x00, 0x20, 0x00, 0x00, 0x00, 0xc4, 0xa5, 0xca, 0xd4, 0x62, 0x21,
+ 0xb2, 0xa1, 0x87, 0x90, 0x5e, 0x52, 0x66, 0x36, 0x2b, 0x99, 0xd5,
+ 0xe9, 0x1c, 0x6c, 0xe2, 0x4d, 0x16, 0x5d, 0xab, 0x93, 0xe8, 0x64,
+ 0x33,
+ },
+ },
}
var invalidAddress = []string{
- "tc1qw508d6qejxtdg4y5r3zarvary0c5xw7kg3g4ty",
- "bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t5",
- "BC13W508D6QEJXTDG4Y5R3ZARVARY0C5XW7KN40WF2",
- "bc1rw5uspcuh",
- "bc10w508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kw5rljs90",
- "BC1QR508D6QEJXTDG4Y5R3ZARVARYV98GJ9P",
- "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sL5k7",
- "tb1pw508d6qejxtdg4y5r3zarqfsj6c3",
- "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3pjxtptv",
+ "tc1qw508d6qejxtdg4y5r3zarvary0c5xw7kg3g4ty",
+ "bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t5",
+ "BC13W508D6QEJXTDG4Y5R3ZARVARY0C5XW7KN40WF2",
+ "bc1rw5uspcuh",
+ "bc10w508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kw5rljs90",
+ "BC1QR508D6QEJXTDG4Y5R3ZARVARYV98GJ9P",
+ "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sL5k7",
+ "tb1pw508d6qejxtdg4y5r3zarqfsj6c3",
+ "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3pjxtptv",
}
func TestValidChecksum(t *testing.T) {
- for _, test := range validChecksum {
- hrp, data, err := Decode(test)
- if err != nil {
- t.Errorf("Valid checksum for %s : FAIL / error %+v\n", test, err)
- } else {
- t.Logf("Valid checksum for %s : ok / hrp : %+v , data : %+v\n", test, hrp, data)
- }
- }
+ for _, test := range validChecksum {
+ hrp, data, err := Decode(test)
+ if err != nil {
+ t.Errorf("Valid checksum for %s : FAIL / error %+v\n", test, err)
+ } else {
+ t.Logf("Valid checksum for %s : ok / hrp : %+v , data : %+v\n", test, hrp, data)
+ }
+ }
}
func TestValidAddress(t *testing.T) {
- for _, test := range validAddress {
- hrp := "bc"
- version, program, err := AddressDecode(hrp, test.address)
- if err != nil {
- hrp = "tb"
- version, program, err = AddressDecode(hrp, test.address)
- }
- ok := err == nil
- if ok {
- output := ScriptPubkey(version, program)
- ok = reflect.DeepEqual(output, test.scriptpubkey)
- }
- if ok {
- recreate, err := AddressEncode(hrp, version, program)
- if err == nil {
- ok = recreate == strings.ToLower(test.address)
- }
- }
- if ok {
- t.Logf("Valid address %v : ok\n", test.address)
- } else {
- t.Errorf("Valid address %v : FAIL\n", test.address)
- }
- }
+ for _, test := range validAddress {
+ hrp := "bc"
+ version, program, err := AddressDecode(hrp, test.address)
+ if err != nil {
+ hrp = "tb"
+ version, program, err = AddressDecode(hrp, test.address)
+ }
+ ok := err == nil
+ if ok {
+ output := ScriptPubkey(version, program)
+ ok = reflect.DeepEqual(output, test.scriptpubkey)
+ }
+ if ok {
+ recreate, err := AddressEncode(hrp, version, program)
+ if err == nil {
+ ok = recreate == strings.ToLower(test.address)
+ }
+ }
+ if ok {
+ t.Logf("Valid address %v : ok\n", test.address)
+ } else {
+ t.Errorf("Valid address %v : FAIL\n", test.address)
+ }
+ }
}
-
func TestInvalidAddress(t *testing.T) {
- for _, test := range invalidAddress {
- _, _, bcErr := AddressDecode("bc", test)
- t.Logf("bc error:%v\n", bcErr)
- _, _, tbErr := AddressDecode("tb", test)
- t.Logf("tb error:%v\n", tbErr)
- if bcErr != nil && tbErr != nil {
- t.Logf("Invalid address %v : ok\n", test)
- } else {
- t.Errorf("Invalid address %v : FAIL\n", test)
- }
- }
+ for _, test := range invalidAddress {
+ _, _, bcErr := AddressDecode("bc", test)
+ t.Logf("bc error:%v\n", bcErr)
+ _, _, tbErr := AddressDecode("tb", test)
+ t.Logf("tb error:%v\n", tbErr)
+ if bcErr != nil && tbErr != nil {
+ t.Logf("Invalid address %v : ok\n", test)
+ } else {
+ t.Errorf("Invalid address %v : FAIL\n", test)
+ }
+ }
}
-
// add coverage tests
func TestCoverage(t *testing.T) {
- var err error
- var bech32String string
- var hrp string
- var data []int
+ var err error
+ var bech32String string
+ var hrp string
+ var data []int
- // AddressEncode
- bech32String, err = AddressEncode("bc", 1, []int{0, 1})
- if err != nil {
- t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
- } else {
- t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
- }
- data = make([]int, 40)
- bech32String, err = AddressEncode("bc", 16, data)
- if err != nil {
- t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
- } else {
- t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
- }
- data = make([]int, 20)
- bech32String, err = AddressEncode("bc", 0, data)
- if err != nil {
- t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
- } else {
- t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
- }
- data = make([]int, 32)
- bech32String, err = AddressEncode("bc", 0, data)
- if err != nil {
- t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
- } else {
- t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
- }
- data = make([]int, 1)
- _, err = AddressEncode("bc", 1, data)
- if err == nil {
- t.Errorf("Coverage AddressEncode invalid program length error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode invalid program length error case : ok / error :", err)
- }
- data = make([]int, 41)
- _, err = AddressEncode("bc", 1, data)
- if err == nil {
- t.Errorf("Coverage AddressEncode invalid program length error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode invalid program length error case : ok / error :", err)
- }
- data = make([]int, 26)
- _, err = AddressEncode("bc", 0, data)
- if err == nil {
- t.Errorf("Coverage AddressEncode invalid program length for witness version 0 (per BIP141) error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode invalid program length for witness version 0 (per BIP141) error case : ok / error :", err)
- }
- data = make([]int, 20)
- _, err = AddressEncode("Bc", 0, data)
- if err == nil {
- t.Errorf("Coverage AddressEncode Encode error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode Encode error case : ok / error :", err)
- }
- _, err = AddressEncode("bc", 1, []int{-1, 0})
- if err == nil {
- t.Errorf("Coverage AddressEncode invalid data range error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode invalid data range error case : ok / error :", err)
- }
- _, err = AddressEncode("bc", -1, data)
- if err == nil {
- t.Errorf("Coverage AddressEncode invalid witness version error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode invalid witness version error case : ok / error :", err)
- }
- _, err = AddressEncode("bc", 17, data)
- if err == nil {
- t.Errorf("Coverage AddressEncode invalid witness version error case : FAIL")
- } else {
- t.Log("Coverage AddressEncode invalid witness version error case : ok / error :", err)
- }
+ // AddressEncode
+ bech32String, err = AddressEncode("bc", 1, []int{0, 1})
+ if err != nil {
+ t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
+ } else {
+ t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
+ }
+ data = make([]int, 40)
+ bech32String, err = AddressEncode("bc", 16, data)
+ if err != nil {
+ t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
+ } else {
+ t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
+ }
+ data = make([]int, 20)
+ bech32String, err = AddressEncode("bc", 0, data)
+ if err != nil {
+ t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
+ } else {
+ t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
+ }
+ data = make([]int, 32)
+ bech32String, err = AddressEncode("bc", 0, data)
+ if err != nil {
+ t.Errorf("Coverage AddressEncode normal case : FAIL / error : %+v\n", err)
+ } else {
+ t.Log("Coverage AddressEncode normal case : ok / bech32String :", bech32String)
+ }
+ data = make([]int, 1)
+ _, err = AddressEncode("bc", 1, data)
+ if err == nil {
+ t.Errorf("Coverage AddressEncode invalid program length error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode invalid program length error case : ok / error :", err)
+ }
+ data = make([]int, 41)
+ _, err = AddressEncode("bc", 1, data)
+ if err == nil {
+ t.Errorf("Coverage AddressEncode invalid program length error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode invalid program length error case : ok / error :", err)
+ }
+ data = make([]int, 26)
+ _, err = AddressEncode("bc", 0, data)
+ if err == nil {
+ t.Errorf("Coverage AddressEncode invalid program length for witness version 0 (per BIP141) error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode invalid program length for witness version 0 (per BIP141) error case : ok / error :", err)
+ }
+ data = make([]int, 20)
+ _, err = AddressEncode("Bc", 0, data)
+ if err == nil {
+ t.Errorf("Coverage AddressEncode Encode error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode Encode error case : ok / error :", err)
+ }
+ _, err = AddressEncode("bc", 1, []int{-1, 0})
+ if err == nil {
+ t.Errorf("Coverage AddressEncode invalid data range error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode invalid data range error case : ok / error :", err)
+ }
+ _, err = AddressEncode("bc", -1, data)
+ if err == nil {
+ t.Errorf("Coverage AddressEncode invalid witness version error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode invalid witness version error case : ok / error :", err)
+ }
+ _, err = AddressEncode("bc", 17, data)
+ if err == nil {
+ t.Errorf("Coverage AddressEncode invalid witness version error case : FAIL")
+ } else {
+ t.Log("Coverage AddressEncode invalid witness version error case : ok / error :", err)
+ }
- // SegwitAddrDecode
- _, _, err = AddressDecode("a", "A12UEL5L")
- if err == nil {
- t.Errorf("Coverage SegwitAddrDecode invalid decode data length error case : FAIL")
- } else {
- t.Log("Coverage SegwitAddrDecode invalid decode data length error case : ok / error :", err)
- }
+ // SegwitAddrDecode
+ _, _, err = AddressDecode("a", "A12UEL5L")
+ if err == nil {
+ t.Errorf("Coverage SegwitAddrDecode invalid decode data length error case : FAIL")
+ } else {
+ t.Log("Coverage SegwitAddrDecode invalid decode data length error case : ok / error :", err)
+ }
- // Decode
- _, _, err = Decode("!~1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc356v3")
- if err != nil {
- t.Errorf("Coverage Decode normal case : FAIL / error :%v", err)
- } else {
- t.Log("Coverage Decode normal case : ok")
- }
- _, _, err = Decode("a1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq")
- if err == nil {
- t.Errorf("Coverage Decode too long error case : FAIL")
- } else {
- t.Log("Coverage Decode too long error case : ok / error :", err)
- }
- _, _, err = Decode("1")
- if err == nil {
- t.Errorf("Coverage Decode separator '1' at invalid position error case : FAIL")
- } else {
- t.Log("Coverage Decode separator '1' at invalid position error case : ok / error :", err)
- }
- _, _, err = Decode("a1qqqqq")
- if err == nil {
- t.Errorf("Coverage Decode separator '1' at invalid position error case : FAIL")
- } else {
- t.Log("Coverage Decode separator '1' at invalid position error case : ok / error :", err)
- }
- _, _, err = Decode("a" + string(32) + "1qqqqqq")
- if err == nil {
- t.Errorf("Coverage Decode invalid character human-readable part error case : FAIL")
- } else {
- t.Log("Coverage Decode invalid character human-readable part error case : ok / error :", err)
- }
- _, _, err = Decode("a" + string(127) + "1qqqqqq")
- if err == nil {
- t.Errorf("Coverage Decode invalid character human-readable part error case : FAIL")
- } else {
- t.Log("Coverage Decode invalid character human-readable part error case : ok / error :", err)
- }
- _, _, err = Decode("a1qqqqqb")
- if err == nil {
- t.Errorf("Coverage Decode invalid character data part error case : FAIL")
- } else {
- t.Log("Coverage Decode invalid character data part erroer case : ok / error :", err)
- }
+ // Decode
+ _, _, err = Decode("!~1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqc356v3")
+ if err != nil {
+ t.Errorf("Coverage Decode normal case : FAIL / error :%v", err)
+ } else {
+ t.Log("Coverage Decode normal case : ok")
+ }
+ _, _, err = Decode("a1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq")
+ if err == nil {
+ t.Errorf("Coverage Decode too long error case : FAIL")
+ } else {
+ t.Log("Coverage Decode too long error case : ok / error :", err)
+ }
+ _, _, err = Decode("1")
+ if err == nil {
+ t.Errorf("Coverage Decode separator '1' at invalid position error case : FAIL")
+ } else {
+ t.Log("Coverage Decode separator '1' at invalid position error case : ok / error :", err)
+ }
+ _, _, err = Decode("a1qqqqq")
+ if err == nil {
+ t.Errorf("Coverage Decode separator '1' at invalid position error case : FAIL")
+ } else {
+ t.Log("Coverage Decode separator '1' at invalid position error case : ok / error :", err)
+ }
+ _, _, err = Decode("a" + string(32) + "1qqqqqq")
+ if err == nil {
+ t.Errorf("Coverage Decode invalid character human-readable part error case : FAIL")
+ } else {
+ t.Log("Coverage Decode invalid character human-readable part error case : ok / error :", err)
+ }
+ _, _, err = Decode("a" + string(127) + "1qqqqqq")
+ if err == nil {
+ t.Errorf("Coverage Decode invalid character human-readable part error case : FAIL")
+ } else {
+ t.Log("Coverage Decode invalid character human-readable part error case : ok / error :", err)
+ }
+ _, _, err = Decode("a1qqqqqb")
+ if err == nil {
+ t.Errorf("Coverage Decode invalid character data part error case : FAIL")
+ } else {
+ 	t.Log("Coverage Decode invalid character data part error case : ok / error :", err)
+ }
- // Encode
- hrp = "bc"
- data = []int{}
- bech32String, err = Encode(hrp, data)
- if err != nil || bech32String != strings.ToLower(bech32String) {
- t.Errorf("Coverage Encode lower case : FAIL / bech32String : %v , error : %v", bech32String, err)
- } else {
- t.Log("Coverage Encode lower case : ok / bech32String : ", bech32String)
- }
- hrp = "BC"
- bech32String, err = Encode(hrp, data)
- if err != nil || bech32String != strings.ToUpper(bech32String) {
- t.Errorf("Coverage Encode upper case : FAIL / bech32String : %v , error : %v", bech32String, err)
- } else {
- t.Log("Coverage Encode upper case : ok / bech32String : ", bech32String)
- }
- hrp = "bc"
- data = make([]int, 90-7-len(hrp)+1)
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode too long error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode too long error case : ok / error : ", err)
- }
- hrp = ""
- data = make([]int, 90-7-len(hrp))
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode invalid hrp error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode invalid hrp error case : ok / error : ", err)
- }
- hrp = "Bc"
- data = make([]int, 90-7-len(hrp))
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode mix case error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode mix case error case : ok / error : ", err)
- }
- hrp = string(33) + string(126)
- data = make([]int, 90-7-len(hrp))
- bech32String, err = Encode(hrp, data)
- if err != nil {
- t.Errorf("Coverage Encode normal case : FAIL / error : %v", err)
- } else {
- t.Log("Coverage Encode normal case : ok / bech32String : ", bech32String)
- }
- hrp = string(32) + "c"
- data = make([]int, 90-7-len(hrp))
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode invalid character human-readable part error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode invalid character human-readable part error case : ok / error : ", err)
- }
- hrp = "b" + string(127)
- data = make([]int, 90-7-len(hrp))
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode invalid character human-readable part error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode invalid character human-readable part error case : ok / error : ", err)
- }
- hrp = "bc"
- data = []int{0, 31}
- bech32String, err = Encode(hrp, data)
- if err != nil {
- t.Errorf("Coverage Encode normal case : FAIL / error : %v", err)
- } else {
- t.Log("Coverage Encode normal case : ok / bech32String : ", bech32String)
- }
- hrp = "bc"
- data = []int{-1}
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode invalid data error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode invalid data error case : ok / error : ", err)
- }
- hrp = "bc"
- data = []int{32}
- bech32String, err = Encode(hrp, data)
- if err == nil {
- t.Errorf("Coverage Encode invalid data error case : FAIL / bech32String : %v", bech32String)
- } else {
- t.Log("Coverage Encode invalid data error case : ok / error : ", err)
- }
+ // Encode
+ hrp = "bc"
+ data = []int{}
+ bech32String, err = Encode(hrp, data)
+ if err != nil || bech32String != strings.ToLower(bech32String) {
+ t.Errorf("Coverage Encode lower case : FAIL / bech32String : %v , error : %v", bech32String, err)
+ } else {
+ t.Log("Coverage Encode lower case : ok / bech32String : ", bech32String)
+ }
+ hrp = "BC"
+ bech32String, err = Encode(hrp, data)
+ if err != nil || bech32String != strings.ToUpper(bech32String) {
+ t.Errorf("Coverage Encode upper case : FAIL / bech32String : %v , error : %v", bech32String, err)
+ } else {
+ t.Log("Coverage Encode upper case : ok / bech32String : ", bech32String)
+ }
+ hrp = "bc"
+ data = make([]int, 90-7-len(hrp)+1)
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode too long error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode too long error case : ok / error : ", err)
+ }
+ hrp = ""
+ data = make([]int, 90-7-len(hrp))
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode invalid hrp error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode invalid hrp error case : ok / error : ", err)
+ }
+ hrp = "Bc"
+ data = make([]int, 90-7-len(hrp))
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode mix case error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode mix case error case : ok / error : ", err)
+ }
+ hrp = string(33) + string(126)
+ data = make([]int, 90-7-len(hrp))
+ bech32String, err = Encode(hrp, data)
+ if err != nil {
+ t.Errorf("Coverage Encode normal case : FAIL / error : %v", err)
+ } else {
+ t.Log("Coverage Encode normal case : ok / bech32String : ", bech32String)
+ }
+ hrp = string(32) + "c"
+ data = make([]int, 90-7-len(hrp))
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode invalid character human-readable part error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode invalid character human-readable part error case : ok / error : ", err)
+ }
+ hrp = "b" + string(127)
+ data = make([]int, 90-7-len(hrp))
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode invalid character human-readable part error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode invalid character human-readable part error case : ok / error : ", err)
+ }
+ hrp = "bc"
+ data = []int{0, 31}
+ bech32String, err = Encode(hrp, data)
+ if err != nil {
+ t.Errorf("Coverage Encode normal case : FAIL / error : %v", err)
+ } else {
+ t.Log("Coverage Encode normal case : ok / bech32String : ", bech32String)
+ }
+ hrp = "bc"
+ data = []int{-1}
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode invalid data error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode invalid data error case : ok / error : ", err)
+ }
+ hrp = "bc"
+ data = []int{32}
+ bech32String, err = Encode(hrp, data)
+ if err == nil {
+ t.Errorf("Coverage Encode invalid data error case : FAIL / bech32String : %v", bech32String)
+ } else {
+ t.Log("Coverage Encode invalid data error case : ok / error : ", err)
+ }
}
package common
import (
- _"encoding/hex"
+ _ "encoding/hex"
"encoding/json"
"errors"
"fmt"
)
const (
- HashLength = 32
- AddressLength = 42
+ HashLength = 32
+ AddressLength = 42
PubkeyHashLength = 20
)
func BigToHash(b *big.Int) Hash { return BytesToHash(b.Bytes()) }
func HexToHash(s string) Hash { return BytesToHash(FromHex(s)) }
-
// Don't use the default 'String' method in case we want to overwrite
// Get the string representation of the underlying hash
"fmt"
"path/filepath"
"time"
-
//"github.com/bytom/types"
)
BaseConfig `mapstructure:",squash"`
// Options for services
- RPC *RPCConfig `mapstructure:"rpc"`
- P2P *P2PConfig `mapstructure:"p2p"`
+ RPC *RPCConfig `mapstructure:"rpc"`
+ P2P *P2PConfig `mapstructure:"p2p"`
}
func DefaultConfig() *Config {
TxIndex: "kv",
DBBackend: "leveldb",
DBPath: "data",
- KeysPath: "keystore",
- HsmUrl: "",
+ KeysPath: "keystore",
+ HsmUrl: "",
}
}
return rootify(b.KeysPath, b.RootDir)
}
-
func DefaultLogLevel() string {
return "info"
}
//"errors"
"github.com/bytom/common"
- "golang.org/x/crypto/sha3"
"golang.org/x/crypto/ripemd160"
+ "golang.org/x/crypto/sha3"
)
-
func Sha256(data ...[]byte) []byte {
d := sha3.New256()
for _, b := range data {
func Sha3(data ...[]byte) []byte { return Sha256(data...) }
func Sha3Hash(data ...[]byte) common.Hash { return Sha256Hash(data...) }
-
func Ripemd160(data []byte) []byte {
ripemd := ripemd160.New()
ripemd.Write(data)
return ripemd.Sum(nil)
}
-func PubkeyToAddress(pubBytes []byte) common.Address{
+func PubkeyToAddress(pubBytes []byte) common.Address {
address, _ := common.AddressEncode("bm", 1, toInt(Ripemd160(Sha3(pubBytes))))
fmt.Printf(address)
return common.StringToAddress(address)
}
}
-func toInt(bytes []byte) []int{
+func toInt(bytes []byte) []int {
ints := make([]int, len(bytes))
for i := range bytes {
ints[i] = int(bytes[i])
return ints
}
-func toBytes(ints []int) []byte{
+func toBytes(ints []int) []byte {
bytes := make([]byte, len(ints))
for i := range ints {
bytes[i] = byte(ints[i])
package crypto
import (
- _"bytes"
- _"crypto/ecdsa"
- _"encoding/hex"
- _"fmt"
- _"io/ioutil"
- _"math/big"
- _"os"
+ _ "bytes"
+ _ "bytom/common"
+ _ "crypto/ecdsa"
+ _ "encoding/hex"
+ _ "fmt"
+ _ "io/ioutil"
+ _ "math/big"
+ _ "os"
"testing"
- _"time"
- _"bytom/common"
-
+ _ "time"
)
var testAddrHex = "970e8128ab834e8eac17ab8e3812f010678cf791"
addr := PubkeyToAddress([]byte("289c2857d4598e37fb9647507e47a309d6133539bf21a8b9cb6df88fd5232032"))
t.Log("address:", addr.Str())
//addr = common.StringToAddress("bm1pyjkqc458pWy4W4H53a06fpl298260mvualj97g")
- ver, data, _ := AddressToPubkey(addr)
+ ver, data, _ := AddressToPubkey(addr)
t.Log("address:", ver, data)
}
fmt.Printf("msg: %x, privkey: %x sig: %x\n", msg1, k1, sig1)
}
-*/
\ No newline at end of file
+*/
"testing"
"time"
+ p2p "github.com/bytom/p2p"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- p2p "github.com/bytom/p2p"
"github.com/tendermint/tmlibs/log"
)
"net"
"time"
- crypto "github.com/tendermint/go-crypto"
cfg "github.com/bytom/config"
+ crypto "github.com/tendermint/go-crypto"
cmn "github.com/tendermint/tmlibs/common"
)
return nil
}
-func (c *Chain) SetAssetsAmount(block *legacy.Block){
+func (c *Chain) SetAssetsAmount(block *legacy.Block) {
assets_amount := c.assets_utxo.assets_amount
- if(block.Transactions != nil){
+ if block.Transactions != nil {
c.assets_utxo.cond.L.Lock()
- for _,item := range block.Transactions[1:]{
- if (item.Outputs != nil){
- for _,utxo := range item.Outputs{
- if _,ok := assets_amount[utxo.AssetId.String()]; ok {
+ for _, item := range block.Transactions[1:] {
+ if item.Outputs != nil {
+ for _, utxo := range item.Outputs {
+ if _, ok := assets_amount[utxo.AssetId.String()]; ok {
assets_amount[utxo.AssetId.String()] += utxo.Amount
- }else{
+ } else {
assets_amount[utxo.AssetId.String()] = utxo.Amount
}
}
}
-func (c *Chain) GetAssetsAmount() ([]interface{}) {
- var result = make([]interface{},0)
+func (c *Chain) GetAssetsAmount() []interface{} {
+ var result = make([]interface{}, 0)
c.assets_utxo.cond.L.Lock()
defer c.assets_utxo.cond.L.Unlock()
- if(len(c.assets_utxo.assets_amount)>0) {
- result = append(result,c.assets_utxo.assets_amount)
+ if len(c.assets_utxo.assets_amount) > 0 {
+ result = append(result, c.assets_utxo.assets_amount)
}
return result
lastQueuedSnapshot time.Time
pendingSnapshots chan pendingSnapshot
- txPool *TxPool
- assets_utxo struct{
- cond sync.Cond
+ txPool *TxPool
+ assets_utxo struct {
+ cond sync.Cond
assets_amount map[string]uint64
}
}
}
c.state.cond.L = new(sync.Mutex)
- c.assets_utxo.assets_amount = make(map[string]uint64,1024) //prepared buffer 1024 key-values
+ c.assets_utxo.assets_amount = make(map[string]uint64, 1024) //prepared buffer 1024 key-values
c.assets_utxo.cond.L = new(sync.Mutex)
log.Printf(ctx, "bytom's Height:%v.", store.Height())
import (
"context"
-// "fmt"
+ // "fmt"
-// "github.com/blockchain/errors"
+ // "github.com/blockchain/errors"
"github.com/bytom/protocol/bc/legacy"
"github.com/bytom/protocol/state"
)
}
}
return b, snapshot, nil
- */
- return nil, nil, nil
+ */
+ return nil, nil, nil
}
"testing"
"time"
- "github.com/stretchr/testify/require"
- merktest "github.com/tendermint/merkleeyes/testutil"
"github.com/bytom/rpc/client"
"github.com/bytom/types"
+ "github.com/stretchr/testify/require"
+ merktest "github.com/tendermint/merkleeyes/testutil"
)
func TestHeaderEvents(t *testing.T) {
import (
"time"
- "github.com/pkg/errors"
"github.com/bytom/types"
+ "github.com/pkg/errors"
cmn "github.com/tendermint/tmlibs/common"
events "github.com/tendermint/tmlibs/events"
)
"strings"
"testing"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
"github.com/bytom/rpc/client"
"github.com/bytom/rpc/client/mock"
ctypes "github.com/bytom/rpc/core/types"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestWaitForHeight(t *testing.T) {
"encoding/json"
"fmt"
- "github.com/pkg/errors"
- data "github.com/tendermint/go-wire/data"
ctypes "github.com/bytom/rpc/core/types"
"github.com/bytom/rpc/lib/client"
"github.com/bytom/types"
+ "github.com/pkg/errors"
+ data "github.com/tendermint/go-wire/data"
events "github.com/tendermint/tmlibs/events"
)
package client
import (
- data "github.com/tendermint/go-wire/data"
ctypes "github.com/bytom/rpc/core/types"
"github.com/bytom/types"
+ data "github.com/tendermint/go-wire/data"
)
// ABCIClient groups together the functionality that principally
package client
import (
- data "github.com/tendermint/go-wire/data"
nm "github.com/bytom/node"
"github.com/bytom/rpc/core"
ctypes "github.com/bytom/rpc/core/types"
"github.com/bytom/types"
+ data "github.com/tendermint/go-wire/data"
)
/*
"os"
"testing"
- meapp "github.com/tendermint/merkleeyes/app"
nm "github.com/bytom/node"
rpctest "github.com/bytom/rpc/test"
+ meapp "github.com/tendermint/merkleeyes/app"
)
var node *nm.Node
"strings"
"testing"
+ "github.com/bytom/rpc/client"
+ rpctest "github.com/bytom/rpc/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/tendermint/merkleeyes/iavl"
merktest "github.com/tendermint/merkleeyes/testutil"
- "github.com/bytom/rpc/client"
- rpctest "github.com/bytom/rpc/test"
)
func getHTTPClient() *client.HTTP {
)
func BlockHeight() (*ctypes.ResultBlockchainInfo, error) {
- return &ctypes.ResultBlockchainInfo{LastHeight: blockStore.Height(),}, nil
+ return &ctypes.ResultBlockchainInfo{LastHeight: blockStore.Height()}, nil
}
package core
import (
+ "github.com/bytom/blockchain/txdb"
p2p "github.com/bytom/p2p"
"github.com/bytom/types"
"github.com/tendermint/tmlibs/log"
- "github.com/bytom/blockchain/txdb"
)
type P2P interface {
var (
// external, thread safe interfaces
- eventSwitch types.EventSwitch
- blockStore *txdb.Store
- p2pSwitch P2P
+ eventSwitch types.EventSwitch
+ blockStore *txdb.Store
+ p2pSwitch P2P
- addrBook *p2p.AddrBook
+ addrBook *p2p.AddrBook
logger log.Logger
)
package core
import (
- ctypes "github.com/bytom/rpc/core/types"
- "github.com/bytom/protocol/bc/legacy"
+ "github.com/bytom/protocol/bc/legacy"
+ ctypes "github.com/bytom/rpc/core/types"
)
-func GetWork()(*ctypes.ResultBlockHeaderInfo, error){
- return &ctypes.ResultBlockHeaderInfo{},nil
+func GetWork() (*ctypes.ResultBlockHeaderInfo, error) {
+ return &ctypes.ResultBlockHeaderInfo{}, nil
}
-func SubmitWork(height uint64) (bool,error) {
- block := legacy.Block{
- BlockHeader: legacy.BlockHeader{
- Version: 1,
- Height: height,
- },
- }
- blockStore.SaveBlock(&block)
- return true,nil
+func SubmitWork(height uint64) (bool, error) {
+ block := legacy.Block{
+ BlockHeader: legacy.BlockHeader{
+ Version: 1,
+ Height: height,
+ },
+ }
+ blockStore.SaveBlock(&block)
+ return true, nil
}
-
// TODO: better system than "unsafe" prefix
var Routes = map[string]*rpc.RPCFunc{
// subscribe/unsubscribe are reserved for websocket events.
- "net_info": rpc.NewRPCFunc(NetInfo, ""),
- "getwork": rpc.NewRPCFunc(GetWork, ""),
- "submitwork": rpc.NewRPCFunc(SubmitWork, "height"),
- "getBlockHeight": rpc.NewRPCFunc(BlockHeight, ""),
+ "net_info": rpc.NewRPCFunc(NetInfo, ""),
+ "getwork": rpc.NewRPCFunc(GetWork, ""),
+ "submitwork": rpc.NewRPCFunc(SubmitWork, "height"),
+ "getBlockHeight": rpc.NewRPCFunc(BlockHeight, ""),
}
func AddUnsafeRoutes() {
import (
"strings"
- "time"
+ "time"
+ "github.com/bytom/p2p"
+ "github.com/bytom/protocol/bc"
+ "github.com/bytom/types"
abci "github.com/tendermint/abci/types"
"github.com/tendermint/go-crypto"
"github.com/tendermint/go-wire/data"
- "github.com/bytom/protocol/bc"
- "github.com/bytom/p2p"
- "github.com/bytom/types"
)
type BlockNonce [8]byte
type ResultBlockchainInfo struct {
- LastHeight uint64 `json:"last_height"`
+ LastHeight uint64 `json:"last_height"`
}
type ResultGenesis struct {
}
type ResultBlockHeaderInfo struct {
- Version int32 `json:"version"`
- //Height uint64 `json:"height"`
- MerkleRoot bc.Hash `json:"merkleroot"`
- PreviousBlockHash bc.Hash `json:"prevblockhash"`
- TimestampMS time.Time `json:"timestamp"`
- Bits uint64 `json:"bits"`
- Nonce uint64 `json:"nonce"`
+ Version int32 `json:"version"`
+ //Height uint64 `json:"height"`
+ MerkleRoot bc.Hash `json:"merkleroot"`
+ PreviousBlockHash bc.Hash `json:"prevblockhash"`
+ TimestampMS time.Time `json:"timestamp"`
+ Bits uint64 `json:"bits"`
+ Nonce uint64 `json:"nonce"`
}
type ResultDialSeeds struct {
import (
"testing"
- "github.com/stretchr/testify/assert"
"github.com/bytom/p2p"
+ "github.com/stretchr/testify/assert"
)
func TestStatusIndexer(t *testing.T) {
}
type ResponseBroadcastTx struct {
- CheckTx *types.ResponseCheckTx `protobuf:"bytes,1,opt,name=check_tx,json=checkTx" json:"check_tx,omitempty"`
+ CheckTx *types.ResponseCheckTx `protobuf:"bytes,1,opt,name=check_tx,json=checkTx" json:"check_tx,omitempty"`
DeliverTx *types.ResponseDeliverTx `protobuf:"bytes,2,opt,name=deliver_tx,json=deliverTx" json:"deliver_tx,omitempty"`
}
"reflect"
"strings"
- "github.com/pkg/errors"
types "github.com/bytom/rpc/lib/types"
+ "github.com/pkg/errors"
cmn "github.com/tendermint/tmlibs/common"
)
"net/http"
"time"
+ types "github.com/bytom/rpc/lib/types"
"github.com/gorilla/websocket"
"github.com/pkg/errors"
- types "github.com/bytom/rpc/lib/types"
cmn "github.com/tendermint/tmlibs/common"
)
"strings"
"time"
- "github.com/pkg/errors"
types "github.com/bytom/rpc/lib/types"
+ "github.com/pkg/errors"
"github.com/tendermint/tmlibs/log"
)
func EventStringRebond() string { return "Rebond" }
func EventStringDupeout() string { return "Dupeout" }
func EventStringFork() string { return "Fork" }
+
//func EventStringTx(tx Tx) string { return cmn.Fmt("Tx:%X", tx.Hash()) }
func EventStringNewBlock() string { return "NewBlock" }
// All txs fire EventDataTx
type EventDataTx struct {
- Height int `json:"height"`
+ Height int `json:"height"`
//Tx Tx `json:"tx"`
- Data data.Bytes `json:"data"`
- Log string `json:"log"`
- Code abci.CodeType `json:"code"`
- Error string `json:"error"` // this is redundant information for now
+ Data data.Bytes `json:"data"`
+ Log string `json:"log"`
+ Code abci.CodeType `json:"code"`
+ Error string `json:"error"` // this is redundant information for now
}
// NOTE: This goes into the replay WAL
//------------------------------------------------------------
// core types for a genesis definition
type GenesisDoc struct {
- GenesisTime time.Time `json:"genesis_time"`
- ChainID string `json:"chain_id"`
- PrivateKey string `json:"private_key"`
- AppHash data.Bytes `json:"app_hash"`
+ GenesisTime time.Time `json:"genesis_time"`
+ ChainID string `json:"chain_id"`
+ PrivateKey string `json:"private_key"`
+ AppHash data.Bytes `json:"app_hash"`
}
// Utility method for saving GenensisDoc as JSON file.