Add startrunoff blobs for RFP parents
Concurrent parsing for comments/ballot journals on the same prop thread
Verify cast vote blobs / auth vote blobs
Cleanups in general
thi4go committed Oct 19, 2021
1 parent 4dfddf1 commit 381af10
Showing 6 changed files with 353 additions and 311 deletions.
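
The commit description above mentions concurrent parsing of the comments and ballot journals for a single proposal thread. The wiring for that is not in the hunks shown below, so here is a minimal illustrative sketch of the pattern using errgroup; the parser names mirror comments.go, but their bodies and the orchestration are placeholders, not the commit's actual code.

```go
package main

import (
	"fmt"

	"golang.org/x/sync/errgroup"
)

// Stand-ins for the journal parsers; the real methods hang off *legacyImport
// in comments.go and the ballot journal code.
func parseCommentsJournal(path, legacyToken string, newToken []byte) error { return nil }
func parseBallotJournal(path, legacyToken string, newToken []byte) error   { return nil }

// parseProposalJournals runs both journal parsers for one proposal thread
// concurrently and returns the first error encountered.
func parseProposalJournals(commentsPath, ballotPath, legacyToken string, newToken []byte) error {
	var eg errgroup.Group
	eg.Go(func() error {
		return parseCommentsJournal(commentsPath, legacyToken, newToken)
	})
	eg.Go(func() error {
		return parseBallotJournal(ballotPath, legacyToken, newToken)
	})
	// Wait blocks until both parsers return.
	return eg.Wait()
}

func main() {
	err := parseProposalJournals("comments.journal", "ballot.journal",
		"legacytoken", []byte("newtoken"))
	if err != nil {
		fmt.Println(err)
	}
}
```
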
9 changes: 8 additions & 1 deletion politeiad/cmd/legacyimport/README.md
@@ -5,12 +5,19 @@ into the new tlog backend. This tool will only be used once.

## Considerations

We decided to import only the latest version of each record into tlog, and save it as
- We decided to import only the latest version of each record into tlog, and save it as
a version 1/iteration 1 record. If one wishes to check further versions of a finished
legacy record, the git repo will be available.

- Cast vote signatures cannot be verified with the current politeia public key; use the old key
  "a70134196c3cdf3f85f8af6abaa38c15feb7bccf5e6d3db6212358363465e502" instead (see the sketch after this list).

- Vote details metadata cannot be signature verified with the new tlog backend because
  of significant data changes.
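
Politeia signing keys are Ed25519 and both keys and signatures travel as hex strings, so a legacy signature can be checked directly with the standard library. Below is a minimal sketch against the old key quoted above; `verifyLegacySig` is a hypothetical helper, and the caller must supply the exact message bytes that were originally signed for the blob type in question.

```go
package main

import (
	"crypto/ed25519"
	"encoding/hex"
	"fmt"
)

// Old politeia server public key from the note above.
const oldServerPubKey = "a70134196c3cdf3f85f8af6abaa38c15feb7bccf5e6d3db6212358363465e502"

// verifyLegacySig checks a hex-encoded Ed25519 signature over msg against
// the old server key.
func verifyLegacySig(msg, sigHex string) error {
	pub, err := hex.DecodeString(oldServerPubKey)
	if err != nil {
		return err
	}
	if len(pub) != ed25519.PublicKeySize {
		return fmt.Errorf("invalid public key length %v", len(pub))
	}
	sig, err := hex.DecodeString(sigHex)
	if err != nil {
		return err
	}
	if !ed25519.Verify(ed25519.PublicKey(pub), []byte(msg), sig) {
		return fmt.Errorf("signature verification failed")
	}
	return nil
}

func main() {
	// Placeholder values; real ones come from the legacy vote blobs.
	if err := verifyLegacySig("message-that-was-signed", "00"); err != nil {
		fmt.Println(err)
	}
}
```
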

## Usage

`legacyimport`


66 changes: 47 additions & 19 deletions politeiad/cmd/legacyimport/comments.go
@@ -15,17 +15,17 @@ import (
"github.com/decred/politeia/politeiad/plugins/comments"
)

// convertCommentsJournal walks through the legacy comments journal converting
// parseCommentsJournal walks through the legacy comments journal converting
// them to the appropriate plugin payloads for the tstore backend.
func (l *legacyImport) convertCommentsJournal(path, legacyToken string, newToken []byte) error {
func (l *legacyImport) parseCommentsJournal(path, legacyToken string, newToken []byte) error {
fh, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE, 0664)
if err != nil {
return err
}

s := bufio.NewScanner(fh)

// Initialize comments cache
// Initialize comments cache.
l.Lock()
l.comments[hex.EncodeToString(newToken)] = make(map[string]decredplugin.Comment)
l.Unlock()
@@ -47,7 +47,7 @@ func (l *legacyImport) convertCommentsJournal(path, legacyToken string, newToken
var c decredplugin.Comment
err = d.Decode(&c)
if err != nil {
return fmt.Errorf("comment journal add: %v", err)
return err
}
err = l.blobSaveCommentAdd(c, newToken)
if err != nil {
@@ -61,7 +61,7 @@ func (l *legacyImport) convertCommentsJournal(path, legacyToken string, newToken
var cc decredplugin.CensorComment
err = d.Decode(&cc)
if err != nil {
return fmt.Errorf("comment journal censor: %v", err)
return err
}

l.RLock()
@@ -70,21 +70,20 @@ func (l *legacyImport) convertCommentsJournal(path, legacyToken string, newToken

err = l.blobSaveCommentDel(cc, newToken, parentID)
if err != nil {
return fmt.Errorf("comment journal del: %v", err)
return err
}
case "addlike":
var lc likeCommentV1
err = d.Decode(&lc)
if err != nil {
return fmt.Errorf("comment journal addlike: %v", err)
return err
}
err = l.blobSaveCommentLike(lc, newToken)
if err != nil {
return err
}
default:
return fmt.Errorf("invalid action: %v",
action.Action)
return err
}
}

@@ -95,7 +94,7 @@ func (l *legacyImport) convertCommentsJournal(path, legacyToken string, newToken

func (l *legacyImport) blobSaveCommentAdd(c decredplugin.Comment, newToken []byte) error {
// Get user id from pubkey
usr, err := l.fetchUserByPubKey(c.PublicKey)
_, err := l.fetchUserByPubKey(c.PublicKey)
if err != nil {
return err
}
@@ -110,11 +109,37 @@ func (l *legacyImport) blobSaveCommentAdd(c decredplugin.Comment, newToken []byt
return err
}

// fmt.Println("before verify comment")
// // Verify comment blob signature
// cv1 := v1.Comment{
// UserID: usr.ID,
// Username: "",
// State: v1.RecordStateT(comments.RecordStateVetted),
// Token: c.Token,
// ParentID: uint32(pid),
// Comment: c.Comment,
// PublicKey: c.PublicKey,
// Signature: c.Signature,
// CommentID: uint32(cid),
// Timestamp: c.Timestamp,
// Receipt: c.Receipt,
// Downvotes: 0,
// Upvotes: 0,
// Deleted: false,
// Reason: "",
// }
// err = client.CommentVerify(cv1, serverPubkey)
// if err != nil {
// return err
// }

// Create comment add blob entry
cn := &comments.CommentAdd{
UserID: usr.ID,
// UserID: usr.ID,
// Token: hex.EncodeToString(newToken),
UserID: "810aefda-1e13-4ebc-a9e8-4162435eca7b",
State: comments.RecordStateVetted,
Token: hex.EncodeToString(newToken),
Token: c.Token,
ParentID: uint32(pid),
Comment: c.Comment,
PublicKey: c.PublicKey,
@@ -139,6 +164,9 @@ func (l *legacyImport) blobSaveCommentAdd(c decredplugin.Comment, newToken []byt

be := store.NewBlobEntry(hint, data)
err = l.tstore.BlobSave(newToken, be)
if err != nil && err.Error() == "duplicate payload" {
return nil
}
if err != nil {
return err
}
@@ -148,7 +176,7 @@ func (l *legacyImport) blobSaveCommentAdd(c decredplugin.Comment, newToken []byt

func (l *legacyImport) blobSaveCommentDel(cc decredplugin.CensorComment, newToken []byte, parentID string) error {
// Get user ID from pubkey
usr, err := l.fetchUserByPubKey(cc.PublicKey)
_, err := l.fetchUserByPubKey(cc.PublicKey)
if err != nil {
return err
}
@@ -167,15 +195,15 @@ func (l *legacyImport) blobSaveCommentDel(cc decredplugin.CensorComment, newToke

// Create comment del blob entry
cd := &comments.CommentDel{
Token: hex.EncodeToString(newToken),
Token: cc.Token,
State: comments.RecordStateVetted,
CommentID: uint32(cid),
Reason: cc.Reason,
PublicKey: cc.PublicKey,
Signature: cc.Signature,

ParentID: uint32(pid),
UserID: usr.ID,
UserID: "810aefda-1e13-4ebc-a9e8-4162435eca7b",
Timestamp: cc.Timestamp,
Receipt: cc.Receipt,
}
@@ -202,7 +230,7 @@ func (l *legacyImport) blobSaveCommentDel(cc decredplugin.CensorComment, newToke

func (l *legacyImport) blobSaveCommentLike(lc likeCommentV1, newToken []byte) error {
// Get user ID from pubkey
usr, err := l.fetchUserByPubKey(lc.PublicKey)
_, err := l.fetchUserByPubKey(lc.PublicKey)
if err != nil {
return err
}
@@ -226,9 +254,9 @@ func (l *legacyImport) blobSaveCommentLike(lc likeCommentV1, newToken []byte) er

// Create comment vote blob entry
c := &comments.CommentVote{
UserID: usr.ID,
UserID: "810aefda-1e13-4ebc-a9e8-4162435eca7b",
State: comments.RecordStateVetted,
Token: hex.EncodeToString(newToken),
Token: lc.Token,
CommentID: uint32(cid),
Vote: vote,
PublicKey: lc.PublicKey,
@@ -251,7 +279,7 @@ func (l *legacyImport) blobSaveCommentLike(lc likeCommentV1, newToken []byte) er
}
be := store.NewBlobEntry(hint, data)
err = l.tstore.BlobSave(newToken, be)
if err != nil && err.Error() == "duplicate blob" {
if err != nil && err.Error() == "duplicate payload" {
return nil
}
if err != nil {
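
For orientation: the hunks above show the switch on action.Action but not the decoder setup, which is folded out of the diff. Each comments journal line is a JSON action record followed immediately by a JSON payload, both read from the same decoder. The sketch below is illustrative; the journalAction type and decodeJournalLine are hypothetical names, and the real parser decodes the payload into decredplugin.Comment, decredplugin.CensorComment, or likeCommentV1 depending on the action.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// journalAction is illustrative; the real type lives in the legacy journal code.
type journalAction struct {
	Version string `json:"version"`
	Action  string `json:"action"` // "add", "del", or "addlike"
}

// decodeJournalLine reads the action record and then the raw payload that
// follows it on the same line.
func decodeJournalLine(line string) (string, json.RawMessage, error) {
	d := json.NewDecoder(strings.NewReader(line))

	var action journalAction
	if err := d.Decode(&action); err != nil {
		return "", nil, err
	}

	var payload json.RawMessage
	if err := d.Decode(&payload); err != nil {
		return "", nil, err
	}

	switch action.Action {
	case "add", "del", "addlike":
		return action.Action, payload, nil
	default:
		return "", nil, fmt.Errorf("invalid journal action: %v", action.Action)
	}
}

func main() {
	line := `{"version":"1","action":"add"}{"token":"abc123"}`
	action, payload, err := decodeJournalLine(line)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(action, string(payload))
}
```
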
94 changes: 94 additions & 0 deletions politeiad/cmd/legacyimport/dcrdata.go
@@ -0,0 +1,94 @@
package main

import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"

dcrdata "github.com/decred/dcrdata/v6/api/types"
)

// batchTransactions fetches the trimmed transaction data for the given
// transaction hashes from dcrdata.
func batchTransactions(hashes []string) ([]dcrdata.TrimmedTx, error) {
// Request body is dcrdataapi.Txns marshalled to JSON
reqBody, err := json.Marshal(dcrdata.Txns{
Transactions: hashes,
})
if err != nil {
return nil, err
}

// Make the POST request
url := "https://dcrdata.decred.org/api/txs/trimmed"
r, err := http.Post(url, "application/json; charset=utf-8",
bytes.NewReader(reqBody))
if err != nil {
return nil, err
}
defer r.Body.Close()

if r.StatusCode != http.StatusOK {
body, err := ioutil.ReadAll(r.Body)
if err != nil {
return nil, fmt.Errorf("dcrdata error: %v %v %v",
r.StatusCode, url, err)
}
return nil, fmt.Errorf("dcrdata error: %v %v %s",
r.StatusCode, url, body)
}

// Unmarshal the response
var ttx []dcrdata.TrimmedTx
decoder := json.NewDecoder(r.Body)
if err := decoder.Decode(&ttx); err != nil {
return nil, err
}
return ttx, nil
}

func largestCommitmentAddresses(hashes []string) (map[string]largestCommitmentResult, error) {
// Batch request all of the transaction info from dcrdata.
ttxs, err := batchTransactions(hashes)
if err != nil {
return nil, err
}

// Find largest commitment address for each transaction.
addrs := make(map[string]largestCommitmentResult, len(hashes))

for i := range ttxs {
// Best is address with largest commit amount.
var bestAddr string
var bestAmount float64
for _, v := range ttxs[i].Vout {
if v.ScriptPubKeyDecoded.CommitAmt == nil {
continue
}
if *v.ScriptPubKeyDecoded.CommitAmt > bestAmount {
if len(v.ScriptPubKeyDecoded.Addresses) == 0 {
// No decoded addresses for this output; log and skip it.
fmt.Printf("unexpected addresses length: %v\n",
ttxs[i].TxID)
continue
}
bestAddr = v.ScriptPubKeyDecoded.Addresses[0]
bestAmount = *v.ScriptPubKeyDecoded.CommitAmt
}
}

if bestAddr == "" || bestAmount == 0.0 {
addrs[ttxs[i].TxID] = largestCommitmentResult{
err: fmt.Errorf("no best commitment address found: %v",
ttxs[i].TxID),
}
continue
}
addrs[ttxs[i].TxID] = largestCommitmentResult{
bestAddr: bestAddr,
}
}

return addrs, nil
}
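
For context, a hypothetical call site for the helper above. It assumes this function sits in the same package as dcrdata.go (so largestCommitmentResult and the fmt import are in scope); the ticket hashes are placeholders, not real transactions.

```go
// printLargestCommitments looks up the largest commitment address for each
// ticket hash and reports per-ticket failures without aborting the batch.
func printLargestCommitments(hashes []string) error {
	addrs, err := largestCommitmentAddresses(hashes)
	if err != nil {
		return err
	}
	for hash, res := range addrs {
		if res.err != nil {
			// Per-ticket failures are recorded in the result map
			// rather than failing the whole lookup.
			fmt.Printf("ticket %v: %v\n", hash, res.err)
			continue
		}
		fmt.Printf("ticket %v -> %v\n", hash, res.bestAddr)
	}
	return nil
}
```
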
