Skip to content

Commit

Permalink
Merge pull request #1949 from blockstack/next
Browse files Browse the repository at this point in the history
Update master
  • Loading branch information
lgalabru authored Oct 6, 2020
2 parents 75e61de + 548c1da commit 5b816c2
Show file tree
Hide file tree
Showing 36 changed files with 1,564 additions and 422 deletions.
4 changes: 4 additions & 0 deletions src/burnchains/bitcoin/blocks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,10 @@ impl BurnHeaderIPC for BitcoinHeaderIPC {
/// Burnchain block height of this header (plain accessor over the cached field).
fn height(&self) -> u64 {
self.block_height
}

/// Raw 32-byte hash of the underlying Bitcoin block header.
/// Extracts the inner byte array (`.0`) from `bitcoin_hash()` — presumably a
/// double-SHA256 (`Sha256dHash`) per the bitcoin deps; confirm against `deps::bitcoin`.
fn header_hash(&self) -> [u8; 32] {
self.block_header.header.bitcoin_hash().0
}
}

#[derive(Debug, Clone, PartialEq)]
Expand Down
63 changes: 55 additions & 8 deletions src/burnchains/burnchain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@
along with Blockstack. If not, see <http://www.gnu.org/licenses/>.
*/

use deps;
use deps::bitcoin::util::hash::Sha256dHash as BitcoinSha256dHash;

use std::fs;
use std::path::PathBuf;
use std::sync::mpsc::sync_channel;
Expand All @@ -43,7 +46,7 @@ use burnchains::{
use burnchains::db::BurnchainDB;

use burnchains::indexer::{
BurnBlockIPC, BurnchainBlockDownloader, BurnchainBlockParser, BurnchainIndexer,
BurnBlockIPC, BurnHeaderIPC, BurnchainBlockDownloader, BurnchainBlockParser, BurnchainIndexer,
};

use burnchains::bitcoin::address::address_type_to_version_byte;
Expand Down Expand Up @@ -791,9 +794,16 @@ impl Burnchain {
/// Top-level convenience wrapper around `sync_with_indexer`: constructs a fresh
/// indexer of type `I` and runs a sync through it.
///
/// `target_block_height_opt` caps how high to sync (if given); `max_blocks_opt`
/// caps how many blocks to download in this pass. Both are forwarded verbatim.
///
/// Returns the block height of the resulting burnchain tip, or the underlying
/// `burnchain_error` on failure.
pub fn sync<I>(
&mut self,
comms: &CoordinatorChannels,
target_block_height_opt: Option<u64>,
max_blocks_opt: Option<u64>,
) -> Result<u64, burnchain_error>
where
I: BurnchainIndexer + 'static,
{
let mut block_indexer: I = self.make_indexer()?;
self.sync_with_indexer(
&mut block_indexer,
comms.clone(),
target_block_height_opt,
max_blocks_opt,
)
.map(|chain_tip| chain_tip.block_height)
}

Expand Down Expand Up @@ -1006,12 +1016,16 @@ impl Burnchain {
}

/// Top-level burnchain sync.
/// Returns the burnchain block header for the new burnchain tip
/// Returns the burnchain block header for the new burnchain tip, which will be _at least_ as
/// high as target_block_height_opt (if given), or whatever is currently at the tip of the
/// burnchain DB.
/// If this method returns Err(burnchain_error::TrySyncAgain), then call this method again.
pub fn sync_with_indexer<I>(
&mut self,
indexer: &mut I,
coord_comm: CoordinatorChannels,
target_block_height_opt: Option<u64>,
max_blocks_opt: Option<u64>,
) -> Result<BurnchainBlockHeader, burnchain_error>
where
I: BurnchainIndexer + 'static,
Expand Down Expand Up @@ -1040,11 +1054,9 @@ impl Burnchain {
// get latest headers.
debug!("Sync headers from {}", sync_height);

let end_block = indexer.sync_headers(sync_height, None)?;
let mut start_block = match sync_height {
0 => 0,
_ => sync_height,
};
// fetch all headers, no matter what
let mut end_block = indexer.sync_headers(sync_height, None)?;
let mut start_block = sync_height;
if db_height < start_block {
start_block = db_height;
}
Expand All @@ -1053,6 +1065,41 @@ impl Burnchain {
"Sync'ed headers from {} to {}. DB at {}",
start_block, end_block, db_height
);

if let Some(target_block_height) = target_block_height_opt {
if target_block_height < end_block {
debug!(
"Will download up to max burn block height {}",
target_block_height
);
end_block = target_block_height;
}
}

if let Some(max_blocks) = max_blocks_opt {
if start_block + max_blocks < end_block {
debug!(
"Will download only {} blocks (up to block height {})",
max_blocks,
start_block + max_blocks
);
end_block = start_block + max_blocks;
}
}

if end_block < start_block {
// nothing to do -- go get the burnchain block data at that height
let mut hdrs = indexer.read_headers(end_block, end_block + 1)?;
if let Some(hdr) = hdrs.pop() {
debug!("Nothing to do; already have blocks up to {}", end_block);
let bhh =
BurnchainHeaderHash::from_bitcoin_hash(&BitcoinSha256dHash(hdr.header_hash()));
return burnchain_db
.get_burnchain_block(&bhh)
.map(|block_data| block_data.header);
}
}

if start_block == db_height && db_height == end_block {
// all caught up
return Ok(burn_chain_tip);
Expand Down
21 changes: 13 additions & 8 deletions src/burnchains/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,10 @@ use chainstate::burn::operations::BlockstackOperationType;

use chainstate::stacks::index::MarfTrieId;

use util::db::{query_row, query_rows, u64_to_sql, Error as DBError, FromColumn, FromRow};
use util::db::{
query_row, query_rows, tx_begin_immediate, tx_busy_handler, u64_to_sql, Error as DBError,
FromColumn, FromRow,
};

pub struct BurnchainDB {
conn: Connection,
Expand Down Expand Up @@ -185,10 +188,12 @@ impl BurnchainDB {
}
};

let mut db = BurnchainDB {
conn: Connection::open_with_flags(path, open_flags)
.expect(&format!("FAILED to open: {}", path)),
};
let conn = Connection::open_with_flags(path, open_flags)
.expect(&format!("FAILED to open: {}", path));

conn.busy_handler(Some(tx_busy_handler))?;

let mut db = BurnchainDB { conn };

if create_flag {
let db_tx = db.tx_begin()?;
Expand Down Expand Up @@ -216,14 +221,14 @@ impl BurnchainDB {
OpenFlags::SQLITE_OPEN_READ_ONLY
};
let conn = Connection::open_with_flags(path, open_flags)?;
conn.busy_handler(Some(tx_busy_handler))?;

Ok(BurnchainDB { conn })
}

/// Begin a transaction on the underlying connection, wrapped in a
/// `BurnchainDBTransaction`. Uses `tx_begin_immediate` — presumably an
/// immediate-mode (write-lock-up-front) SQLite transaction so concurrent
/// writers fail fast rather than deadlock; confirm in `util::db`.
fn tx_begin<'a>(&'a mut self) -> Result<BurnchainDBTransaction<'a>, BurnchainError> {
let sql_tx = tx_begin_immediate(&mut self.conn)?;
// Field-init shorthand: `sql_tx` instead of the redundant `sql_tx: sql_tx`.
Ok(BurnchainDBTransaction { sql_tx })
}

pub fn get_canonical_chain_tip(&self) -> Result<BurnchainBlockHeader, BurnchainError> {
Expand Down
1 change: 1 addition & 0 deletions src/burnchains/indexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ pub trait BurnHeaderIPC {

fn height(&self) -> u64;
fn header(&self) -> Self::H;
fn header_hash(&self) -> [u8; 32];
}

pub trait BurnBlockIPC {
Expand Down
2 changes: 1 addition & 1 deletion src/chainstate/burn/operations/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ impl From<db_error> for Error {

#[derive(Debug, PartialEq, Clone, Eq, Serialize, Deserialize)]
pub struct LeaderBlockCommitOp {
pub block_header_hash: BlockHeaderHash, // hash of Stacks block header (double-sha256)
pub block_header_hash: BlockHeaderHash, // hash of Stacks block header (sha512/256)

pub new_seed: VRFSeed, // new seed for this block
pub parent_block_ptr: u32, // block height of the block that contains the parent block hash
Expand Down
4 changes: 2 additions & 2 deletions src/chainstate/coordinator/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -432,8 +432,8 @@ impl<'a, T: BlockEventDispatcher, N: CoordinatorNotices, U: RewardSetProvider>
self.notifier.notify_sortition_processed();

debug!(
"Sortition processed: {} (tip {})",
&sortition_id, &next_snapshot.burn_header_hash
"Sortition processed: {} (tip {} height {})",
&sortition_id, &next_snapshot.burn_header_hash, next_snapshot.block_height
);

if sortition_tip_snapshot.block_height < header.block_height {
Expand Down
1 change: 1 addition & 0 deletions src/chainstate/coordinator/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1782,6 +1782,7 @@ fn preprocess_block(
&my_sortition.consensus_hash,
&block,
&parent_consensus_hash,
5,
)
.unwrap();
}
2 changes: 1 addition & 1 deletion src/chainstate/stacks/boot/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3202,7 +3202,7 @@ pub mod test {
})
.unwrap();

eprintln!("\ntenure: {}\nreward cycle: {}\nmin-uSTX: {}\naddrs: {:?}\ntotal_liquid_ustx: {}\ntotal-stacked: {}\ntotal-stacked next: {}\n",
eprintln!("\ntenure: {}\nreward cycle: {}\nmin-uSTX: {}\naddrs: {:?}\ntotal_liquid_ustx: {}\ntotal-stacked: {}\ntotal-stacked next: {}\n",
tenure_id, cur_reward_cycle, min_ustx, &reward_addrs, total_liquid_ustx, total_stacked, total_stacked_next);

if tenure_id <= 1 {
Expand Down
Loading

0 comments on commit 5b816c2

Please sign in to comment.