Skip to content

Commit

Permalink
Merge pull request #2199 from blockstack/feat/clarity-db-unification
Browse files Browse the repository at this point in the history
Clarity DB Fixes
  • Loading branch information
kantai authored Dec 22, 2020
2 parents 56c77a3 + 6287a43 commit 9aa7549
Show file tree
Hide file tree
Showing 28 changed files with 1,058 additions and 848 deletions.
2 changes: 1 addition & 1 deletion .github/actions/bitcoin-int-tests/Dockerfile.bitcoin-tests
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@ RUN cd / && tar -xvzf bitcoin-0.20.0-x86_64-linux-gnu.tar.gz
RUN ln -s /bitcoin-0.20.0/bin/bitcoind /bin/

ENV BITCOIND_TEST 1
RUN cd testnet/stacks-node && cargo test --release --features prod-genesis-chainstate -- --test-threads 1 --ignored
RUN cd testnet/stacks-node && cargo test -- --test-threads 1 --ignored
15 changes: 15 additions & 0 deletions .github/actions/bitcoin-int-tests/Dockerfile.large-genesis
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Integration-test image: runs one large Stacks integration test against the
# full (non-sampled) production genesis chainstate, driving a real bitcoind.
FROM rust:buster

WORKDIR /src

COPY . .

# Pre-compile all workspace test binaries so the final RUN only executes tests.
RUN cargo test --no-run --workspace

# Fetch and unpack bitcoind 0.20.0, which the integration tests drive.
RUN cd / && wget https://bitcoin.org/bin/bitcoin-core-0.20.0/bitcoin-0.20.0-x86_64-linux-gnu.tar.gz
RUN cd / && tar -xvzf bitcoin-0.20.0-x86_64-linux-gnu.tar.gz

RUN ln -s /bitcoin-0.20.0/bin/bitcoind /bin/

# NOTE(review): presumably gates the bitcoind-dependent tests on — confirm
# against the test harness.
ENV BITCOIND_TEST 1
# Run the single full-genesis integration test (prod-genesis-chainstate
# feature), release mode, single-threaded; it is normally #[ignore]d.
RUN cd testnet/stacks-node && cargo test --release --features prod-genesis-chainstate -- --test-threads 1 --ignored neon_integrations::bitcoind_integration_test
22 changes: 17 additions & 5 deletions .github/workflows/stacks-blockchain.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,22 @@ jobs:
status: STARTING
color: warning

# Run tests
test:
# Run full genesis test
full-genesis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run bitcoin tests
- name: Single full genesis integration test
env:
DOCKER_BUILDKIT: 1
run: docker build -f ./.github/actions/bitcoin-int-tests/Dockerfile.large-genesis .

# Run sampled genesis tests
sampled-genesis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: All integration tests with sampled genesis
env:
DOCKER_BUILDKIT: 1
run: docker build -f ./.github/actions/bitcoin-int-tests/Dockerfile.bitcoin-tests .
Expand Down Expand Up @@ -156,7 +166,8 @@ jobs:
outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }}
needs:
- test
- sampled-genesis
- full-genesis
- dist
- build-publish
- build-publish-stretch
Expand Down Expand Up @@ -208,7 +219,8 @@ jobs:
runs-on: ubuntu-latest
needs:
- notify-start
- test
- sampled-genesis
- full-genesis
- dist
- build-publish
- build-publish-stretch
Expand Down
20 changes: 20 additions & 0 deletions src/address/c32.rs
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,11 @@ fn c32_normalize(input_str: &str) -> String {
}

fn c32_decode(input_str: &str) -> Result<Vec<u8>, Error> {
// must be ASCII
if !input_str.is_ascii() {
return Err(Error::InvalidCrockford32);
}

let mut result = vec![];
let mut carry: u16 = 0;
let mut carry_bits = 0; // can be up to 5
Expand Down Expand Up @@ -173,6 +178,11 @@ fn c32_check_encode(version: u8, data: &[u8]) -> Result<String, Error> {
}

fn c32_check_decode(check_data_unsanitized: &str) -> Result<(u8, Vec<u8>), Error> {
// must be ASCII
if !check_data_unsanitized.is_ascii() {
return Err(Error::InvalidCrockford32);
}

if check_data_unsanitized.len() < 2 {
return Err(Error::InvalidCrockford32);
}
Expand Down Expand Up @@ -386,4 +396,14 @@ mod test {
assert_eq!(decoded_bytes, expected_bytes);
}
}

#[test]
fn test_ascii_only() {
    // c32 input must be ASCII: the address below smuggles in a non-ASCII
    // codepoint (U+1D7D8, mathematical double-struck zero, a look-alike for
    // '0') and must be rejected with InvalidCrockford32.
    // assert!(matches!(..)) replaces the original match-with-assert!(false):
    // a failure now reports the assertion site instead of a bare panic.
    assert!(matches!(
        c32_address_decode("S\u{1D7D8}2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKPVKG2CE"),
        Err(Error::InvalidCrockford32)
    ));
}
}
1 change: 1 addition & 0 deletions src/blockstack_cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -770,6 +770,7 @@ fn main_handler(mut argv: Vec<String>) -> Result<String, CliError> {
"addresses" => get_addresses(args, tx_version),
"decode-tx" => decode_transaction(args, tx_version),
"decode-block" => decode_block(args, tx_version),
"decode-microblock" => decode_microblock(args, tx_version),
_ => Err(CliError::Usage),
}
} else {
Expand Down
10 changes: 4 additions & 6 deletions src/chainstate/burn/operations/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,12 +118,10 @@ impl fmt::Display for Error {
Error::BlockCommitBadModulus => {
write!(f, "Block commit included a bad burn block height modulus")
}
Error::MissedBlockCommit(_) => {
write!(
f,
"Block commit included in a burn block that was not intended"
)
}
Error::MissedBlockCommit(_) => write!(
f,
"Block commit included in a burn block that was not intended"
),
Error::LeaderKeyAlreadyRegistered => {
write!(f, "Leader key has already been registered")
}
Expand Down
31 changes: 0 additions & 31 deletions src/chainstate/coordinator/comm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,6 @@ pub struct CoordinatorChannels {
stacks_blocks_processed: Arc<AtomicU64>,
/// how many sortitions have been processed by this Coordinator thread since startup?
sortitions_processed: Arc<AtomicU64>,
/// Horrific kludgy clarity DB lock that should be removed once the Clarity DB doesn't
/// runtime-panic when two threads write to it.
kludgy_temporary_clarity_db_lock: Arc<Mutex<()>>,
}

/// Notification struct for communicating to
Expand All @@ -86,11 +83,6 @@ pub struct CoordinatorReceivers {
signal_wakeup: Arc<Condvar>,
pub stacks_blocks_processed: Arc<AtomicU64>,
pub sortitions_processed: Arc<AtomicU64>,

/// Temporary lock to prevent multiple threads from writing to the (shared) Clarity DB at
/// once. This can be removed once the Clarity DB doesn't runtime-panic when multiple threads
/// try to write to it.
kludgy_temporary_clarity_db_lock: Arc<Mutex<()>>,
}

/// Static struct used to hold all the static methods
Expand Down Expand Up @@ -131,16 +123,6 @@ impl CoordinatorReceivers {
}
signal_bools.receive_signal()
}

/// TODO: remove before mainnet
pub fn kludgy_clarity_db_lock(&self) -> LockResult<MutexGuard<'_, ()>> {
self.kludgy_temporary_clarity_db_lock.lock()
}

/// TODO: remove before mainnet
pub fn kludgy_clarity_db_trylock(&self) -> TryLockResult<MutexGuard<'_, ()>> {
self.kludgy_temporary_clarity_db_lock.try_lock()
}
}

impl CoordinatorChannels {
Expand Down Expand Up @@ -196,16 +178,6 @@ impl CoordinatorChannels {
}
return true;
}

/// TODO: remove before mainnet
pub fn kludgy_clarity_db_lock(&self) -> LockResult<MutexGuard<'_, ()>> {
self.kludgy_temporary_clarity_db_lock.lock()
}

/// TODO: remove before mainnet
pub fn kludgy_clarity_db_trylock(&self) -> TryLockResult<MutexGuard<'_, ()>> {
self.kludgy_temporary_clarity_db_lock.try_lock()
}
}

impl CoordinatorCommunication {
Expand All @@ -220,23 +192,20 @@ impl CoordinatorCommunication {

let stacks_blocks_processed = Arc::new(AtomicU64::new(0));
let sortitions_processed = Arc::new(AtomicU64::new(0));
let kludgy_clarity_db_lock = Arc::new(Mutex::new(()));

let senders = CoordinatorChannels {
signal_bools: signal_bools.clone(),
signal_wakeup: signal_wakeup.clone(),
stacks_blocks_processed: stacks_blocks_processed.clone(),

sortitions_processed: sortitions_processed.clone(),
kludgy_temporary_clarity_db_lock: kludgy_clarity_db_lock.clone(),
};

let rcvrs = CoordinatorReceivers {
signal_bools: signal_bools,
signal_wakeup: signal_wakeup,
stacks_blocks_processed,
sortitions_processed,
kludgy_temporary_clarity_db_lock: kludgy_clarity_db_lock.clone(),
};

(rcvrs, senders)
Expand Down
15 changes: 2 additions & 13 deletions src/chainstate/coordinator/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -289,19 +289,8 @@ impl<'a, T: BlockEventDispatcher>
match comms.wait_on() {
CoordinatorEvents::NEW_STACKS_BLOCK => {
debug!("Received new stacks block notice");
match comms.kludgy_clarity_db_lock() {
Ok(_) => {
if let Err(e) = inst.handle_new_stacks_block() {
warn!("Error processing new stacks block: {:?}", e);
}
}
Err(e) => {
error!(
"FATAL: kludgy temporary Clarity DB lock is poisoned: {:?}",
&e
);
panic!();
}
if let Err(e) = inst.handle_new_stacks_block() {
warn!("Error processing new stacks block: {:?}", e);
}
}
CoordinatorEvents::NEW_BURN_BLOCK => {
Expand Down
22 changes: 12 additions & 10 deletions src/chainstate/stacks/boot/contract_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,25 +119,27 @@ struct TestSimHeadersDB {
impl ClarityTestSim {
pub fn new() -> ClarityTestSim {
let mut marf = MarfedKV::temporary();
marf.begin(
&StacksBlockId::sentinel(),
&StacksBlockId(test_sim_height_to_hash(0)),
);
{
marf.as_clarity_db(&NULL_HEADER_DB, &NULL_BURN_STATE_DB)
let mut store = marf.begin(
&StacksBlockId::sentinel(),
&StacksBlockId(test_sim_height_to_hash(0)),
);

store
.as_clarity_db(&NULL_HEADER_DB, &NULL_BURN_STATE_DB)
.initialize();

let mut owned_env =
OwnedEnvironment::new(marf.as_clarity_db(&NULL_HEADER_DB, &NULL_BURN_STATE_DB));
OwnedEnvironment::new(store.as_clarity_db(&NULL_HEADER_DB, &NULL_BURN_STATE_DB));

for user_key in USER_KEYS.iter() {
owned_env.stx_faucet(
&StandardPrincipalData::from(user_key).into(),
USTX_PER_HOLDER,
);
}
store.test_commit();
}
marf.test_commit();

ClarityTestSim { marf, height: 0 }
}
Expand All @@ -146,7 +148,7 @@ impl ClarityTestSim {
where
F: FnOnce(&mut OwnedEnvironment) -> R,
{
self.marf.begin(
let mut store = self.marf.begin(
&StacksBlockId(test_sim_height_to_hash(self.height)),
&StacksBlockId(test_sim_height_to_hash(self.height + 1)),
);
Expand All @@ -156,11 +158,11 @@ impl ClarityTestSim {
height: self.height + 1,
};
let mut owned_env =
OwnedEnvironment::new(self.marf.as_clarity_db(&headers_db, &NULL_BURN_STATE_DB));
OwnedEnvironment::new(store.as_clarity_db(&headers_db, &NULL_BURN_STATE_DB));
f(&mut owned_env)
};

self.marf.test_commit();
store.test_commit();
self.height += 1;

r
Expand Down
Loading

0 comments on commit 9aa7549

Please sign in to comment.