Merge branch 'master' into re-gius/hide-type-info-for-nonce
re-gius authored Nov 25, 2024
2 parents d328a23 + c422d8b commit b4c048c
Showing 16 changed files with 155 additions and 52 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build-misc.yml
@@ -44,7 +44,7 @@ jobs:
forklift cargo check -p rococo-runtime
forklift cargo check -p polkadot-test-runtime
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -73,7 +73,7 @@ jobs:
cd ./substrate/bin/utils/subkey
forklift cargo build --locked --release
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
4 changes: 2 additions & 2 deletions .github/workflows/check-frame-omni-bencher.yml
@@ -41,7 +41,7 @@ jobs:
forklift cargo build --locked --quiet --release -p asset-hub-westend-runtime --features runtime-benchmarks
forklift cargo run --locked --release -p frame-omni-bencher --quiet -- v1 benchmark pallet --runtime target/release/wbuild/asset-hub-westend-runtime/asset_hub_westend_runtime.compact.compressed.wasm --all --steps 2 --repeat 1 --quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -99,7 +99,7 @@ jobs:
echo "Running command: $cmd"
eval "$cmd"
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
2 changes: 1 addition & 1 deletion .github/workflows/checks-quick.yml
@@ -30,7 +30,7 @@ jobs:
id: required
run: cargo +nightly fmt --all -- --check
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
6 changes: 3 additions & 3 deletions .github/workflows/checks.yml
@@ -36,7 +36,7 @@ jobs:
cargo clippy --all-targets --locked --workspace --quiet
cargo clippy --all-targets --all-features --locked --workspace --quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -62,7 +62,7 @@ jobs:
# experimental code may rely on try-runtime and vice-versa
forklift cargo check --locked --all --features try-runtime,experimental --quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -91,7 +91,7 @@ jobs:
./check-features-variants.sh
cd -
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
4 changes: 2 additions & 2 deletions .github/workflows/docs.yml
@@ -29,7 +29,7 @@ jobs:
env:
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -69,7 +69,7 @@ jobs:
retention-days: 1
if-no-files-found: error
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
8 changes: 4 additions & 4 deletions .github/workflows/tests-linux-stable.yml
@@ -37,7 +37,7 @@ jobs:
id: required
run: WASM_BUILD_NO_COLOR=1 forklift cargo test -p staging-node-cli --release --locked -- --ignored
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -63,7 +63,7 @@ jobs:
id: required
run: forklift cargo nextest run --workspace --features runtime-benchmarks benchmark --locked --cargo-profile testnet --cargo-quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -113,7 +113,7 @@ jobs:
if: ${{ matrix.partition == '1/3' }}
run: forklift cargo nextest run -p sp-api-test --features enable-staging-api --cargo-quiet
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -155,7 +155,7 @@ jobs:
--filter-expr " !test(/all_security_features_work/) - test(/nonexistent_cache_dir/)" \
--partition count:${{ matrix.partition }} \
- name: Stop all workflows if failed
if: ${{ failure() && steps.required.conclusion == 'failure' }}
if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
8 changes: 8 additions & 0 deletions .github/workflows/tests-misc.yml
@@ -165,12 +165,14 @@ jobs:

- name: Download artifact (master run)
uses: actions/[email protected]
continue-on-error: true
with:
name: cargo-check-benches-master-${{ github.sha }}
path: ./artifacts/master

- name: Download artifact (current run)
uses: actions/[email protected]
continue-on-error: true
with:
name: cargo-check-benches-current-${{ github.sha }}
path: ./artifacts/current
@@ -183,6 +185,12 @@
exit 0
fi
# fail if no artifacts
if [ ! -d ./artifacts/master ] || [ ! -d ./artifacts/current ]; then
echo "No artifacts found"
exit 1
fi
docker run --rm \
-v $PWD/artifacts/master:/artifacts/master \
-v $PWD/artifacts/current:/artifacts/current \
1 change: 1 addition & 0 deletions Cargo.lock

1 change: 1 addition & 0 deletions cumulus/client/service/Cargo.toml
@@ -11,6 +11,7 @@ workspace = true

[dependencies]
futures = { workspace = true }
futures-timer = { workspace = true }

# Substrate
sc-client-api = { workspace = true, default-features = true }
9 changes: 7 additions & 2 deletions polkadot/runtime/common/src/paras_sudo_wrapper.rs
@@ -24,7 +24,7 @@ pub use pallet::*;
use polkadot_primitives::Id as ParaId;
use polkadot_runtime_parachains::{
configuration, dmp, hrmp,
paras::{self, AssignCoretime, ParaGenesisArgs},
paras::{self, AssignCoretime, ParaGenesisArgs, ParaKind},
ParaLifecycle,
};

@@ -80,10 +80,15 @@ pub mod pallet {
genesis: ParaGenesisArgs,
) -> DispatchResult {
ensure_root(origin)?;

let assign_coretime = genesis.para_kind == ParaKind::Parachain;

polkadot_runtime_parachains::schedule_para_initialize::<T>(id, genesis)
.map_err(|_| Error::<T>::ParaAlreadyExists)?;

T::AssignCoretime::assign_coretime(id)?;
if assign_coretime {
T::AssignCoretime::assign_coretime(id)?;
}

Ok(())
}
16 changes: 13 additions & 3 deletions polkadot/xcm/pallet-xcm/src/lib.rs
@@ -363,7 +363,10 @@ pub mod pallet {
let message: Xcm<()> = (*message).try_into().map_err(|()| Error::<T>::BadVersion)?;

let message_id = Self::send_xcm(interior, dest.clone(), message.clone())
.map_err(Error::<T>::from)?;
.map_err(|error| {
tracing::error!(target: "xcm::pallet_xcm::send", ?error, ?dest, ?message, "XCM send failed with error");
Error::<T>::from(error)
})?;
let e = Event::Sent { origin: origin_location, destination: dest, message, message_id };
Self::deposit_event(e);
Ok(message_id)
@@ -1800,7 +1803,10 @@ impl<T: Config> Pallet<T> {

if let Some(remote_xcm) = remote_xcm {
let (ticket, price) = validate_send::<T::XcmRouter>(dest.clone(), remote_xcm.clone())
.map_err(Error::<T>::from)?;
.map_err(|error| {
tracing::error!(target: "xcm::pallet_xcm::execute_xcm_transfer", ?error, ?dest, ?remote_xcm, "XCM validate_send failed with error");
Error::<T>::from(error)
})?;
if origin != Here.into_location() {
Self::charge_fees(origin.clone(), price.clone()).map_err(|error| {
tracing::error!(
@@ -1810,7 +1816,11 @@ impl<T: Config> Pallet<T> {
Error::<T>::FeesNotMet
})?;
}
let message_id = T::XcmRouter::deliver(ticket).map_err(Error::<T>::from)?;
let message_id = T::XcmRouter::deliver(ticket)
.map_err(|error| {
tracing::error!(target: "xcm::pallet_xcm::execute_xcm_transfer", ?error, ?dest, ?remote_xcm, "XCM deliver failed with error");
Error::<T>::from(error)
})?;

let e = Event::Sent { origin, destination: dest, message: remote_xcm, message_id };
Self::deposit_event(e);
2 changes: 1 addition & 1 deletion polkadot/xcm/xcm-builder/src/process_xcm_message.rs
@@ -58,7 +58,7 @@ impl<
let message = Xcm::<Call>::try_from(versioned_message).map_err(|_| {
log::trace!(
target: LOG_TARGET,
"Failed to convert `VersionedXcm` into `XcmV3`.",
"Failed to convert `VersionedXcm` into `xcm::prelude::Xcm`!",
);

ProcessMessageError::Unsupported
10 changes: 10 additions & 0 deletions prdoc/pr_6605.prdoc
@@ -0,0 +1,10 @@
title: Notify telemetry only every second about the tx pool status
doc:
- audience: Node Operator
description: |-
Before this change, telemetry was notified for every imported transaction. When a lot of transactions were imported, the import notification channel filled up. The underlying problem was that the `status` call read-locks the `validated_pool`, which is write-locked by the internal submission logic; thus, submitting and status reading were interfering with each other.
crates:
- name: cumulus-client-service
bump: patch
- name: sc-service
bump: patch
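The change to `substrate/client/service/src/builder.rs` further below implements the throttling this description refers to. As a rough, self-contained sketch of the same idea (assuming the `futures` and `futures-timer` crates; `throttled_reports`, `report_status`, and the `u64` item type are illustrative placeholders, not names from the diff):

```rust
// Minimal sketch: report status at most once per interval instead of on
// every incoming event.
use futures::{select, FutureExt, Stream, StreamExt};
use std::time::Duration;

async fn throttled_reports<S: Stream<Item = u64> + Unpin>(events: S, report_status: impl Fn()) {
    const INTERVAL: Duration = Duration::from_secs(1);

    let mut events = events.fuse();
    let mut timer = futures_timer::Delay::new(INTERVAL).fuse();
    let mut dirty = false;

    loop {
        select! {
            event = events.next() => {
                // Stop when the event stream ends.
                if event.is_none() { return }
                // Remember that something happened, but do not report yet.
                dirty = true;
            },
            _ = timer => {
                // Re-arm the timer and report at most once per interval.
                timer = futures_timer::Delay::new(INTERVAL).fuse();
                if dirty {
                    dirty = false;
                    report_status();
                }
            }
        }
    }
}
```

This mirrors the structure of the `select!` loop added to `propagate_transaction_notifications` in the diff that follows.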
58 changes: 41 additions & 17 deletions substrate/client/service/src/builder.rs
@@ -25,7 +25,7 @@ use crate::{
start_rpc_servers, BuildGenesisBlock, GenesisBlockBuilder, RpcHandlers, SpawnTaskHandle,
TaskManager, TransactionPoolAdapter,
};
use futures::{future::ready, FutureExt, StreamExt};
use futures::{select, FutureExt, StreamExt};
use jsonrpsee::RpcModule;
use log::info;
use prometheus_endpoint::Registry;
@@ -90,7 +90,11 @@ use sp_consensus::block_validation::{
use sp_core::traits::{CodeExecutor, SpawnNamed};
use sp_keystore::KeystorePtr;
use sp_runtime::traits::{Block as BlockT, BlockIdTo, NumberFor, Zero};
use std::{str::FromStr, sync::Arc, time::SystemTime};
use std::{
str::FromStr,
sync::Arc,
time::{Duration, SystemTime},
};

/// Full client type.
pub type TFullClient<TBl, TRtApi, TExec> =
@@ -577,22 +581,42 @@ pub async fn propagate_transaction_notifications<Block, ExPool>(
Block: BlockT,
ExPool: MaintainedTransactionPool<Block = Block, Hash = <Block as BlockT>::Hash>,
{
const TELEMETRY_INTERVAL: Duration = Duration::from_secs(1);

// transaction notifications
transaction_pool
.import_notification_stream()
.for_each(move |hash| {
tx_handler_controller.propagate_transaction(hash);
let status = transaction_pool.status();
telemetry!(
telemetry;
SUBSTRATE_INFO;
"txpool.import";
"ready" => status.ready,
"future" => status.future,
);
ready(())
})
.await;
let mut notifications = transaction_pool.import_notification_stream().fuse();
let mut timer = futures_timer::Delay::new(TELEMETRY_INTERVAL).fuse();
let mut tx_imported = false;

loop {
select! {
notification = notifications.next() => {
let Some(hash) = notification else { return };

tx_handler_controller.propagate_transaction(hash);

tx_imported = true;
},
_ = timer => {
timer = futures_timer::Delay::new(TELEMETRY_INTERVAL).fuse();

if !tx_imported {
continue;
}

tx_imported = false;
let status = transaction_pool.status();

telemetry!(
telemetry;
SUBSTRATE_INFO;
"txpool.import";
"ready" => status.ready,
"future" => status.future,
);
}
}
}
}

/// Initialize telemetry with provided configuration and return telemetry handle
15 changes: 13 additions & 2 deletions substrate/frame/support/procedural/src/runtime/parse/pallet.rs
@@ -21,7 +21,7 @@ use crate::{
};
use frame_support_procedural_tools::get_doc_literals;
use quote::ToTokens;
use syn::{punctuated::Punctuated, token, Error};
use syn::{punctuated::Punctuated, spanned::Spanned, token, Error};

impl Pallet {
pub fn try_from(
@@ -78,7 +78,18 @@ impl Pallet {
})
.collect();

let cfg_pattern = vec![];
let cfg_pattern = item
.attrs
.iter()
.filter(|attr| attr.path().segments.first().map_or(false, |s| s.ident == "cfg"))
.map(|attr| {
attr.parse_args_with(|input: syn::parse::ParseStream| {
let input = input.parse::<proc_macro2::TokenStream>()?;
cfg_expr::Expression::parse(&input.to_string())
.map_err(|e| syn::Error::new(attr.span(), e.to_string()))
})
})
.collect::<syn::Result<Vec<_>>>()?;

let docs = get_doc_literals(&item.attrs);
