Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
5945bf8
fix(wallet): calculate asset lock tx fee dynamically based on input c…
lklimek Feb 24, 2026
e06b6ac
feat(ui): add sync status panel to wallet screen
lklimek Feb 24, 2026
87bd1b2
docs: add manual test scenarios for asset lock fee fix
lklimek Feb 24, 2026
76c8501
docs: add manual test scenarios for sync status panel
lklimek Feb 24, 2026
55b929e
Merge branch 'v1.0-dev' into zk-extract/asset-lock-fee-fix
lklimek Feb 24, 2026
b1a6c01
Initial plan
Copilot Feb 24, 2026
2740af1
fix(wallet): defer UTXO removal until asset lock tx is fully built an…
Copilot Feb 24, 2026
32c04e1
Merge remote-tracking branch 'origin/v1.0-dev' into zk-extract/sync-s…
lklimek Feb 24, 2026
68e23d2
Merge remote-tracking branch 'origin/v1.0-dev' into copilot/sub-pr-636
lklimek Feb 24, 2026
497bf21
refactor(wallet): consolidate UTXO removal, DB persistence, and balan…
lklimek Feb 24, 2026
333cb82
fix(wallet): address audit findings from PR #645 review
lklimek Feb 24, 2026
059e444
Merge branch 'v1.0-dev' into zk-extract/sync-status-panel
lklimek Feb 24, 2026
6159ef9
chore: minimal improvement in conn status tooltip
lklimek Feb 24, 2026
88c7cb8
Merge remote-tracking branch 'origin/v1.0-dev' into fix/audit-finding…
lklimek Feb 24, 2026
0b7d7ab
refactor(wallet): simplify remove_selected_utxos to take &Database + …
lklimek Feb 24, 2026
998219d
Merge remote-tracking branch 'origin/fix/audit-findings-645' into zk-…
lklimek Feb 24, 2026
4c7320e
fix(wallet): address remaining audit findings from code review
lklimek Feb 24, 2026
502aad2
Merge branch 'v1.0-dev' into zk-extract/sync-status-panel
lklimek Feb 24, 2026
6ffb0ee
Merge branch 'v1.0-dev' into zk-extract/sync-status-panel
lklimek Feb 24, 2026
9c8cb31
fix(ui): refresh sync info cache after platform balance fetch
lklimek Feb 25, 2026
0406d47
feat(ui): make sync status panel collapsible and dev-mode only
lklimek Feb 25, 2026
dafff57
fix(ui): address PR #642 review findings
lklimek Feb 25, 2026
cb8ceaf
refactor(ui): extract shared SPV phase summary and enrich tooltip
lklimek Feb 25, 2026
13a9891
style: apply nightly rustfmt formatting
lklimek Feb 25, 2026
0e7fe20
fix(ui): address second round of PR review comments
lklimek Feb 25, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
423 changes: 423 additions & 0 deletions docs/ai-design/2026-02-24-sync-status-panel/manual-test-scenarios.md

Large diffs are not rendered by default.

47 changes: 39 additions & 8 deletions src/backend_task/core/create_asset_lock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,14 +33,32 @@ impl AppContext {

// Insert the transaction into waiting for finality
{
let mut proofs = self.transactions_waiting_for_finality.lock().unwrap();
let mut proofs = self
.transactions_waiting_for_finality
.lock()
.map_err(|e| e.to_string())?;
proofs.insert(tx_id, None);
}

// Broadcast the transaction
self.broadcast_raw_transaction(&asset_lock_transaction)
// Broadcast the transaction. If broadcast fails, the UTXOs have already
// been removed from the wallet (inside the transaction builder) but were
// never actually spent on-chain. The caller should handle refreshing
// the wallet so the next UTXO reload reconciles in-memory state with
// the chain. See also: https://github.com/dashpay/dash-evo-tool/issues/657
if let Err(e) = self
.broadcast_raw_transaction(&asset_lock_transaction)
.await
.map_err(|e| format!("Failed to broadcast asset lock transaction: {}", e))?;
{
// Clean up the finality tracking entry
if let Ok(mut proofs) = self.transactions_waiting_for_finality.lock() {
proofs.remove(&tx_id);
} else {
tracing::warn!(
"Failed to clean up finality tracking for tx {tx_id}: Mutex poisoned"
);
}
return Err(format!("Failed to broadcast asset lock transaction: {}", e));
}

Ok(BackendTaskSuccessResult::Message(format!(
"Asset lock transaction broadcast successfully. TX ID: {}",
Expand Down Expand Up @@ -77,14 +95,27 @@ impl AppContext {

// Insert the transaction into waiting for finality
{
let mut proofs = self.transactions_waiting_for_finality.lock().unwrap();
let mut proofs = self
.transactions_waiting_for_finality
.lock()
.map_err(|e| e.to_string())?;
proofs.insert(tx_id, None);
}

// Broadcast the transaction
self.broadcast_raw_transaction(&asset_lock_transaction)
// Broadcast the transaction (see registration path above for cleanup rationale)
if let Err(e) = self
.broadcast_raw_transaction(&asset_lock_transaction)
.await
.map_err(|e| format!("Failed to broadcast asset lock transaction: {}", e))?;
{
if let Ok(mut proofs) = self.transactions_waiting_for_finality.lock() {
proofs.remove(&tx_id);
} else {
tracing::warn!(
"Failed to clean up finality tracking for tx {tx_id}: Mutex poisoned"
);
}
return Err(format!("Failed to broadcast asset lock transaction: {}", e));
}

Ok(BackendTaskSuccessResult::Message(format!(
"Asset lock transaction broadcast successfully. TX ID: {}",
Expand Down
2 changes: 1 addition & 1 deletion src/backend_task/identity/top_up_identity.rs
Original file line number Diff line number Diff line change
Expand Up @@ -448,7 +448,7 @@ impl AppContext {
"Top-up fee mismatch: estimated {} vs actual {} (diff: {})",
estimated_fee,
actual_fee,
actual_fee as i64 - estimated_fee as i64
actual_fee as i128 - estimated_fee as i128
);
}
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ impl AppContext {
}
};

// Step 7: Get wallet, SDK, and derive a fresh change address if needed
// Step 6: Get wallet, SDK, and derive a fresh change address if needed
let (wallet, sdk, change_platform_address) = {
let wallet_arc = {
let wallets = self.wallets.read().unwrap();
Expand Down Expand Up @@ -191,7 +191,7 @@ impl AppContext {
(wallet, sdk, change_platform_address)
};

// Step 8: Fund the destination platform address
// Step 7: Fund the destination platform address
let mut outputs = std::collections::BTreeMap::new();

let fee_strategy = if fee_deduct_from_output {
Expand Down Expand Up @@ -239,7 +239,7 @@ impl AppContext {
.await
.map_err(|e| format!("Failed to fund platform address: {}", e))?;

// Step 9: Refresh platform address balances
// Step 8: Refresh platform address balances
self.fetch_platform_address_balances(seed_hash, PlatformSyncMode::Auto)
.await?;

Expand Down
97 changes: 88 additions & 9 deletions src/context/connection_status.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,14 @@ use crate::backend_task::BackendTaskSuccessResult;
use crate::backend_task::core::{CoreItem, CoreTask};
use crate::components::core_zmq_listener::ZMQConnectionEvent;
use crate::spv::{CoreBackendMode, SpvStatus};
use dash_sdk::dash_spv::sync::{ProgressPercentage, SyncProgress as SpvSyncProgress, SyncState};
use dash_sdk::dpp::dashcore::{ChainLock, Network};
use std::sync::Mutex;
use std::sync::atomic::{AtomicBool, AtomicU8, AtomicU16, Ordering};
use std::time::{Duration, Instant};

const REFRESH_CONNECTED: Duration = Duration::from_secs(10);
const REFRESH_DISCONNECTED: Duration = Duration::from_secs(2);
const REFRESH_CONNECTED: Duration = Duration::from_secs(4);
const REFRESH_DISCONNECTED: Duration = Duration::from_secs(1);

/// Three-state connection indicator matching the UI's red/orange/green circle.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
Expand Down Expand Up @@ -169,7 +170,7 @@ impl ConnectionStatus {
if total == 0 {
"No endpoints configured".to_string()
} else if available > 0 {
format!("Available ({available}/{total} endpoints)")
format!("Available ({available} unbanned / {total} total endpoints)")
} else {
format!("All {total} endpoints banned")
}
Expand Down Expand Up @@ -215,7 +216,12 @@ impl ConnectionStatus {
self.overall_state.store(state as u8, Ordering::Relaxed);
}

pub fn tooltip_text(&self) -> String {
/// Build the tooltip string for the connection indicator.
///
/// In SPV mode, fetches sync progress from the [`SpvManager`] to display
/// a detailed phase summary (e.g. `"SPV: Headers: 12345 / 27000 (45%)"`)
/// instead of the bare `"SPV: Syncing"`.
pub fn tooltip_text(&self, app_context: &crate::context::AppContext) -> String {
let backend_mode = self.backend_mode();
let disable_zmq = self.disable_zmq();
let spv_status = self.spv_status();
Expand Down Expand Up @@ -250,11 +256,21 @@ impl ConnectionStatus {
format!("{header}\n{rpc_status}\n{zmq_status}\n{dapi_status}")
}
CoreBackendMode::Spv => {
let spv_label = format!("SPV: {:?}", spv_status);
let header = match overall {
OverallConnectionState::Synced => "SPV synced",
OverallConnectionState::Syncing => "SPV syncing",
OverallConnectionState::Disconnected => "SPV disconnected",
OverallConnectionState::Synced => "Ready",
OverallConnectionState::Syncing => "Syncing",
OverallConnectionState::Disconnected => "Disconnected",
};
let spv_label = if spv_status == SpvStatus::Running {
"SPV: Synced".to_string()
} else {
app_context
.spv_manager()
.status()
.sync_progress
.as_ref()
.map(|p| format!("SPV: {}", spv_phase_summary(p)))
.unwrap_or_else(|| format!("SPV: {:?}", spv_status))
};
format!("{header}\n{spv_label}\n{dapi_status}")
}
Expand Down Expand Up @@ -318,7 +334,7 @@ impl ConnectionStatus {
}

pub fn trigger_refresh(&self, app_context: &crate::context::AppContext) -> AppAction {
// throttle updates to once every 2 seconds
// throttle updates based on connection state (1s disconnected, 4s connected)
let mut last_update = match self.last_update.lock() {
Ok(guard) => guard,
Err(poisoned) => poisoned.into_inner(),
Expand Down Expand Up @@ -387,6 +403,69 @@ impl ConnectionStatus {
}
}

/// Compact text summary of the active SPV sync phase.
///
/// Returns e.g. `"Headers: 12345 / 27000 (45%)"`, `"Masternodes: 800 / 2000 (40%)"`,
/// or `"syncing..."` if no phase is actively syncing.
///
/// Phases are checked in pipeline execution order (early → late) so the user
/// sees progression from headers through to blocks.
/// Compact text summary of the active SPV sync phase.
///
/// Returns e.g. `"Headers: 12345 / 27000 (45%)"`, `"Masternodes: 800 / 2000 (40%)"`,
/// or `"syncing..."` if no phase is actively syncing.
///
/// Phases are checked in pipeline execution order (early → late) so the user
/// sees progression from headers through to blocks.
pub fn spv_phase_summary(progress: &SpvSyncProgress) -> String {
    // Walk the phases in pipeline order and report the first one that is
    // actively syncing; fall through to the generic message otherwise.
    if let Ok(phase) = progress.headers() {
        if phase.state() == SyncState::Syncing {
            let (current, target) = (phase.current_height(), phase.target_height());
            return format!("Headers: {} / {} ({}%)", current, target, pct(current, target));
        }
    }

    if let Ok(phase) = progress.masternodes() {
        if phase.state() == SyncState::Syncing {
            let (current, target) = (phase.current_height(), phase.target_height());
            return format!("Masternodes: {} / {} ({}%)", current, target, pct(current, target));
        }
    }

    if let Ok(phase) = progress.filter_headers() {
        if phase.state() == SyncState::Syncing {
            let (current, target) = (phase.current_height(), phase.target_height());
            return format!("Filter Headers: {} / {} ({}%)", current, target, pct(current, target));
        }
    }

    if let Ok(phase) = progress.filters() {
        if phase.state() == SyncState::Syncing {
            let (current, target) = (phase.current_height(), phase.target_height());
            return format!("Filters: {} / {} ({}%)", current, target, pct(current, target));
        }
    }

    if let Ok(blocks) = progress.blocks() {
        if blocks.state() == SyncState::Syncing {
            // Blocks doesn't expose its own target_height; use the best available
            // approximation: max of headers target and blocks last_processed.
            let current = blocks.last_processed();
            let target = progress
                .headers()
                .ok()
                .map(|h| h.target_height())
                .unwrap_or(0)
                .max(current);
            return format!("Blocks: {} / {} ({}%)", current, target, pct(current, target));
        }
    }

    "syncing...".to_string()
}

/// Percentage of `current` relative to `target`, saturated to the 0–100 range.
///
/// A `target` of 0 yields 0 so callers never risk a division by zero.
fn pct(current: u32, target: u32) -> u32 {
    match target {
        0 => 0,
        t => {
            let ratio = current as f64 / t as f64;
            (ratio * 100.0).clamp(0.0, 100.0) as u32
        }
    }
}

impl Default for ConnectionStatus {
fn default() -> Self {
Self::new()
Expand Down
27 changes: 19 additions & 8 deletions src/model/wallet/asset_lock_transaction.rs
Original file line number Diff line number Diff line change
Expand Up @@ -366,22 +366,28 @@ impl Wallet {

// Next, collect the sighashes for each input since that's what we need from the
// cache
let sighashes: Vec<_> = tx
let sighashes: Result<Vec<_>, String> = tx
.input
.iter()
.enumerate()
.map(|(i, input)| {
let script_pubkey = utxos
.get(&input.previous_output)
.expect("expected a txout")
.ok_or_else(|| {
format!(
"UTXO not found in selected set for input {}",
input.previous_output
)
})?
.0
.script_pubkey
.clone();
cache
.legacy_signature_hash(i, &script_pubkey, sighash_u32)
.expect("expected sighash")
.map_err(|e| format!("Failed to compute sighash for input {}: {}", i, e))
})
.collect();
let sighashes = sighashes?;

// Now we can drop the cache to end the immutable borrow
#[allow(clippy::drop_non_drop)]
Expand All @@ -394,9 +400,13 @@ impl Wallet {
.zip(sighashes.into_iter())
.try_for_each(|(input, sighash)| {
// You need to provide the actual script_pubkey of the UTXO being spent
let (_, input_address) = check_utxos
.remove(&input.previous_output)
.expect("expected a txout");
let (_, input_address) =
check_utxos.remove(&input.previous_output).ok_or_else(|| {
format!(
"UTXO not found in selected set for input {}",
input.previous_output
)
})?;
let message = Message::from_digest(sighash.into());

let private_key = self
Expand Down Expand Up @@ -537,16 +547,17 @@ impl Wallet {

// Next, collect the sighashes for each input since that's what we need from the
// cache
let sighashes: Vec<_> = tx
let sighashes: Result<Vec<_>, String> = tx
.input
.iter()
.enumerate()
.map(|(i, _)| {
cache
.legacy_signature_hash(i, &previous_tx_output.script_pubkey, sighash_u32)
.expect("expected sighash")
.map_err(|e| format!("Failed to compute sighash for input {}: {}", i, e))
})
.collect();
let sighashes = sighashes?;

// Now we can drop the cache to end the immutable borrow
#[allow(clippy::drop_non_drop)]
Expand Down
2 changes: 1 addition & 1 deletion src/ui/components/top_panel.rs
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ fn add_connection_indicator(ui: &mut Ui, app_context: &Arc<AppContext>) -> AppAc
if overall != OverallConnectionState::Disconnected {
app_context.repaint_animation(ui.ctx());
}
let tip = status.tooltip_text();
let tip = status.tooltip_text(app_context);
let resp = resp.on_hover_text(tip);

if resp.clicked()
Expand Down
Loading
Loading