fixup! lightning: move no_std conditional into main lib
GeneFerneau committed Mar 22, 2021
1 parent dd9f430 commit 74d5bb9
Showing 30 changed files with 137 additions and 84 deletions.
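
For context, the pattern this fixup settles on, as applied in the lightning/src/lib.rs hunk below, is sketched here. This is an illustrative excerpt only, not the full lib.rs preamble; the std-side collection import at the end is an assumption, since that part of the hunk is cut off in this view:

#![cfg_attr(feature = "no_std", no_std)]

// Pull in `alloc` and `hashbrown` only when building without std; keep an
// explicit `core` in the std build so paths like `core::u32::MAX` resolve.
#[cfg(feature = "no_std")] extern crate alloc;
#[cfg(not(feature = "no_std"))] extern crate core;
#[cfg(feature = "no_std")] extern crate hashbrown;

// Re-export only the collection types at the crate root; everything else is
// now imported from `core::` directly at each use site.
#[cfg(feature = "no_std")]
use hashbrown::{HashMap, HashSet, hash_map};
#[cfg(not(feature = "no_std"))]
use std::collections::{HashMap, HashSet, hash_map}; // assumed std-side counterpart (not visible in this extract)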
lightning/src/chain/chainmonitor.rs (3 changes: 2 additions & 1 deletion)

@@ -42,7 +42,8 @@ use util::logger::Logger;
 use util::events;
 use util::events::Event;

-use crate::{HashMap, hash_map, ops::Deref};
+use crate::{HashMap, hash_map};
+use core::ops::Deref;
 use std::sync::RwLock;

 /// An implementation of [`chain::Watch`] for monitoring channels.

lightning/src/chain/channelmonitor.rs (14 changes: 9 additions & 5 deletions)

@@ -51,7 +51,11 @@ use util::ser::{Readable, ReadableArgs, MaybeReadable, Writer, Writeable, U48};
 use util::byte_utils;
 use util::events::Event;

-use crate::{HashMap, HashSet, cmp, hash_map, mem, ops::Deref};
+#[cfg(feature = "no_std")]
+#[macro_use]
+use alloc::{boxed::Box, vec, vec::Vec};
+use crate::{HashMap, HashSet, hash_map};
+use core::{cmp, mem, ops::Deref};
 use std::io::Error;
 use std::sync::Mutex;

@@ -85,7 +89,7 @@ pub struct ChannelMonitorUpdate {
 /// then we allow the `ChannelManager` to send a `ChannelMonitorUpdate` with this update ID,
 /// with the update providing said payment preimage. No other update types are allowed after
 /// force-close.
-pub const CLOSED_CHANNEL_UPDATE_ID: u64 = crate::u64::MAX;
+pub const CLOSED_CHANNEL_UPDATE_ID: u64 = ::core::u64::MAX;

 impl Writeable for ChannelMonitorUpdate {
 fn write<W: Writer>(&self, w: &mut W) -> Result<(), ::std::io::Error> {
@@ -101,7 +105,7 @@ impl Readable for ChannelMonitorUpdate {
 fn read<R: ::std::io::Read>(r: &mut R) -> Result<Self, DecodeError> {
 let update_id: u64 = Readable::read(r)?;
 let len: u64 = Readable::read(r)?;
-let mut updates = Vec::with_capacity(cmp::min(len as usize, MAX_ALLOC_SIZE / ::std::mem::size_of::<ChannelMonitorUpdateStep>()));
+let mut updates = Vec::with_capacity(cmp::min(len as usize, MAX_ALLOC_SIZE / ::core::mem::size_of::<ChannelMonitorUpdateStep>()));
 for _ in 0..len {
 updates.push(Readable::read(r)?);
 }
@@ -1819,7 +1823,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {

 for &(ref htlc, _, _) in holder_tx.htlc_outputs.iter() {
 if let Some(transaction_output_index) = htlc.transaction_output_index {
-claim_requests.push(ClaimRequest { absolute_timelock: crate::u32::MAX, aggregable: false, outpoint: BitcoinOutPoint { txid: holder_tx.txid, vout: transaction_output_index as u32 },
+claim_requests.push(ClaimRequest { absolute_timelock: ::core::u32::MAX, aggregable: false, outpoint: BitcoinOutPoint { txid: holder_tx.txid, vout: transaction_output_index as u32 },
 witness_data: InputMaterial::HolderHTLC {
 preimage: if !htlc.offered {
 if let Some(preimage) = self.payment_preimages.get(&htlc.payment_hash) {
@@ -2367,7 +2371,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
 fn is_paying_spendable_output<L: Deref>(&mut self, tx: &Transaction, height: u32, logger: &L) where L::Target: Logger {
 let mut spendable_output = None;
 for (i, outp) in tx.output.iter().enumerate() { // There is max one spendable output for any channel tx, including ones generated by us
-if i > crate::u16::MAX as usize {
+if i > ::core::u16::MAX as usize {
 // While it is possible that an output exists on chain which is greater than the
 // 2^16th output in a given transaction, this is only possible if the output is not
 // in a lightning transaction and was instead placed there by some third party who

lightning/src/chain/keysinterface.rs (10 changes: 7 additions & 3 deletions)

@@ -36,7 +36,11 @@ use ln::chan_utils;
 use ln::chan_utils::{HTLCOutputInCommitment, make_funding_redeemscript, ChannelPublicKeys, HolderCommitmentTransaction, ChannelTransactionParameters, CommitmentTransaction};
 use ln::msgs::UnsignedChannelAnnouncement;

-use crate::{HashSet, atomic::{AtomicUsize, Ordering}};
+#[cfg(feature = "no_std")]
+#[macro_use]
+use alloc::{vec, vec::Vec};
+use crate::HashSet;
+use core::sync::atomic::{AtomicUsize, Ordering};
 use std::io::Error;
 use ln::msgs::{DecodeError, MAX_VALUE_MSAT};

@@ -832,7 +836,7 @@ impl KeysManager {
 /// onchain output detection for which a corresponding delayed_payment_key must be derived.
 pub fn derive_channel_keys(&self, channel_value_satoshis: u64, params: &[u8; 32]) -> InMemorySigner {
 let chan_id = byte_utils::slice_to_be64(&params[0..8]);
-assert!(chan_id <= crate::u32::MAX as u64); // Otherwise the params field wasn't created by us
+assert!(chan_id <= core::u32::MAX as u64); // Otherwise the params field wasn't created by us
 let mut unique_start = Sha256::engine();
 unique_start.input(params);
 unique_start.input(&self.seed);
@@ -1014,7 +1018,7 @@ impl KeysInterface for KeysManager {

 fn get_channel_signer(&self, _inbound: bool, channel_value_satoshis: u64) -> Self::Signer {
 let child_ix = self.channel_child_index.fetch_add(1, Ordering::AcqRel);
-assert!(child_ix <= crate::u32::MAX as usize);
+assert!(child_ix <= core::u32::MAX as usize);
 let mut id = [0; 32];
 id[0..8].copy_from_slice(&byte_utils::be64_to_array(child_ix as u64));
 id[8..16].copy_from_slice(&byte_utils::be64_to_array(self.starting_time_nanos as u64));

lightning/src/chain/mod.rs (7 changes: 5 additions & 2 deletions)

@@ -9,6 +9,9 @@

 //! Structs and traits which allow other parts of rust-lightning to interact with the blockchain.

+#[cfg(feature = "no_std")]
+use alloc::vec::Vec;
+
 use bitcoin::blockdata::block::{Block, BlockHeader};
 use bitcoin::blockdata::script::Script;
 use bitcoin::blockdata::transaction::TxOut;
@@ -137,7 +140,7 @@ pub trait Filter: Send + Sync {
 fn register_output(&self, outpoint: &OutPoint, script_pubkey: &Script);
 }

-impl<T: Listen> Listen for crate::ops::Deref<Target = T> {
+impl<T: Listen> Listen for core::ops::Deref<Target = T> {
 fn block_connected(&self, block: &Block, height: u32) {
 (**self).block_connected(block, height);
 }
@@ -147,7 +150,7 @@ impl<T: Listen> Listen for crate::ops::Deref<Target = T> {
 }
 }

-impl<T: crate::ops::Deref, U: crate::ops::Deref> Listen for (T, U)
+impl<T: core::ops::Deref, U: core::ops::Deref> Listen for (T, U)
 where
 T::Target: Listen,
 U::Target: Listen,

lightning/src/lib.rs (8 changes: 2 additions & 6 deletions)

@@ -20,6 +20,7 @@

 #![cfg_attr(not(any(feature = "fuzztarget", feature = "_test_utils")), deny(missing_docs))]
 #![cfg_attr(not(any(test, feature = "fuzztarget", feature = "_test_utils")), forbid(unsafe_code))]
+#![cfg_attr(feature = "no_std", no_std)]

 // In general, rust is absolutely horrid at supporting users doing things like,
 // for example, compiling Rust code for real environments. Disable useless lints
@@ -35,14 +36,9 @@ extern crate bitcoin;
 #[cfg(any(test, feature = "fuzztarget", feature = "_test_utils"))] extern crate regex;

 #[cfg(feature = "no_std")] extern crate alloc;
-#[cfg(feature = "no_std")] extern crate core;
+#[cfg(not(feature = "no_std"))] extern crate core;
 #[cfg(feature = "no_std")] extern crate hashbrown;

-#[cfg(feature = "no_std")]
-use core::{cell, cmp, default, fmt, hash, iter, marker, mem, ops, sync::atomic, time, u16, u32, u64, usize};
-#[cfg(not(feature = "no_std"))]
-use std::{cell, cmp, default, fmt, hash, iter, marker, mem, ops, sync::atomic, time, u16, u32, u64, usize};
-
 #[cfg(feature = "no_std")]
 use hashbrown::{HashMap, HashSet, hash_map};
 #[cfg(feature = "no_std")]

lightning/src/ln/chan_utils.rs (5 changes: 4 additions & 1 deletion)

@@ -31,13 +31,16 @@ use bitcoin::secp256k1::{Secp256k1, Signature, Message};
 use bitcoin::secp256k1::Error as SecpError;
 use bitcoin::secp256k1;

-use crate::{cmp, ops::Deref};
+use core::{cmp, ops::Deref};
 use ln::chan_utils;
 use util::transaction_utils::sort_outputs;
 use ln::channel::INITIAL_COMMITMENT_NUMBER;
 use std::io::Read;
 use chain;

+#[cfg(feature = "no_std")]
+use alloc::vec::Vec;
+
 const HTLC_OUTPUT_IN_COMMITMENT_SIZE: usize = 1 + 8 + 4 + 32 + 5;

 pub(crate) const MAX_HTLCS: u16 = 483;

lightning/src/ln/channel.rs (18 changes: 11 additions & 7 deletions)

@@ -38,12 +38,16 @@ use util::logger::Logger;
 use util::errors::APIError;
 use util::config::{UserConfig,ChannelConfig};

-use crate::{cmp, mem, fmt, ops::Deref};
+use core::{cmp, mem, fmt, ops::Deref};
 #[cfg(any(test, feature = "fuzztarget"))]
 use std::sync::Mutex;
 use bitcoin::hashes::hex::ToHex;
 use bitcoin::blockdata::opcodes::all::OP_PUSHBYTES_0;

+#[cfg(feature = "no_std")]
+#[macro_use]
+use alloc::{format, boxed::Box, string::String, vec, vec::Vec};
+
 #[cfg(test)]
 pub struct ChannelValueStat {
 pub value_to_self_msat: u64,
@@ -1189,7 +1193,7 @@ impl<Signer: Sign> Channel<Signer> {
 // on-chain ChannelsMonitors during block rescan. Ideally we'd figure out a way to drop
 // these, but for now we just have to treat them as normal.

-let mut pending_idx = crate::usize::MAX;
+let mut pending_idx = core::usize::MAX;
 for (idx, htlc) in self.pending_inbound_htlcs.iter().enumerate() {
 if htlc.htlc_id == htlc_id_arg {
 assert_eq!(htlc.payment_hash, payment_hash_calc);
@@ -1212,7 +1216,7 @@ impl<Signer: Sign> Channel<Signer> {
 break;
 }
 }
-if pending_idx == crate::usize::MAX {
+if pending_idx == core::usize::MAX {
 return Err(ChannelError::Ignore("Unable to find a pending HTLC which matched the given HTLC ID".to_owned()));
 }

@@ -1311,7 +1315,7 @@ impl<Signer: Sign> Channel<Signer> {
 // on-chain ChannelsMonitors during block rescan. Ideally we'd figure out a way to drop
 // these, but for now we just have to treat them as normal.

-let mut pending_idx = crate::usize::MAX;
+let mut pending_idx = core::usize::MAX;
 for (idx, htlc) in self.pending_inbound_htlcs.iter().enumerate() {
 if htlc.htlc_id == htlc_id_arg {
 match htlc.state {
@@ -1328,7 +1332,7 @@ impl<Signer: Sign> Channel<Signer> {
 pending_idx = idx;
 }
 }
-if pending_idx == crate::usize::MAX {
+if pending_idx == core::usize::MAX {
 return Err(ChannelError::Ignore("Unable to find a pending HTLC which matched the given HTLC ID".to_owned()));
 }

@@ -4281,8 +4285,8 @@ impl<Signer: Sign> Writeable for Channel<Signer> {

 let mut key_data = VecWriter(Vec::new());
 self.holder_signer.write(&mut key_data)?;
-assert!(key_data.0.len() < crate::usize::MAX);
-assert!(key_data.0.len() < crate::u32::MAX as usize);
+assert!(key_data.0.len() < core::usize::MAX);
+assert!(key_data.0.len() < core::u32::MAX as usize);
 (key_data.0.len() as u32).write(writer)?;
 writer.write_all(&key_data.0[..])?;

lightning/src/ln/channelmanager.rs (13 changes: 7 additions & 6 deletions)

@@ -55,15 +55,16 @@ use util::chacha20::{ChaCha20, ChaChaReader};
 use util::logger::Logger;
 use util::errors::APIError;

-use crate::{
-HashMap,
-HashSet,
+#[cfg(feature = "no_std")]
+#[macro_use]
+use alloc::{format, vec, vec::Vec};
+use crate::{HashMap, HashSet, hash_map};
+use core::{
 cmp,
-hash_map,
 mem,
 marker::{Sync, Send},
 ops::Deref,
-atomic::{AtomicUsize, Ordering},
+sync::atomic::{AtomicUsize, Ordering},
 time::Duration,
 };
 use std::io::{Cursor, Read};
@@ -1583,7 +1584,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
 // be absurd. We ensure this by checking that at least 500 (our stated public contract on when
 // broadcast_node_announcement panics) of the maximum-length addresses would fit in a 64KB
 // message...
-const HALF_MESSAGE_IS_ADDRS: u32 = crate::u16::MAX as u32 / (NetAddress::MAX_LEN as u32 + 1) / 2;
+const HALF_MESSAGE_IS_ADDRS: u32 = core::u16::MAX as u32 / (NetAddress::MAX_LEN as u32 + 1) / 2;
 #[deny(const_err)]
 #[allow(dead_code)]
 // ...by failing to compile if the number of addresses that would be half of a message is

lightning/src/ln/features.rs (23 changes: 9 additions & 14 deletions)

@@ -25,11 +25,14 @@
 //! [`Features`]: struct.Features.html
 //! [`Context`]: sealed/trait.Context.html

-use crate::{cmp, fmt, marker::PhantomData};
+use core::{cmp, fmt, marker::PhantomData};

 use ln::msgs::DecodeError;
 use util::ser::{Readable, Writeable, Writer};

+#[cfg(feature = "no_std")]
+use alloc::vec::Vec;
+
 mod sealed {
 use ln::features::Features;

@@ -197,42 +200,34 @@

 /// Returns whether the feature is required by the given flags.
 #[inline]
-fn requires_feature(flags: &Vec<u8>) -> bool {
+fn requires_feature(flags: &[u8]) -> bool {
 flags.len() > Self::BYTE_OFFSET &&
 (flags[Self::BYTE_OFFSET] & Self::REQUIRED_MASK) != 0
 }

 /// Returns whether the feature is supported by the given flags.
 #[inline]
-fn supports_feature(flags: &Vec<u8>) -> bool {
+fn supports_feature(flags: &[u8]) -> bool {
 flags.len() > Self::BYTE_OFFSET &&
 (flags[Self::BYTE_OFFSET] & (Self::REQUIRED_MASK | Self::OPTIONAL_MASK)) != 0
 }

 /// Sets the feature's required (even) bit in the given flags.
 #[inline]
-fn set_required_bit(flags: &mut Vec<u8>) {
-if flags.len() <= Self::BYTE_OFFSET {
-flags.resize(Self::BYTE_OFFSET + 1, 0u8);
-}
-
+fn set_required_bit(flags: &mut [u8]) {
 flags[Self::BYTE_OFFSET] |= Self::REQUIRED_MASK;
 }

 /// Sets the feature's optional (odd) bit in the given flags.
 #[inline]
-fn set_optional_bit(flags: &mut Vec<u8>) {
-if flags.len() <= Self::BYTE_OFFSET {
-flags.resize(Self::BYTE_OFFSET + 1, 0u8);
-}
-
+fn set_optional_bit(flags: &mut [u8]) {
 flags[Self::BYTE_OFFSET] |= Self::OPTIONAL_MASK;
 }

 /// Clears the feature's required (even) and optional (odd) bits from the given
 /// flags.
 #[inline]
-fn clear_bits(flags: &mut Vec<u8>) {
+fn clear_bits(flags: &mut [u8]) {
 if flags.len() > Self::BYTE_OFFSET {
 flags[Self::BYTE_OFFSET] &= !Self::REQUIRED_MASK;
 flags[Self::BYTE_OFFSET] &= !Self::OPTIONAL_MASK;

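A minimal standalone sketch of the slice-based accessor pattern the features.rs hunk above moves to; BYTE_OFFSET and the masks are stand-ins for the sealed per-feature constants, not the crate's actual values:

// Illustrative only: the real code keeps these as associated constants on
// sealed per-feature traits, and callers must size `flags` in advance.
const BYTE_OFFSET: usize = 0;
const REQUIRED_MASK: u8 = 1 << 0; // even bit
const OPTIONAL_MASK: u8 = 1 << 1; // odd bit

fn requires_feature(flags: &[u8]) -> bool {
    flags.len() > BYTE_OFFSET && (flags[BYTE_OFFSET] & REQUIRED_MASK) != 0
}

fn set_optional_bit(flags: &mut [u8]) {
    // Unlike the old &mut Vec<u8> version there is no resize here, so the
    // caller must have allocated the byte at BYTE_OFFSET already.
    flags[BYTE_OFFSET] |= OPTIONAL_MASK;
}

fn main() {
    let mut flags = vec![0u8; 1];
    set_optional_bit(&mut flags);
    assert!(!requires_feature(&flags)); // optional bit set, required bit clear
}
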
lightning/src/ln/functional_test_utils.rs (3 changes: 2 additions & 1 deletion)

@@ -40,7 +40,8 @@ use bitcoin::secp256k1::key::PublicKey;

 use std::rc::Rc;
 use std::sync::Mutex;
-use crate::{HashMap, cell::RefCell, mem};
+use core::{cell::RefCell, mem};
+use crate::HashMap;

 pub const CHAN_CONFIRM_DEPTH: u32 = 100;

lightning/src/ln/functional_tests.rs (3 changes: 2 additions & 1 deletion)

@@ -48,7 +48,8 @@ use bitcoin::secp256k1::key::{PublicKey,SecretKey};
 use regex;

 use std::sync::Mutex;
-use crate::{BTreeSet, HashMap, HashSet, atomic::Ordering, default::Default};
+use core::{sync::atomic::Ordering, default::Default};
+use crate::{BTreeSet, HashMap, HashSet};

 use ln::functional_test_utils::*;
 use ln::chan_utils::CommitmentTransaction;

lightning/src/ln/msgs.rs (5 changes: 4 additions & 1 deletion)

@@ -32,7 +32,10 @@ use bitcoin::hash_types::{Txid, BlockHash};

 use ln::features::{ChannelFeatures, InitFeatures, NodeFeatures};

-use crate::{cmp, fmt::{self, Debug}};
+#[cfg(feature = "no_std")]
+#[macro_use]
+use alloc::{vec, vec::Vec};
+use core::{cmp, fmt::{self, Debug}};
 use std::io::Read;

 use util::events::MessageSendEventsProvider;

lightning/src/ln/onchaintx.rs (13 changes: 9 additions & 4 deletions)

@@ -32,7 +32,12 @@ use util::logger::Logger;
 use util::ser::{Readable, ReadableArgs, Writer, Writeable, VecWriter};
 use util::byte_utils;

-use crate::{HashMap, hash_map, cmp, mem::replace, ops::Deref};
+use crate::{HashMap, hash_map};
+use core::{cmp, mem::replace, ops::Deref};
+
+#[cfg(feature = "no_std")]
+#[macro_use]
+use alloc::{vec, vec::Vec};

 const MAX_ALLOC_SIZE: usize = 64*1024;

@@ -296,8 +301,8 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {

 let mut key_data = VecWriter(Vec::new());
 self.signer.write(&mut key_data)?;
-assert!(key_data.0.len() < crate::usize::MAX);
-assert!(key_data.0.len() < crate::u32::MAX as usize);
+assert!(key_data.0.len() < core::usize::MAX);
+assert!(key_data.0.len() < core::u32::MAX as usize);
 (key_data.0.len() as u32).write(writer)?;
 writer.write_all(&key_data.0[..])?;

@@ -689,7 +694,7 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
 log_trace!(logger, "Updating claims view at height {} with {} matched transactions and {} claim requests", height, txn_matched.len(), claimable_outpoints.len());
 let mut new_claims = Vec::new();
 let mut aggregated_claim = HashMap::new();
-let mut aggregated_soonest = crate::u32::MAX;
+let mut aggregated_soonest = core::u32::MAX;

 // Try to aggregate outputs if their timelock expiration isn't imminent (absolute_timelock
 // <= CLTV_SHARED_CLAIM_BUFFER) and they don't require an immediate nLockTime (aggregable).

lightning/src/ln/onion_route_tests.rs (2 changes: 1 addition & 1 deletion)

@@ -32,7 +32,7 @@ use bitcoin::hashes::Hash;
 use bitcoin::secp256k1::Secp256k1;
 use bitcoin::secp256k1::key::SecretKey;

-use crate::{atomic::Ordering, default::Default};
+use core::{sync::atomic::Ordering, default::Default};
 use std::io;

 use ln::functional_test_utils::*;
