1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

9 changes: 1 addition & 8 deletions beacon_node/beacon_chain/src/beacon_chain.rs
@@ -501,7 +501,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
})?;

if let Some(block_root) = root {
Ok(self.store.get_item(&block_root)?)
Ok(self.store.get_block(&block_root)?)
} else {
Ok(None)
}
@@ -2656,13 +2656,6 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
}
}

/// Returns `true` if the given block root has not been processed.
pub fn is_new_block_root(&self, beacon_block_root: &Hash256) -> Result<bool, Error> {
Ok(!self
.store
.item_exists::<SignedBeaconBlock<T::EthSpec>>(beacon_block_root)?)
}

/// Dumps the entire canonical chain, from the head to genesis to a vector for analysis.
///
/// This could be a very expensive operation and should only be done in testing/analysis
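The pattern in this file recurs throughout the diff: SignedBeaconBlock is becoming a fork-aware container whose SSZ decoding needs the ChainSpec, so it no longer fits the generic get_item / put_item / item_exists store interface, and the store instead exposes dedicated get_block / put_block / block_exists / delete_block methods. Below is a minimal standalone sketch of that constraint; the names Block, Spec and Store are hypothetical stand-ins, not the real Lighthouse types.

// Hypothetical sketch: a fork-aware container cannot satisfy a
// decode-without-context trait, so the store needs block-specific accessors
// that thread the spec through. Block, Spec and Store are illustrative only.
use std::collections::HashMap;

struct Spec {
    altair_fork_slot: Option<u64>,
}

enum Block {
    Base { slot: u64 },
    Altair { slot: u64 },
}

impl Block {
    // Decoding needs the spec to pick the variant, unlike a plain
    // `from_ssz_bytes(&bytes)` used by a generic item store.
    // Assumes at least 8 bytes of input in this toy encoding.
    fn from_bytes(bytes: &[u8], spec: &Spec) -> Block {
        let slot = u64::from_le_bytes(bytes[..8].try_into().unwrap());
        match spec.altair_fork_slot {
            Some(fork) if slot >= fork => Block::Altair { slot },
            _ => Block::Base { slot },
        }
    }
}

struct Store {
    spec: Spec,
    db: HashMap<[u8; 32], Vec<u8>>,
}

impl Store {
    // Dedicated accessor: fetch raw bytes, then decode with the spec in hand.
    fn get_block(&self, root: &[u8; 32]) -> Option<Block> {
        self.db.get(root).map(|bytes| Block::from_bytes(bytes, &self.spec))
    }
}

The same substitution appears in beacon_fork_choice_store.rs, block_verification.rs and builder.rs below.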
7 changes: 2 additions & 5 deletions beacon_node/beacon_chain/src/beacon_fork_choice_store.rs
@@ -10,10 +10,7 @@ use ssz_derive::{Decode, Encode};
use std::marker::PhantomData;
use std::sync::Arc;
use store::{Error as StoreError, HotColdDB, ItemStore};
use types::{
BeaconBlock, BeaconState, BeaconStateError, Checkpoint, EthSpec, Hash256, SignedBeaconBlock,
Slot,
};
use types::{BeaconBlock, BeaconState, BeaconStateError, Checkpoint, EthSpec, Hash256, Slot};

#[derive(Debug)]
pub enum Error {
@@ -318,7 +315,7 @@ where
metrics::inc_counter(&metrics::BALANCES_CACHE_MISSES);
let justified_block = self
.store
.get_item::<SignedBeaconBlock<E>>(&self.justified_checkpoint.root)
.get_block(&self.justified_checkpoint.root)
.map_err(Error::FailedToReadBlock)?
.ok_or(Error::MissingBlock(self.justified_checkpoint.root))?
.deconstruct()
3 changes: 1 addition & 2 deletions beacon_node/beacon_chain/src/beacon_snapshot.rs
@@ -1,10 +1,9 @@
use serde_derive::Serialize;
use ssz_derive::{Decode, Encode};
use types::{beacon_state::CloneConfig, BeaconState, EthSpec, Hash256, SignedBeaconBlock};

/// Represents some block and its associated state. Generally, this will be used for tracking the
/// head, justified head and finalized head.
#[derive(Clone, Serialize, PartialEq, Debug, Encode, Decode)]
#[derive(Clone, Serialize, PartialEq, Debug)]
pub struct BeaconSnapshot<E: EthSpec> {
pub beacon_block: SignedBeaconBlock<E>,
pub beacon_block_root: Hash256,
2 changes: 1 addition & 1 deletion beacon_node/beacon_chain/src/block_verification.rs
@@ -1125,7 +1125,7 @@ pub fn check_block_is_finalized_descendant<T: BeaconChainTypes, F: ForkChoiceSto
// 2. The parent is unknown to us, we probably want to download it since it might actually
// descend from the finalized root.
if store
.item_exists::<SignedBeaconBlock<T::EthSpec>>(&block.parent_root())
.block_exists(&block.parent_root())
.map_err(|e| BlockError::BeaconChainError(e.into()))?
{
Err(BlockError::NotFinalizedDescendant {
8 changes: 4 additions & 4 deletions beacon_node/beacon_chain/src/builder.rs
@@ -235,7 +235,7 @@ where
.ok_or("Fork choice not found in store")?;

let genesis_block = store
.get_item::<SignedBeaconBlock<TEthSpec>>(&chain.genesis_block_root)
.get_block(&chain.genesis_block_root)
.map_err(|e| format!("DB error when reading genesis block: {:?}", e))?
.ok_or("Genesis block not found in store")?;
let genesis_state = store
@@ -291,12 +291,12 @@ where
.put_state(&beacon_state_root, &beacon_state)
.map_err(|e| format!("Failed to store genesis state: {:?}", e))?;
store
.put_item(&beacon_block_root, &beacon_block)
.put_block(&beacon_block_root, beacon_block.clone())
.map_err(|e| format!("Failed to store genesis block: {:?}", e))?;

// Store the genesis block under the `ZERO_HASH` key.
store
.put_item(&Hash256::zero(), &beacon_block)
.put_block(&Hash256::zero(), beacon_block.clone())
.map_err(|e| {
format!(
"Failed to store genesis block under 0x00..00 alias: {:?}",
@@ -434,7 +434,7 @@ where
.map_err(|e| format!("Unable to get fork choice head: {:?}", e))?;

let head_block = store
.get_item::<SignedBeaconBlock<TEthSpec>>(&head_block_root)
.get_block(&head_block_root)
.map_err(|e| format!("DB error when reading head block: {:?}", e))?
.ok_or("Head block not found in store")?;
let head_state_root = head_block.state_root();
20 changes: 5 additions & 15 deletions beacon_node/beacon_chain/src/test_utils.rs
@@ -28,11 +28,11 @@ use store::{config::StoreConfig, BlockReplay, HotColdDB, ItemStore, LevelDB, Mem
use tempfile::{tempdir, TempDir};
use tree_hash::TreeHash;
use types::{
init_fork_schedule, AggregateSignature, Attestation, AttestationData, AttesterSlashing,
BeaconState, BeaconStateHash, ChainSpec, Checkpoint, Domain, Epoch, EthSpec, ForkSchedule,
Graffiti, Hash256, IndexedAttestation, Keypair, ProposerSlashing, SelectionProof,
SignedAggregateAndProof, SignedBeaconBlock, SignedBeaconBlockHash, SignedRoot,
SignedVoluntaryExit, Slot, SubnetId, VariableList, VoluntaryExit,
AggregateSignature, Attestation, AttestationData, AttesterSlashing, BeaconState,
BeaconStateHash, ChainSpec, Checkpoint, Domain, Epoch, EthSpec, Graffiti, Hash256,
IndexedAttestation, Keypair, ProposerSlashing, SelectionProof, SignedAggregateAndProof,
SignedBeaconBlock, SignedBeaconBlockHash, SignedRoot, SignedVoluntaryExit, Slot, SubnetId,
VariableList, VoluntaryExit,
};

pub use types::test_utils::generate_deterministic_keypairs;
@@ -171,11 +171,6 @@ impl<E: EthSpec> BeaconChainHarness<EphemeralHarnessType<E>> {
store_config: StoreConfig,
chain_config: ChainConfig,
) -> Self {
//TODO: handle altair
init_fork_schedule(ForkSchedule {
altair_fork_slot: None,
});

let data_dir = tempdir().expect("should create temporary data_dir");
let mut spec = E::default_spec();

@@ -228,11 +223,6 @@ impl<E: EthSpec> BeaconChainHarness<DiskHarnessType<E>> {
store: Arc<HotColdDB<E, LevelDB<E>, LevelDB<E>>>,
validator_keypairs: Vec<Keypair>,
) -> Self {
//TODO: handle altair
init_fork_schedule(ForkSchedule {
altair_fork_slot: None,
});

let data_dir = tempdir().expect("should create temporary data_dir");
let spec = E::default_spec();

3 changes: 1 addition & 2 deletions beacon_node/client/src/builder.rs
@@ -18,7 +18,6 @@ use network::{NetworkConfig, NetworkMessage, NetworkService};
use slasher::Slasher;
use slasher_service::SlasherService;
use slog::{debug, info, warn};
use ssz::Decode;
use std::net::TcpListener;
use std::path::{Path, PathBuf};
use std::sync::Arc;
@@ -195,7 +194,7 @@ where
"Starting from known genesis state";
);

let genesis_state = BeaconState::from_ssz_bytes(&genesis_state_bytes)
let genesis_state = BeaconState::from_ssz_bytes(&genesis_state_bytes, &spec)
.map_err(|e| format!("Unable to parse genesis state SSZ: {:?}", e))?;

builder.genesis_state(genesis_state).map(|v| (v, None))?
12 changes: 9 additions & 3 deletions beacon_node/eth2_libp2p/src/rpc/codec/ssz_snappy.rs
@@ -14,7 +14,7 @@ use std::io::ErrorKind;
use std::io::{Read, Write};
use std::marker::PhantomData;
use tokio_util::codec::{Decoder, Encoder};
use types::{EthSpec, SignedBeaconBlock};
use types::{EthSpec, SignedBeaconBlock, SignedBeaconBlockBase};
use unsigned_varint::codec::Uvi;

/* Inbound Codec */
@@ -293,12 +293,18 @@ impl<TSpec: EthSpec> Decoder for SSZSnappyOutboundCodec<TSpec> {
Protocol::Goodbye => Err(RPCError::InvalidData),
Protocol::BlocksByRange => match self.protocol.version {
Version::V1 => Ok(Some(RPCResponse::BlocksByRange(Box::new(
SignedBeaconBlock::from_ssz_bytes(&decoded_buffer)?,
// FIXME(altair): support Altair blocks
SignedBeaconBlock::Base(SignedBeaconBlockBase::from_ssz_bytes(
&decoded_buffer,
)?),
)))),
},
Protocol::BlocksByRoot => match self.protocol.version {
// FIXME(altair): support Altair blocks
Version::V1 => Ok(Some(RPCResponse::BlocksByRoot(Box::new(
SignedBeaconBlock::from_ssz_bytes(&decoded_buffer)?,
SignedBeaconBlock::Base(SignedBeaconBlockBase::from_ssz_bytes(
&decoded_buffer,
)?),
)))),
},
Protocol::Ping => match self.protocol.version {
9 changes: 6 additions & 3 deletions beacon_node/eth2_libp2p/src/types/pubsub.rs
@@ -10,7 +10,7 @@ use std::io::{Error, ErrorKind};
use types::SubnetId;
use types::{
Attestation, AttesterSlashing, EthSpec, ProposerSlashing, SignedAggregateAndProof,
SignedBeaconBlock, SignedVoluntaryExit,
SignedBeaconBlock, SignedBeaconBlockBase, SignedVoluntaryExit,
};

#[derive(Debug, Clone, PartialEq)]
@@ -141,8 +141,11 @@ impl<T: EthSpec> PubsubMessage<T> {
))))
}
GossipKind::BeaconBlock => {
let beacon_block = SignedBeaconBlock::from_ssz_bytes(data)
.map_err(|e| format!("{:?}", e))?;
// FIXME(altair): support Altair blocks
let beacon_block = SignedBeaconBlock::Base(
SignedBeaconBlockBase::from_ssz_bytes(data)
.map_err(|e| format!("{:?}", e))?,
);
Ok(PubsubMessage::BeaconBlock(Box::new(beacon_block)))
}
GossipKind::VoluntaryExit => {
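Both the RPC codec above and the gossip decoder here take the interim route marked by the FIXME(altair) comments: decode the pre-Altair block type and wrap it explicitly in the Base variant, because no fork context is available at these layers yet. A rough sketch of that shape follows, with placeholder types and a toy byte layout rather than the real ssz machinery.

// Placeholder sketch of the interim decode path flagged by FIXME(altair):
// only the pre-Altair ("Base") encoding is understood, but the result is
// still wrapped in the multi-fork enum so callers already match on variants.
// BaseBlock, Block and the byte layout are illustrative only.
struct BaseBlock {
    slot: u64,
}

enum Block {
    Base(BaseBlock),
    // An Altair variant would be added once the decoder can tell forks apart.
}

fn decode_base(bytes: &[u8]) -> Result<BaseBlock, String> {
    let slot_bytes: [u8; 8] = bytes
        .get(..8)
        .ok_or("message too short")?
        .try_into()
        .map_err(|_| "bad slot encoding")?;
    Ok(BaseBlock {
        slot: u64::from_le_bytes(slot_bytes),
    })
}

// Mirrors the gossip/RPC paths: decode the base type, wrap it in the enum.
fn decode_gossip_block(data: &[u8]) -> Result<Block, String> {
    Ok(Block::Base(decode_base(data)?))
}

fn block_slot(block: &Block) -> u64 {
    match block {
        Block::Base(b) => b.slot,
    }
}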
10 changes: 4 additions & 6 deletions beacon_node/http_api/src/lib.rs
@@ -37,7 +37,7 @@ use tokio::sync::mpsc::UnboundedSender;
use tokio_stream::{wrappers::BroadcastStream, StreamExt};
use types::{
Attestation, AttesterSlashing, CommitteeCache, Epoch, EthSpec, ProposerSlashing, RelativeEpoch,
SignedAggregateAndProof, SignedBeaconBlock, SignedVoluntaryExit, Slot, YamlConfig,
SignedAggregateAndProof, SignedBeaconBlock, SignedVoluntaryExit, Slot, StandardConfig,
};
use warp::http::StatusCode;
use warp::sse::Event;
@@ -1274,11 +1274,9 @@ pub fn serve<T: BeaconChainTypes>(
.and(chain_filter.clone())
.and_then(|chain: Arc<BeaconChain<T>>| {
blocking_json_task(move || {
Ok(api_types::GenericResponse::from(YamlConfig::from_spec::<
T::EthSpec,
>(
&chain.spec
)))
Ok(api_types::GenericResponse::from(
StandardConfig::from_chain_spec::<T::EthSpec>(&chain.spec),
))
})
});

8 changes: 6 additions & 2 deletions beacon_node/http_api/tests/tests.rs
@@ -970,7 +970,11 @@ impl ApiTester {
.map(|res| res.data);
assert_eq!(json_result, expected, "{:?}", block_id);

let ssz_result = self.client.get_beacon_blocks_ssz(block_id).await.unwrap();
let ssz_result = self
.client
.get_beacon_blocks_ssz(block_id, &harness.chain.spec)
.await
.unwrap();
assert_eq!(ssz_result, expected, "{:?}", block_id);
}

@@ -1214,7 +1218,7 @@ impl ApiTester {
pub async fn test_get_config_spec(self) -> Self {
let result = self.client.get_config_spec().await.unwrap().data;

let expected = YamlConfig::from_spec::<E>(&self.chain.spec);
let expected = StandardConfig::from_spec::<E>(&self.chain.spec);

assert_eq!(result, expected);

45 changes: 33 additions & 12 deletions beacon_node/store/src/hot_cold_store.rs
@@ -3,7 +3,10 @@ use crate::chunked_vector::{
};
use crate::config::{OnDiskStoreConfig, StoreConfig};
use crate::forwards_iter::HybridForwardsBlockRootsIterator;
use crate::impls::beacon_state::{get_full_state, store_full_state};
use crate::impls::{
beacon_block_as_kv_store_op,
beacon_state::{get_full_state, store_full_state},
};
use crate::iter::{ParentRootBlockIterator, StateRootsIterator};
use crate::leveldb_store::BytesKey;
use crate::leveldb_store::LevelDB;
@@ -235,7 +238,8 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
block: SignedBeaconBlock<E>,
) -> Result<(), Error> {
// Store on disk.
self.hot_db.put(block_root, &block)?;
self.hot_db
.do_atomically(vec![beacon_block_as_kv_store_op(block_root, &block)])?;

// Update cache.
self.block_cache.lock().put(*block_root, block);
@@ -254,20 +258,34 @@
}

// Fetch from database.
match self.hot_db.get::<SignedBeaconBlock<E>>(block_root)? {
Some(block) => {
match self
.hot_db
.get_bytes(DBColumn::BeaconBlock.into(), block_root.as_bytes())?
{
Some(block_bytes) => {
// Deserialize.
let block = SignedBeaconBlock::from_ssz_bytes(&block_bytes, &self.spec)?;

// Add to cache.
self.block_cache.lock().put(*block_root, block.clone());

Ok(Some(block))
}
None => Ok(None),
}
}

/// Determine whether a block exists in the database.
pub fn block_exists(&self, block_root: &Hash256) -> Result<bool, Error> {
self.hot_db
.key_exists(DBColumn::BeaconBlock.into(), block_root.as_bytes())
}

/// Delete a block from the store and the block cache.
pub fn delete_block(&self, block_root: &Hash256) -> Result<(), Error> {
self.block_cache.lock().pop(block_root);
self.hot_db.delete::<SignedBeaconBlock<E>>(block_root)
self.hot_db
.key_delete(DBColumn::BeaconBlock.into(), block_root.as_bytes())
}

pub fn put_state_summary(
@@ -435,7 +453,7 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
for op in batch {
match op {
StoreOp::PutBlock(block_root, block) => {
key_value_batch.push(block.as_kv_store_op(*block_root));
key_value_batch.push(beacon_block_as_kv_store_op(block_root, block));
}

StoreOp::PutState(state_root, state) => {
@@ -559,9 +577,10 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
epoch_boundary_state_root,
}) = self.load_hot_state_summary(state_root)?
{
let boundary_state = get_full_state(&self.hot_db, &epoch_boundary_state_root)?.ok_or(
HotColdDBError::MissingEpochBoundaryState(epoch_boundary_state_root),
)?;
let boundary_state =
get_full_state(&self.hot_db, &epoch_boundary_state_root, &self.spec)?.ok_or(
HotColdDBError::MissingEpochBoundaryState(epoch_boundary_state_root),
)?;

// Optimization to avoid even *thinking* about replaying blocks if we're already
// on an epoch boundary.
@@ -649,10 +668,12 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>

/// Load a restore point state by its `state_root`.
fn load_restore_point(&self, state_root: &Hash256) -> Result<BeaconState<E>, Error> {
let mut partial_state: PartialBeaconState<E> = self
let partial_state_bytes = self
.cold_db
.get(state_root)?
.get_bytes(DBColumn::BeaconState.into(), state_root.as_bytes())?
.ok_or_else(|| HotColdDBError::MissingRestorePoint(*state_root))?;
let mut partial_state: PartialBeaconState<E> =
PartialBeaconState::from_ssz_bytes(&partial_state_bytes, &self.spec)?;

// Fill in the fields of the partial state.
partial_state.load_block_roots(&self.cold_db, &self.spec)?;
@@ -1042,7 +1063,7 @@ pub fn migrate_database<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>>(
let mut cold_db_ops: Vec<KeyValueStoreOp> = Vec::new();

if slot % store.config.slots_per_restore_point == 0 {
let state: BeaconState<E> = get_full_state(&store.hot_db, &state_root)?
let state: BeaconState<E> = get_full_state(&store.hot_db, &state_root, &store.spec)?
.ok_or(HotColdDBError::MissingStateToFreeze(state_root))?;

store.store_cold_state(&state_root, &state, &mut cold_db_ops)?;
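With SignedBeaconBlock no longer a plain store item, get_block, block_exists and delete_block drop down to the raw column/key layer (get_bytes, key_exists, key_delete on DBColumn::BeaconBlock) and thread self.spec into the SSZ decode, while keeping the LRU block cache in front. A condensed, hypothetical sketch of that read path follows, with HashMaps standing in for both the cache and the hot database; it is not the actual HotColdDB implementation.

// Condensed, hypothetical read path: check cache, else fetch raw bytes under
// the block column and decode with the spec. HashMaps stand in for the LRU
// cache and the key-value database; Block::from_bytes is a stand-in decoder.
use std::collections::HashMap;
use std::sync::Mutex;

type Root = [u8; 32];

#[derive(Clone)]
struct Block {
    slot: u64,
}

struct Spec; // the real decoder needs the spec to pick a fork variant

impl Block {
    fn from_bytes(bytes: &[u8], _spec: &Spec) -> Result<Block, String> {
        let slot_bytes: [u8; 8] = bytes
            .get(..8)
            .ok_or("block bytes too short")?
            .try_into()
            .map_err(|_| "bad slot encoding")?;
        Ok(Block {
            slot: u64::from_le_bytes(slot_bytes),
        })
    }
}

struct HotColdDb {
    spec: Spec,
    block_cache: Mutex<HashMap<Root, Block>>,
    hot_db: HashMap<(String, Root), Vec<u8>>, // (column, key) -> bytes
}

impl HotColdDb {
    fn get_block(&self, root: &Root) -> Result<Option<Block>, String> {
        // Cache hit: return a clone without touching the database.
        if let Some(block) = self.block_cache.lock().unwrap().get(root) {
            return Ok(Some(block.clone()));
        }
        // Cache miss: fetch bytes from the block column, decode with the spec.
        match self.hot_db.get(&("beacon_block".to_string(), *root)) {
            Some(bytes) => {
                let block = Block::from_bytes(bytes, &self.spec)?;
                self.block_cache.lock().unwrap().insert(*root, block.clone());
                Ok(Some(block))
            }
            None => Ok(None),
        }
    }

    // Existence check goes straight to the column/key layer; no decode needed.
    fn block_exists(&self, root: &Root) -> bool {
        self.hot_db.contains_key(&("beacon_block".to_string(), *root))
    }
}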