//! Helper for handling (i.e. answering) BEEFY justifications requests from a remote peer.

use codec::DecodeAll;
use futures::{channel::oneshot, StreamExt};
use log::{debug, trace};
use sc_client_api::BlockBackend;
use sc_network::{
config as netconfig, service::traits::RequestResponseConfig, types::ProtocolName,
NetworkBackend, ReputationChange,
};
use sc_network_types::PeerId;
use sp_consensus_beefy::BEEFY_ENGINE_ID;
use sp_runtime::traits::Block;
use std::{marker::PhantomData, sync::Arc};

use crate::{
communication::{
cost,
request_response::{
on_demand_justifications_protocol_config, Error, JustificationRequest,
BEEFY_SYNC_LOG_TARGET,
},
},
metric_inc,
metrics::{register_metrics, OnDemandIncomingRequestsMetrics},
};
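
/// A request coming in, including a sender for sending responses.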
#[derive(Debug)]
pub(crate) struct IncomingRequest<B: Block> {
pub peer: PeerId,
pub payload: JustificationRequest<B>,
pub pending_response: oneshot::Sender<netconfig::OutgoingResponse>,
}

impl<B: Block> IncomingRequest<B> {
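/// Create a new [`IncomingRequest`].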
pub fn new(
peer: PeerId,
payload: JustificationRequest<B>,
pending_response: oneshot::Sender<netconfig::OutgoingResponse>,
) -> Self {
Self { peer, payload, pending_response }
}
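
/// Try to build an [`IncomingRequest`] from a raw network request.
///
/// Fails if the payload cannot be decoded; in that case an error response is sent and the
/// reputation changes returned by `reputation_changes_on_err` are applied to the peer.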
pub fn try_from_raw<F>(
raw: netconfig::IncomingRequest,
reputation_changes_on_err: F,
) -> Result<Self, Error>
where
F: FnOnce(usize) -> Vec<ReputationChange>,
{
let netconfig::IncomingRequest { payload, peer, pending_response } = raw;
let payload = match JustificationRequest::decode_all(&mut payload.as_ref()) {
Ok(payload) => payload,
Err(err) => {
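// Payload failed to decode: answer with an error and punish the peer.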
let response = netconfig::OutgoingResponse {
result: Err(()),
reputation_changes: reputation_changes_on_err(payload.len()),
sent_feedback: None,
};
// If even the error response cannot be delivered, report that the reputation
// change was not applied.
if pending_response.send(response).is_err() {
return Err(Error::DecodingErrorNoReputationChange(peer, err));
}
return Err(Error::DecodingError(peer, err));
},
};
Ok(Self::new(peer, payload, pending_response))
}
}
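
/// Receiver for incoming BEEFY justifications requests.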
pub(crate) struct IncomingRequestReceiver {
raw: async_channel::Receiver<netconfig::IncomingRequest>,
}

impl IncomingRequestReceiver {
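/// Create a new [`IncomingRequestReceiver`].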
pub fn new(inner: async_channel::Receiver<netconfig::IncomingRequest>) -> Self {
Self { raw: inner }
}
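
/// Try to receive the next incoming request.
///
/// Returns [`Error::RequestChannelExhausted`] once the underlying channel is closed. On
/// decoding errors, the reputation changes produced by `reputation_changes` are applied
/// to the sending peer.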
pub async fn recv<B, F>(&mut self, reputation_changes: F) -> Result<IncomingRequest<B>, Error>
where
B: Block,
F: FnOnce(usize) -> Vec<ReputationChange>,
{
let req = match self.raw.next().await {
None => return Err(Error::RequestChannelExhausted),
Some(raw) => IncomingRequest::<B>::try_from_raw(raw, reputation_changes)?,
};
Ok(req)
}
}
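
/// Handler for incoming BEEFY justifications requests from a remote peer.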
pub struct BeefyJustifsRequestHandler<B, Client> {
pub(crate) request_receiver: IncomingRequestReceiver,
pub(crate) justif_protocol_name: ProtocolName,
pub(crate) client: Arc<Client>,
pub(crate) metrics: Option<OnDemandIncomingRequestsMetrics>,
pub(crate) _block: PhantomData<B>,
}

impl<B, Client> BeefyJustifsRequestHandler<B, Client>
where
B: Block,
Client: BlockBackend<B> + Send + Sync,
{
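/// Create a new [`BeefyJustifsRequestHandler`], together with the request-response
/// protocol configuration to register with the network backend.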
pub fn new<Hash: AsRef<[u8]>, Network: NetworkBackend<B, <B as Block>::Hash>>(
genesis_hash: Hash,
fork_id: Option<&str>,
client: Arc<Client>,
prometheus_registry: Option<prometheus_endpoint::Registry>,
) -> (Self, Network::RequestResponseProtocolConfig) {
let (request_receiver, config): (_, Network::RequestResponseProtocolConfig) =
on_demand_justifications_protocol_config::<_, _, Network>(genesis_hash, fork_id);
let justif_protocol_name = config.protocol_name().clone();
let metrics = register_metrics(prometheus_registry);
(
Self { request_receiver, justif_protocol_name, client, metrics, _block: PhantomData },
config,
)
}
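
/// Network request-response protocol name used by BEEFY justifications sync.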
pub fn protocol_name(&self) -> ProtocolName {
self.justif_protocol_name.clone()
}
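
// Sends back the encoded justification if one is found in the client backend,
// otherwise replies with an error and a `cost::UNKNOWN_PROOF_REQUEST` reputation cost.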
fn handle_request(&self, request: IncomingRequest<B>) -> Result<(), Error> {
let mut reputation_changes = vec![];
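// Resolve the requested block number to a hash and fetch the stored BEEFY justification;
// any miss becomes `Err(())` and records a reputation cost.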
let maybe_encoded_proof = self
.client
.block_hash(request.payload.begin)
.ok()
.flatten()
.and_then(|hash| self.client.justifications(hash).ok().flatten())
.and_then(|justifs| justifs.get(BEEFY_ENGINE_ID).cloned())
.ok_or_else(|| reputation_changes.push(cost::UNKNOWN_PROOF_REQUEST));
request
.pending_response
.send(netconfig::OutgoingResponse {
result: maybe_encoded_proof,
reputation_changes,
sent_feedback: None,
})
.map_err(|_| Error::SendResponse)
}
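
/// Run [`BeefyJustifsRequestHandler`].
///
/// Answers incoming BEEFY justification requests; resolves only with an [`Error`] once
/// receiving requests fails.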
pub async fn run(&mut self) -> Error {
trace!(target: BEEFY_SYNC_LOG_TARGET, "🥩 Running BeefyJustifsRequestHandler");
while let Ok(request) = self
.request_receiver
.recv(|bytes| {
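// On undecodable payloads, charge reputation proportional to the payload size.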
let bytes = bytes.min(i32::MAX as usize) as i32;
vec![ReputationChange::new(
bytes.saturating_mul(cost::PER_UNDECODABLE_BYTE),
"BEEFY: Bad request payload",
)]
})
.await
{
let peer = request.peer;
match self.handle_request(request) {
Ok(()) => {
metric_inc!(self.metrics, beefy_successful_justification_responses);
debug!(
target: BEEFY_SYNC_LOG_TARGET,
"🥩 Handled BEEFY justification request from {:?}.", peer
)
},
Err(e) => {
metric_inc!(self.metrics, beefy_failed_justification_responses);
debug!(
target: BEEFY_SYNC_LOG_TARGET,
"🥩 Failed to handle BEEFY justification request from {:?}: {}", peer, e,
)
},
}
}
Error::RequestsReceiverStreamClosed
}
}