
Run 'cargo fmt'

bitcoin_e
Roman Zeyde, 6 years ago
commit 5fedce2485
  1. src/bin/electrs.rs (3 changed lines)
  2. src/bulk.rs (3 changed lines)
  3. src/daemon.rs (6 changed lines)
  4. src/index.rs (15 changed lines)
  5. src/mempool.rs (3 changed lines)
  6. src/query.rs (6 changed lines)
  7. src/rest.rs (51 changed lines)
  8. src/store.rs (6 changed lines)
  9. src/util.rs (12 changed lines)

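Most of the hunks below come from a single rustfmt default: when a method chain continues past the closing brace of a multi-line closure or block, the chained call is moved off that brace onto its own line. A minimal, self-contained sketch of the pattern, using hypothetical code rather than anything from this repository:

    // Hypothetical example, not taken from electrs.
    fn main() {
        // Before running `cargo fmt`, the chained call sat on the same line
        // as the closing brace of the multi-line closure:
        //
        //     let doubled: Vec<u32> = (1..=3)
        //         .map(|n| {
        //             let d = n * 2;
        //             d
        //         }).collect();
        //
        // After `cargo fmt`, the closing `})` stands alone and .collect()
        // moves to its own line at the same indent as .map(...):
        let doubled: Vec<u32> = (1..=3)
            .map(|n| {
                let d = n * 2;
                d
            })
            .collect();
        assert_eq!(doubled, vec![2, 4, 6]);
    }

The same mechanical rewrite accounts for most of the wrapped .unwrap(), .join(), .collect() and .chain_err(..) calls in the hunks that follow.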
src/bin/electrs.rs (3 changed lines)

@@ -54,7 +54,8 @@ fn run_server(config: Config) -> Result<()> {
index.reload(&store); // make sure the block header index is up-to-date
store
}
}.enable_compaction(); // enable auto compactions before starting incremental index updates.
}
.enable_compaction(); // enable auto compactions before starting incremental index updates.
let app = App::new(store, index, daemon)?;
let query = Query::new(app.clone(), config.extended_db_enabled, &metrics);

src/bulk.rs (3 changed lines)

@@ -254,6 +254,7 @@ pub fn index_blk_files(
});
store.write(vec![parser.last_indexed_row()]);
store
}).join()
})
.join()
.expect("writer panicked"))
}

src/daemon.rs (6 changed lines)

@@ -26,7 +26,8 @@ fn parse_hash(value: &Value) -> Result<Sha256dHash> {
value
.as_str()
.chain_err(|| format!("non-string value: {}", value))?,
).chain_err(|| format!("non-hex value: {}", value))?)
)
.chain_err(|| format!("non-hex value: {}", value))?)
}
fn header_from_value(value: Value) -> Result<BlockHeader> {
@@ -207,7 +208,8 @@ impl Connection {
.next()
.chain_err(|| {
ErrorKind::Connection("disconnected from daemon while receiving".to_owned())
})?.chain_err(|| "failed to read status")?;
})?
.chain_err(|| "failed to read status")?;
let mut headers = HashMap::new();
for line in iter {
let line = line.chain_err(|| ErrorKind::Connection("failed to read".to_owned()))?;

src/index.rs (15 changed lines)

@@ -53,7 +53,8 @@ impl TxInRow {
code: b'I',
prev_hash_prefix: hash_prefix(&txid[..]),
prev_index: output_index as u16,
}).unwrap()
})
.unwrap()
}
pub fn to_row(&self) -> Row {
@@ -95,7 +96,8 @@ impl TxOutRow {
bincode::serialize(&TxOutKey {
code: b'O',
script_hash_prefix: hash_prefix(&script_hash[..HASH_PREFIX_LEN]),
}).unwrap()
})
.unwrap()
}
pub fn to_row(&self) -> Row {
@@ -252,7 +254,8 @@ pub fn index_block(block: &Block, height: u32, extended_db_enabled: bool) -> Vec
key: bincode::serialize(&BlockKey {
code: b'B',
hash: full_hash(&blockhash[..]),
}).unwrap(),
})
.unwrap(),
value: serialize(&block.header),
});
@@ -263,7 +266,8 @@ pub fn index_block(block: &Block, height: u32, extended_db_enabled: bool) -> Vec
key: bincode::serialize(&BlockKey {
code: b'M',
hash: full_hash(&blockhash[..]),
}).unwrap(),
})
.unwrap(),
value: bincode::serialize(&blockmeta).unwrap(),
});
}
@@ -275,7 +279,8 @@ pub fn index_block(block: &Block, height: u32, extended_db_enabled: bool) -> Vec
key: bincode::serialize(&BlockKey {
code: b'X',
hash: full_hash(&blockhash[..]),
}).unwrap(),
})
.unwrap(),
value: bincode::serialize(&txids).unwrap(),
});
}

src/mempool.rs (3 changed lines)

@@ -206,7 +206,8 @@ impl Tracker {
None // ignore this transaction for now
}
}
}).collect();
})
.collect();
if entries.is_empty() {
return Ok(());
}

src/query.rs (6 changed lines)

@@ -488,7 +488,8 @@ impl Query {
.unwrap()
.get_txn(&txid)
.map(|tx| serialize(&tx))
}).chain_err(|| format!("cannot find tx {}", txid))?)
})
.chain_err(|| format!("cannot find tx {}", txid))?)
} else {
// fetch from bitcoind
let blockhash_from_index: Option<Sha256dHash> = match blockhash {
@@ -562,7 +563,8 @@ impl Query {
.map(|txid| {
Sha256dHash::from_hex(txid.as_str().chain_err(|| "txid not string")?)
.chain_err(|| "invalid hex")
}).collect::<Result<Vec<Sha256dHash>>>()?)
})
.collect::<Result<Vec<Sha256dHash>>>()?)
}
}

src/rest.rs (51 changed lines)

@@ -20,7 +20,9 @@ use std::num::ParseIntError;
use std::str::FromStr;
use std::sync::Arc;
use std::thread;
use util::{get_script_asm, script_to_address, full_hash, BlockHeaderMeta, FullHash, TransactionStatus};
use util::{
full_hash, get_script_asm, script_to_address, BlockHeaderMeta, FullHash, TransactionStatus,
};
const TX_LIMIT: usize = 25;
const BLOCK_LIMIT: usize = 10;
@@ -487,12 +489,13 @@ fn handle_request(
query
.load_txn(&txid, Some(&hash))
.map(TransactionValue::from)
}).collect::<Result<Vec<TransactionValue>, _>>()?;
})
.collect::<Result<Vec<TransactionValue>, _>>()?;
attach_txs_data(&mut txs, config, query);
json_response(txs, TTL_LONG)
}
(&Method::GET, Some(script_type @ &"address"), Some(script_str), None, None) |
(&Method::GET, Some(script_type @ &"scripthash"), Some(script_str), None, None) => {
(&Method::GET, Some(script_type @ &"address"), Some(script_str), None, None)
| (&Method::GET, Some(script_type @ &"scripthash"), Some(script_str), None, None) => {
// @TODO create new AddressStatsValue struct?
let script_hash = to_scripthash(script_type, script_str, &config.network_type)?;
match query.status(&script_hash[..]) {
@@ -517,8 +520,20 @@ fn handle_request(
Err(err) => bail!(err),
}
}
(&Method::GET, Some(script_type @ &"address"), Some(script_str), Some(&"txs"), start_index) |
(&Method::GET, Some(script_type @ &"scripthash"), Some(script_str), Some(&"txs"), start_index) => {
(
&Method::GET,
Some(script_type @ &"address"),
Some(script_str),
Some(&"txs"),
start_index,
)
| (
&Method::GET,
Some(script_type @ &"scripthash"),
Some(script_str),
Some(&"txs"),
start_index,
) => {
let start_index = start_index
.map_or(0u32, |el| el.parse().unwrap_or(0))
.max(0u32) as usize;
@@ -548,8 +563,14 @@
json_response(txs, TTL_SHORT)
}
(&Method::GET, Some(script_type @ &"address"), Some(script_str), Some(&"utxo"), None) |
(&Method::GET, Some(script_type @ &"scripthash"), Some(script_str), Some(&"utxo"), None) => {
(&Method::GET, Some(script_type @ &"address"), Some(script_str), Some(&"utxo"), None)
| (
&Method::GET,
Some(script_type @ &"scripthash"),
Some(script_str),
Some(&"utxo"),
None,
) => {
let script_hash = to_scripthash(script_type, script_str, &config.network_type)?;
let status = query.status(&script_hash[..])?;
let utxos: Vec<UtxoValue> = status
@@ -629,7 +650,8 @@ fn handle_request(
|| SpendingValue::default(),
|spend| SpendingValue::from(spend),
)
}).collect();
})
.collect();
// @TODO long ttl if all outputs are either spent long ago or unspendable
json_response(spends, TTL_SHORT)
}
@@ -687,11 +709,15 @@ fn blocks(query: &Arc<Query>, start_height: Option<usize>) -> Result<Response<Bo
json_response(values, TTL_SHORT)
}
fn to_scripthash(script_type: &str, script_str: &str, network: &Network) -> Result<FullHash, HttpError> {
fn to_scripthash(
script_type: &str,
script_str: &str,
network: &Network,
) -> Result<FullHash, HttpError> {
match script_type {
"address" => address_to_scripthash(script_str, network),
"scripthash" => Ok(full_hash(&hex::decode(script_str)?)),
_ => bail!("Invalid script type".to_string())
_ => bail!("Invalid script type".to_string()),
}
}
@@ -824,7 +850,8 @@ mod tests {
.and_then(|el| el.as_u64())
.ok_or(HttpError::from(
"confirmations absent or not a u64".to_string(),
)).unwrap();
))
.unwrap();
assert_eq!(10, confirmations);

src/store.rs (6 changed lines)

@@ -212,8 +212,10 @@ pub fn verify_index_compatibility(store: &DBStore, config: &Config) {
key: b"C".to_vec(),
value: compatibility_bytes,
}]),
Some(x) => if x != compatibility_bytes {
Some(x) => {
if x != compatibility_bytes {
panic!("Incompatible database found. Changing --light mode requires a reindex.");
},
}
}
}
}
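
The src/store.rs hunk above reflects a second rustfmt rule: a match arm whose body is a multi-line if expression is wrapped in an explicit block. A stand-alone sketch of the same transformation, with hypothetical names rather than the electrs DBStore code:

    // Hypothetical example, not the electrs code: rustfmt rewrites
    // `Some(x) => if x != expected { ... },` into a braced arm body.
    fn check(stored: Option<&[u8]>, expected: &[u8]) {
        match stored {
            Some(x) => {
                if x != expected {
                    panic!("incompatible stored value");
                }
            }
            None => println!("no stored value yet"),
        }
    }

    fn main() {
        check(Some(&b"abc"[..]), b"abc");
        check(None, b"abc");
    }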

src/util.rs (12 changed lines)

@@ -190,7 +190,8 @@ impl HeaderList {
height: height,
hash: hashed_header.blockhash,
header: hashed_header.header,
}).collect()
})
.collect()
}
pub fn apply(&mut self, new_headers: Vec<HeaderEntry>) {
@@ -352,7 +353,8 @@ pub fn script_to_address(script: &Script, network: &Network) -> Option<String> {
u5::try_from_u8(0).expect("0<32"),
script[2..22].to_vec(),
to_bech_network(network),
).unwrap(),
)
.unwrap(),
))
} else if script.is_v0_p2wsh() {
Some(Payload::WitnessProgram(
@@ -360,7 +362,8 @@ pub fn script_to_address(script: &Script, network: &Network) -> Option<String> {
u5::try_from_u8(0).expect("0<32"),
script[2..34].to_vec(),
to_bech_network(network),
).unwrap(),
)
.unwrap(),
))
} else {
None
@@ -370,7 +373,8 @@ pub fn script_to_address(script: &Script, network: &Network) -> Option<String> {
Address {
payload: payload?,
network: *network,
}.to_string(),
}
.to_string(),
)
}
