Browse Source
lnmsg rewrite, implement TLV, invoice features, varonion, payment secrethard-fail-on-bad-server-string
ghost43
5 years ago
committed by
GitHub
18 changed files with 2001 additions and 1320 deletions
@ -1,903 +0,0 @@ |
|||
{ |
|||
"init": { |
|||
"type": "16", |
|||
"payload": { |
|||
"gflen": { |
|||
"position": "0", |
|||
"length": "2" |
|||
}, |
|||
"globalfeatures": { |
|||
"position": "2", |
|||
"length": "gflen" |
|||
}, |
|||
"lflen": { |
|||
"position": "2+gflen", |
|||
"length": "2" |
|||
}, |
|||
"localfeatures": { |
|||
"position": "4+gflen", |
|||
"length": "lflen" |
|||
} |
|||
} |
|||
}, |
|||
"error": { |
|||
"type": "17", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"len": { |
|||
"position": "32", |
|||
"length": "2" |
|||
}, |
|||
"data": { |
|||
"position": "34", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"ping": { |
|||
"type": "18", |
|||
"payload": { |
|||
"num_pong_bytes": { |
|||
"position": "0", |
|||
"length": "2" |
|||
}, |
|||
"byteslen": { |
|||
"position": "2", |
|||
"length": "2" |
|||
}, |
|||
"ignored": { |
|||
"position": "4", |
|||
"length": "byteslen" |
|||
} |
|||
} |
|||
}, |
|||
"pong": { |
|||
"type": "19", |
|||
"payload": { |
|||
"byteslen": { |
|||
"position": "0", |
|||
"length": "2" |
|||
}, |
|||
"ignored": { |
|||
"position": "2", |
|||
"length": "byteslen" |
|||
} |
|||
} |
|||
}, |
|||
"open_channel": { |
|||
"type": "32", |
|||
"payload": { |
|||
"chain_hash": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"temporary_channel_id": { |
|||
"position": "32", |
|||
"length": "32" |
|||
}, |
|||
"funding_satoshis": { |
|||
"position": "64", |
|||
"length": "8" |
|||
}, |
|||
"push_msat": { |
|||
"position": "72", |
|||
"length": "8" |
|||
}, |
|||
"dust_limit_satoshis": { |
|||
"position": "80", |
|||
"length": "8" |
|||
}, |
|||
"max_htlc_value_in_flight_msat": { |
|||
"position": "88", |
|||
"length": "8" |
|||
}, |
|||
"channel_reserve_satoshis": { |
|||
"position": "96", |
|||
"length": "8" |
|||
}, |
|||
"htlc_minimum_msat": { |
|||
"position": "104", |
|||
"length": "8" |
|||
}, |
|||
"feerate_per_kw": { |
|||
"position": "112", |
|||
"length": "4" |
|||
}, |
|||
"to_self_delay": { |
|||
"position": "116", |
|||
"length": "2" |
|||
}, |
|||
"max_accepted_htlcs": { |
|||
"position": "118", |
|||
"length": "2" |
|||
}, |
|||
"funding_pubkey": { |
|||
"position": "120", |
|||
"length": "33" |
|||
}, |
|||
"revocation_basepoint": { |
|||
"position": "153", |
|||
"length": "33" |
|||
}, |
|||
"payment_basepoint": { |
|||
"position": "186", |
|||
"length": "33" |
|||
}, |
|||
"delayed_payment_basepoint": { |
|||
"position": "219", |
|||
"length": "33" |
|||
}, |
|||
"htlc_basepoint": { |
|||
"position": "252", |
|||
"length": "33" |
|||
}, |
|||
"first_per_commitment_point": { |
|||
"position": "285", |
|||
"length": "33" |
|||
}, |
|||
"channel_flags": { |
|||
"position": "318", |
|||
"length": "1" |
|||
}, |
|||
"shutdown_len": { |
|||
"position": "319", |
|||
"length": "2", |
|||
"feature": "option_upfront_shutdown_script" |
|||
}, |
|||
"shutdown_scriptpubkey": { |
|||
"position": "321", |
|||
"length": "shutdown_len", |
|||
"feature": "option_upfront_shutdown_script" |
|||
} |
|||
} |
|||
}, |
|||
"accept_channel": { |
|||
"type": "33", |
|||
"payload": { |
|||
"temporary_channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"dust_limit_satoshis": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"max_htlc_value_in_flight_msat": { |
|||
"position": "40", |
|||
"length": "8" |
|||
}, |
|||
"channel_reserve_satoshis": { |
|||
"position": "48", |
|||
"length": "8" |
|||
}, |
|||
"htlc_minimum_msat": { |
|||
"position": "56", |
|||
"length": "8" |
|||
}, |
|||
"minimum_depth": { |
|||
"position": "64", |
|||
"length": "4" |
|||
}, |
|||
"to_self_delay": { |
|||
"position": "68", |
|||
"length": "2" |
|||
}, |
|||
"max_accepted_htlcs": { |
|||
"position": "70", |
|||
"length": "2" |
|||
}, |
|||
"funding_pubkey": { |
|||
"position": "72", |
|||
"length": "33" |
|||
}, |
|||
"revocation_basepoint": { |
|||
"position": "105", |
|||
"length": "33" |
|||
}, |
|||
"payment_basepoint": { |
|||
"position": "138", |
|||
"length": "33" |
|||
}, |
|||
"delayed_payment_basepoint": { |
|||
"position": "171", |
|||
"length": "33" |
|||
}, |
|||
"htlc_basepoint": { |
|||
"position": "204", |
|||
"length": "33" |
|||
}, |
|||
"first_per_commitment_point": { |
|||
"position": "237", |
|||
"length": "33" |
|||
}, |
|||
"shutdown_len": { |
|||
"position": "270", |
|||
"length": "2", |
|||
"feature": "option_upfront_shutdown_script" |
|||
}, |
|||
"shutdown_scriptpubkey": { |
|||
"position": "272", |
|||
"length": "shutdown_len", |
|||
"feature": "option_upfront_shutdown_script" |
|||
} |
|||
} |
|||
}, |
|||
"funding_created": { |
|||
"type": "34", |
|||
"payload": { |
|||
"temporary_channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"funding_txid": { |
|||
"position": "32", |
|||
"length": "32" |
|||
}, |
|||
"funding_output_index": { |
|||
"position": "64", |
|||
"length": "2" |
|||
}, |
|||
"signature": { |
|||
"position": "66", |
|||
"length": "64" |
|||
} |
|||
} |
|||
}, |
|||
"funding_signed": { |
|||
"type": "35", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"signature": { |
|||
"position": "32", |
|||
"length": "64" |
|||
} |
|||
} |
|||
}, |
|||
"funding_locked": { |
|||
"type": "36", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"next_per_commitment_point": { |
|||
"position": "32", |
|||
"length": "33" |
|||
} |
|||
} |
|||
}, |
|||
"shutdown": { |
|||
"type": "38", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"len": { |
|||
"position": "32", |
|||
"length": "2" |
|||
}, |
|||
"scriptpubkey": { |
|||
"position": "34", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"closing_signed": { |
|||
"type": "39", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"fee_satoshis": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"signature": { |
|||
"position": "40", |
|||
"length": "64" |
|||
} |
|||
} |
|||
}, |
|||
"update_add_htlc": { |
|||
"type": "128", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"id": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"amount_msat": { |
|||
"position": "40", |
|||
"length": "8" |
|||
}, |
|||
"payment_hash": { |
|||
"position": "48", |
|||
"length": "32" |
|||
}, |
|||
"cltv_expiry": { |
|||
"position": "80", |
|||
"length": "4" |
|||
}, |
|||
"onion_routing_packet": { |
|||
"position": "84", |
|||
"length": "1366" |
|||
} |
|||
} |
|||
}, |
|||
"update_fulfill_htlc": { |
|||
"type": "130", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"id": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"payment_preimage": { |
|||
"position": "40", |
|||
"length": "32" |
|||
} |
|||
} |
|||
}, |
|||
"update_fail_htlc": { |
|||
"type": "131", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"id": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"len": { |
|||
"position": "40", |
|||
"length": "2" |
|||
}, |
|||
"reason": { |
|||
"position": "42", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"update_fail_malformed_htlc": { |
|||
"type": "135", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"id": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"sha256_of_onion": { |
|||
"position": "40", |
|||
"length": "32" |
|||
}, |
|||
"failure_code": { |
|||
"position": "72", |
|||
"length": "2" |
|||
} |
|||
} |
|||
}, |
|||
"commitment_signed": { |
|||
"type": "132", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"signature": { |
|||
"position": "32", |
|||
"length": "64" |
|||
}, |
|||
"num_htlcs": { |
|||
"position": "96", |
|||
"length": "2" |
|||
}, |
|||
"htlc_signature": { |
|||
"position": "98", |
|||
"length": "num_htlcs*64" |
|||
} |
|||
} |
|||
}, |
|||
"revoke_and_ack": { |
|||
"type": "133", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"per_commitment_secret": { |
|||
"position": "32", |
|||
"length": "32" |
|||
}, |
|||
"next_per_commitment_point": { |
|||
"position": "64", |
|||
"length": "33" |
|||
} |
|||
} |
|||
}, |
|||
"update_fee": { |
|||
"type": "134", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"feerate_per_kw": { |
|||
"position": "32", |
|||
"length": "4" |
|||
} |
|||
} |
|||
}, |
|||
"channel_reestablish": { |
|||
"type": "136", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"next_local_commitment_number": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"next_remote_revocation_number": { |
|||
"position": "40", |
|||
"length": "8" |
|||
}, |
|||
"your_last_per_commitment_secret": { |
|||
"position": "48", |
|||
"length": "32", |
|||
"feature": "option_data_loss_protect" |
|||
}, |
|||
"my_current_per_commitment_point": { |
|||
"position": "80", |
|||
"length": "33", |
|||
"feature": "option_data_loss_protect" |
|||
} |
|||
} |
|||
}, |
|||
"invalid_realm": { |
|||
"type": "PERM|1", |
|||
"payload": {} |
|||
}, |
|||
"temporary_node_failure": { |
|||
"type": "NODE|2", |
|||
"payload": {} |
|||
}, |
|||
"permanent_node_failure": { |
|||
"type": "PERM|NODE|2", |
|||
"payload": {} |
|||
}, |
|||
"required_node_feature_missing": { |
|||
"type": "PERM|NODE|3", |
|||
"payload": {} |
|||
}, |
|||
"invalid_onion_version": { |
|||
"type": "BADONION|PERM|4", |
|||
"payload": { |
|||
"sha256_of_onion": { |
|||
"position": "0", |
|||
"length": "32" |
|||
} |
|||
} |
|||
}, |
|||
"invalid_onion_hmac": { |
|||
"type": "BADONION|PERM|5", |
|||
"payload": { |
|||
"sha256_of_onion": { |
|||
"position": "0", |
|||
"length": "32" |
|||
} |
|||
} |
|||
}, |
|||
"invalid_onion_key": { |
|||
"type": "BADONION|PERM|6", |
|||
"payload": { |
|||
"sha256_of_onion": { |
|||
"position": "0", |
|||
"length": "32" |
|||
} |
|||
} |
|||
}, |
|||
"temporary_channel_failure": { |
|||
"type": "UPDATE|7", |
|||
"payload": { |
|||
"len": { |
|||
"position": "0", |
|||
"length": "2" |
|||
}, |
|||
"channel_update": { |
|||
"position": "2", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"permanent_channel_failure": { |
|||
"type": "PERM|8", |
|||
"payload": {} |
|||
}, |
|||
"required_channel_feature_missing": { |
|||
"type": "PERM|9", |
|||
"payload": {} |
|||
}, |
|||
"unknown_next_peer": { |
|||
"type": "PERM|10", |
|||
"payload": {} |
|||
}, |
|||
"amount_below_minimum": { |
|||
"type": "UPDATE|11", |
|||
"payload": { |
|||
"htlc_msat": { |
|||
"position": "0", |
|||
"length": "8" |
|||
}, |
|||
"len": { |
|||
"position": "8", |
|||
"length": "2" |
|||
}, |
|||
"channel_update": { |
|||
"position": "10", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"fee_insufficient": { |
|||
"type": "UPDATE|12", |
|||
"payload": { |
|||
"htlc_msat": { |
|||
"position": "0", |
|||
"length": "8" |
|||
}, |
|||
"len": { |
|||
"position": "8", |
|||
"length": "2" |
|||
}, |
|||
"channel_update": { |
|||
"position": "10", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"incorrect_cltv_expiry": { |
|||
"type": "UPDATE|13", |
|||
"payload": { |
|||
"cltv_expiry": { |
|||
"position": "0", |
|||
"length": "4" |
|||
}, |
|||
"len": { |
|||
"position": "4", |
|||
"length": "2" |
|||
}, |
|||
"channel_update": { |
|||
"position": "6", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"expiry_too_soon": { |
|||
"type": "UPDATE|14", |
|||
"payload": { |
|||
"len": { |
|||
"position": "0", |
|||
"length": "2" |
|||
}, |
|||
"channel_update": { |
|||
"position": "2", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"unknown_payment_hash": { |
|||
"type": "PERM|15", |
|||
"payload": {} |
|||
}, |
|||
"incorrect_payment_amount": { |
|||
"type": "PERM|16", |
|||
"payload": {} |
|||
}, |
|||
"final_expiry_too_soon": { |
|||
"type": "17", |
|||
"payload": {} |
|||
}, |
|||
"final_incorrect_cltv_expiry": { |
|||
"type": "18", |
|||
"payload": { |
|||
"cltv_expiry": { |
|||
"position": "0", |
|||
"length": "4" |
|||
} |
|||
} |
|||
}, |
|||
"final_incorrect_htlc_amount": { |
|||
"type": "19", |
|||
"payload": { |
|||
"incoming_htlc_amt": { |
|||
"position": "0", |
|||
"length": "8" |
|||
} |
|||
} |
|||
}, |
|||
"channel_disabled": { |
|||
"type": "UPDATE|20", |
|||
"payload": {} |
|||
}, |
|||
"expiry_too_far": { |
|||
"type": "21", |
|||
"payload": {} |
|||
}, |
|||
"announcement_signatures": { |
|||
"type": "259", |
|||
"payload": { |
|||
"channel_id": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"short_channel_id": { |
|||
"position": "32", |
|||
"length": "8" |
|||
}, |
|||
"node_signature": { |
|||
"position": "40", |
|||
"length": "64" |
|||
}, |
|||
"bitcoin_signature": { |
|||
"position": "104", |
|||
"length": "64" |
|||
} |
|||
} |
|||
}, |
|||
"channel_announcement": { |
|||
"type": "256", |
|||
"payload": { |
|||
"node_signature_1": { |
|||
"position": "0", |
|||
"length": "64" |
|||
}, |
|||
"node_signature_2": { |
|||
"position": "64", |
|||
"length": "64" |
|||
}, |
|||
"bitcoin_signature_1": { |
|||
"position": "128", |
|||
"length": "64" |
|||
}, |
|||
"bitcoin_signature_2": { |
|||
"position": "192", |
|||
"length": "64" |
|||
}, |
|||
"len": { |
|||
"position": "256", |
|||
"length": "2" |
|||
}, |
|||
"features": { |
|||
"position": "258", |
|||
"length": "len" |
|||
}, |
|||
"chain_hash": { |
|||
"position": "258+len", |
|||
"length": "32" |
|||
}, |
|||
"short_channel_id": { |
|||
"position": "290+len", |
|||
"length": "8" |
|||
}, |
|||
"node_id_1": { |
|||
"position": "298+len", |
|||
"length": "33" |
|||
}, |
|||
"node_id_2": { |
|||
"position": "331+len", |
|||
"length": "33" |
|||
}, |
|||
"bitcoin_key_1": { |
|||
"position": "364+len", |
|||
"length": "33" |
|||
}, |
|||
"bitcoin_key_2": { |
|||
"position": "397+len", |
|||
"length": "33" |
|||
} |
|||
} |
|||
}, |
|||
"node_announcement": { |
|||
"type": "257", |
|||
"payload": { |
|||
"signature": { |
|||
"position": "0", |
|||
"length": "64" |
|||
}, |
|||
"flen": { |
|||
"position": "64", |
|||
"length": "2" |
|||
}, |
|||
"features": { |
|||
"position": "66", |
|||
"length": "flen" |
|||
}, |
|||
"timestamp": { |
|||
"position": "66+flen", |
|||
"length": "4" |
|||
}, |
|||
"node_id": { |
|||
"position": "70+flen", |
|||
"length": "33" |
|||
}, |
|||
"rgb_color": { |
|||
"position": "103+flen", |
|||
"length": "3" |
|||
}, |
|||
"alias": { |
|||
"position": "106+flen", |
|||
"length": "32" |
|||
}, |
|||
"addrlen": { |
|||
"position": "138+flen", |
|||
"length": "2" |
|||
}, |
|||
"addresses": { |
|||
"position": "140+flen", |
|||
"length": "addrlen" |
|||
} |
|||
} |
|||
}, |
|||
"channel_update": { |
|||
"type": "258", |
|||
"payload": { |
|||
"signature": { |
|||
"position": "0", |
|||
"length": "64" |
|||
}, |
|||
"chain_hash": { |
|||
"position": "64", |
|||
"length": "32" |
|||
}, |
|||
"short_channel_id": { |
|||
"position": "96", |
|||
"length": "8" |
|||
}, |
|||
"timestamp": { |
|||
"position": "104", |
|||
"length": "4" |
|||
}, |
|||
"message_flags": { |
|||
"position": "108", |
|||
"length": "1" |
|||
}, |
|||
"channel_flags": { |
|||
"position": "109", |
|||
"length": "1" |
|||
}, |
|||
"cltv_expiry_delta": { |
|||
"position": "110", |
|||
"length": "2" |
|||
}, |
|||
"htlc_minimum_msat": { |
|||
"position": "112", |
|||
"length": "8" |
|||
}, |
|||
"fee_base_msat": { |
|||
"position": "120", |
|||
"length": "4" |
|||
}, |
|||
"fee_proportional_millionths": { |
|||
"position": "124", |
|||
"length": "4" |
|||
}, |
|||
"htlc_maximum_msat": { |
|||
"position": "128", |
|||
"length": "8", |
|||
"feature": "option_channel_htlc_max" |
|||
} |
|||
} |
|||
}, |
|||
"query_short_channel_ids": { |
|||
"type": "261", |
|||
"payload": { |
|||
"chain_hash": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"len": { |
|||
"position": "32", |
|||
"length": "2" |
|||
}, |
|||
"encoded_short_ids": { |
|||
"position": "34", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"reply_short_channel_ids_end": { |
|||
"type": "262", |
|||
"payload": { |
|||
"chain_hash": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"complete": { |
|||
"position": "32", |
|||
"length": "1" |
|||
} |
|||
} |
|||
}, |
|||
"query_channel_range": { |
|||
"type": "263", |
|||
"payload": { |
|||
"chain_hash": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"first_blocknum": { |
|||
"position": "32", |
|||
"length": "4" |
|||
}, |
|||
"number_of_blocks": { |
|||
"position": "36", |
|||
"length": "4" |
|||
} |
|||
} |
|||
}, |
|||
"reply_channel_range": { |
|||
"type": "264", |
|||
"payload": { |
|||
"chain_hash": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"first_blocknum": { |
|||
"position": "32", |
|||
"length": "4" |
|||
}, |
|||
"number_of_blocks": { |
|||
"position": "36", |
|||
"length": "4" |
|||
}, |
|||
"complete": { |
|||
"position": "40", |
|||
"length": "1" |
|||
}, |
|||
"len": { |
|||
"position": "41", |
|||
"length": "2" |
|||
}, |
|||
"encoded_short_ids": { |
|||
"position": "43", |
|||
"length": "len" |
|||
} |
|||
} |
|||
}, |
|||
"gossip_timestamp_filter": { |
|||
"type": "265", |
|||
"payload": { |
|||
"chain_hash": { |
|||
"position": "0", |
|||
"length": "32" |
|||
}, |
|||
"first_timestamp": { |
|||
"position": "32", |
|||
"length": "4" |
|||
}, |
|||
"timestamp_range": { |
|||
"position": "36", |
|||
"length": "4" |
|||
} |
|||
} |
|||
} |
|||
} |
@ -1,153 +1,513 @@ |
|||
import json |
|||
import os |
|||
from typing import Callable, Tuple |
|||
import csv |
|||
import io |
|||
from typing import Callable, Tuple, Any, Dict, List, Sequence, Union, Optional |
|||
from collections import OrderedDict |
|||
|
|||
def _eval_length_term(x, ma: dict) -> int: |
|||
""" |
|||
Evaluate a term of the simple language used |
|||
to specify lightning message field lengths. |
|||
from .lnutil import OnionFailureCodeMetaFlag |
|||
|
|||
If `x` is an integer, it is returned as is, |
|||
otherwise it is treated as a variable and |
|||
looked up in `ma`. |
|||
|
|||
If the value in `ma` was no integer, it is |
|||
assumed big-endian bytes and decoded. |
|||
class MalformedMsg(Exception): pass |
|||
class UnknownMsgFieldType(MalformedMsg): pass |
|||
class UnexpectedEndOfStream(MalformedMsg): pass |
|||
class FieldEncodingNotMinimal(MalformedMsg): pass |
|||
class UnknownMandatoryTLVRecordType(MalformedMsg): pass |
|||
class MsgTrailingGarbage(MalformedMsg): pass |
|||
class MsgInvalidFieldOrder(MalformedMsg): pass |
|||
class UnexpectedFieldSizeForEncoder(MalformedMsg): pass |
|||
|
|||
Returns evaluated result as int |
|||
""" |
|||
try: |
|||
x = int(x) |
|||
except ValueError: |
|||
x = ma[x] |
|||
|
|||
def _num_remaining_bytes_to_read(fd: io.BytesIO) -> int: |
|||
cur_pos = fd.tell() |
|||
end_pos = fd.seek(0, io.SEEK_END) |
|||
fd.seek(cur_pos) |
|||
return end_pos - cur_pos |
|||
|
|||
|
|||
def _assert_can_read_at_least_n_bytes(fd: io.BytesIO, n: int) -> None: |
|||
# note: it's faster to read n bytes and then check if we read n, than |
|||
# to assert we can read at least n and then read n bytes. |
|||
nremaining = _num_remaining_bytes_to_read(fd) |
|||
if nremaining < n: |
|||
raise UnexpectedEndOfStream(f"wants to read {n} bytes but only {nremaining} bytes left") |
|||
|
|||
|
|||
def write_bigsize_int(i: int) -> bytes: |
|||
assert i >= 0, i |
|||
if i < 0xfd: |
|||
return int.to_bytes(i, length=1, byteorder="big", signed=False) |
|||
elif i < 0x1_0000: |
|||
return b"\xfd" + int.to_bytes(i, length=2, byteorder="big", signed=False) |
|||
elif i < 0x1_0000_0000: |
|||
return b"\xfe" + int.to_bytes(i, length=4, byteorder="big", signed=False) |
|||
else: |
|||
return b"\xff" + int.to_bytes(i, length=8, byteorder="big", signed=False) |
|||
|
|||
|
|||
def read_bigsize_int(fd: io.BytesIO) -> Optional[int]: |
|||
try: |
|||
x = int(x) |
|||
except ValueError: |
|||
x = int.from_bytes(x, byteorder='big') |
|||
return x |
|||
first = fd.read(1)[0] |
|||
except IndexError: |
|||
return None # end of file |
|||
if first < 0xfd: |
|||
return first |
|||
elif first == 0xfd: |
|||
buf = fd.read(2) |
|||
if len(buf) != 2: |
|||
raise UnexpectedEndOfStream() |
|||
val = int.from_bytes(buf, byteorder="big", signed=False) |
|||
if not (0xfd <= val < 0x1_0000): |
|||
raise FieldEncodingNotMinimal() |
|||
return val |
|||
elif first == 0xfe: |
|||
buf = fd.read(4) |
|||
if len(buf) != 4: |
|||
raise UnexpectedEndOfStream() |
|||
val = int.from_bytes(buf, byteorder="big", signed=False) |
|||
if not (0x1_0000 <= val < 0x1_0000_0000): |
|||
raise FieldEncodingNotMinimal() |
|||
return val |
|||
elif first == 0xff: |
|||
buf = fd.read(8) |
|||
if len(buf) != 8: |
|||
raise UnexpectedEndOfStream() |
|||
val = int.from_bytes(buf, byteorder="big", signed=False) |
|||
if not (0x1_0000_0000 <= val): |
|||
raise FieldEncodingNotMinimal() |
|||
return val |
|||
raise Exception() |
|||
|
|||
def _eval_exp_with_ctx(exp, ctx: dict) -> int: |
|||
""" |
|||
Evaluate simple mathematical expression given |
|||
in `exp` with context (variables assigned) |
|||
from the dict `ctx`. |
|||
|
|||
Returns evaluated result as int |
|||
""" |
|||
exp = str(exp) |
|||
if "*" in exp: |
|||
assert "+" not in exp |
|||
result = 1 |
|||
for term in exp.split("*"): |
|||
result *= _eval_length_term(term, ctx) |
|||
return result |
|||
return sum(_eval_length_term(x, ctx) for x in exp.split("+")) |
|||
|
|||
def _make_handler(msg_name: str, v: dict) -> Callable[[bytes], Tuple[str, dict]]: |
|||
""" |
|||
Generate a message handler function (taking bytes) |
|||
for message type `msg_name` with specification `v` |
|||
# TODO: maybe if field_type is not "byte", we could return a list of type_len sized chunks? |
|||
# if field_type is a numeric, we could return a list of ints? |
|||
def _read_field(*, fd: io.BytesIO, field_type: str, count: Union[int, str]) -> Union[bytes, int]: |
|||
if not fd: raise Exception() |
|||
if isinstance(count, int): |
|||
assert count >= 0, f"{count!r} must be non-neg int" |
|||
elif count == "...": |
|||
pass |
|||
else: |
|||
raise Exception(f"unexpected field count: {count!r}") |
|||
if count == 0: |
|||
return b"" |
|||
type_len = None |
|||
if field_type == 'byte': |
|||
type_len = 1 |
|||
elif field_type in ('u8', 'u16', 'u32', 'u64'): |
|||
if field_type == 'u8': |
|||
type_len = 1 |
|||
elif field_type == 'u16': |
|||
type_len = 2 |
|||
elif field_type == 'u32': |
|||
type_len = 4 |
|||
else: |
|||
assert field_type == 'u64' |
|||
type_len = 8 |
|||
assert count == 1, count |
|||
buf = fd.read(type_len) |
|||
if len(buf) != type_len: |
|||
raise UnexpectedEndOfStream() |
|||
return int.from_bytes(buf, byteorder="big", signed=False) |
|||
elif field_type in ('tu16', 'tu32', 'tu64'): |
|||
if field_type == 'tu16': |
|||
type_len = 2 |
|||
elif field_type == 'tu32': |
|||
type_len = 4 |
|||
else: |
|||
assert field_type == 'tu64' |
|||
type_len = 8 |
|||
assert count == 1, count |
|||
raw = fd.read(type_len) |
|||
if len(raw) > 0 and raw[0] == 0x00: |
|||
raise FieldEncodingNotMinimal() |
|||
return int.from_bytes(raw, byteorder="big", signed=False) |
|||
elif field_type == 'varint': |
|||
assert count == 1, count |
|||
val = read_bigsize_int(fd) |
|||
if val is None: |
|||
raise UnexpectedEndOfStream() |
|||
return val |
|||
elif field_type == 'chain_hash': |
|||
type_len = 32 |
|||
elif field_type == 'channel_id': |
|||
type_len = 32 |
|||
elif field_type == 'sha256': |
|||
type_len = 32 |
|||
elif field_type == 'signature': |
|||
type_len = 64 |
|||
elif field_type == 'point': |
|||
type_len = 33 |
|||
elif field_type == 'short_channel_id': |
|||
type_len = 8 |
|||
|
|||
if count == "...": |
|||
total_len = -1 # read all |
|||
else: |
|||
if type_len is None: |
|||
raise UnknownMsgFieldType(f"unknown field type: {field_type!r}") |
|||
total_len = count * type_len |
|||
|
|||
buf = fd.read(total_len) |
|||
if total_len >= 0 and len(buf) != total_len: |
|||
raise UnexpectedEndOfStream() |
|||
return buf |
|||
|
|||
|
|||
Check lib/lightning.json, `msg_name` could be 'init', |
|||
and `v` could be |
|||
# TODO: maybe for "value" we could accept a list with len "count" of appropriate items |
|||
def _write_field(*, fd: io.BytesIO, field_type: str, count: Union[int, str], |
|||
value: Union[bytes, int]) -> None: |
|||
if not fd: raise Exception() |
|||
if isinstance(count, int): |
|||
assert count >= 0, f"{count!r} must be non-neg int" |
|||
elif count == "...": |
|||
pass |
|||
else: |
|||
raise Exception(f"unexpected field count: {count!r}") |
|||
if count == 0: |
|||
return |
|||
type_len = None |
|||
if field_type == 'byte': |
|||
type_len = 1 |
|||
elif field_type == 'u8': |
|||
type_len = 1 |
|||
elif field_type == 'u16': |
|||
type_len = 2 |
|||
elif field_type == 'u32': |
|||
type_len = 4 |
|||
elif field_type == 'u64': |
|||
type_len = 8 |
|||
elif field_type in ('tu16', 'tu32', 'tu64'): |
|||
if field_type == 'tu16': |
|||
type_len = 2 |
|||
elif field_type == 'tu32': |
|||
type_len = 4 |
|||
else: |
|||
assert field_type == 'tu64' |
|||
type_len = 8 |
|||
assert count == 1, count |
|||
if isinstance(value, int): |
|||
value = int.to_bytes(value, length=type_len, byteorder="big", signed=False) |
|||
if not isinstance(value, (bytes, bytearray)): |
|||
raise Exception(f"can only write bytes into fd. got: {value!r}") |
|||
while len(value) > 0 and value[0] == 0x00: |
|||
value = value[1:] |
|||
nbytes_written = fd.write(value) |
|||
if nbytes_written != len(value): |
|||
raise Exception(f"tried to write {len(value)} bytes, but only wrote {nbytes_written}!?") |
|||
return |
|||
elif field_type == 'varint': |
|||
assert count == 1, count |
|||
if isinstance(value, int): |
|||
value = write_bigsize_int(value) |
|||
if not isinstance(value, (bytes, bytearray)): |
|||
raise Exception(f"can only write bytes into fd. got: {value!r}") |
|||
nbytes_written = fd.write(value) |
|||
if nbytes_written != len(value): |
|||
raise Exception(f"tried to write {len(value)} bytes, but only wrote {nbytes_written}!?") |
|||
return |
|||
elif field_type == 'chain_hash': |
|||
type_len = 32 |
|||
elif field_type == 'channel_id': |
|||
type_len = 32 |
|||
elif field_type == 'sha256': |
|||
type_len = 32 |
|||
elif field_type == 'signature': |
|||
type_len = 64 |
|||
elif field_type == 'point': |
|||
type_len = 33 |
|||
elif field_type == 'short_channel_id': |
|||
type_len = 8 |
|||
total_len = -1 |
|||
if count != "...": |
|||
if type_len is None: |
|||
raise UnknownMsgFieldType(f"unknown field type: {field_type!r}") |
|||
total_len = count * type_len |
|||
if isinstance(value, int) and (count == 1 or field_type == 'byte'): |
|||
value = int.to_bytes(value, length=total_len, byteorder="big", signed=False) |
|||
if not isinstance(value, (bytes, bytearray)): |
|||
raise Exception(f"can only write bytes into fd. got: {value!r}") |
|||
if count != "..." and total_len != len(value): |
|||
raise UnexpectedFieldSizeForEncoder(f"expected: {total_len}, got {len(value)}") |
|||
nbytes_written = fd.write(value) |
|||
if nbytes_written != len(value): |
|||
raise Exception(f"tried to write {len(value)} bytes, but only wrote {nbytes_written}!?") |
|||
|
|||
{ type: 16, payload: { 'gflen': ..., ... }, ... } |
|||
|
|||
Returns function taking bytes |
|||
def _read_tlv_record(*, fd: io.BytesIO) -> Tuple[int, bytes]: |
|||
if not fd: raise Exception() |
|||
tlv_type = _read_field(fd=fd, field_type="varint", count=1) |
|||
tlv_len = _read_field(fd=fd, field_type="varint", count=1) |
|||
tlv_val = _read_field(fd=fd, field_type="byte", count=tlv_len) |
|||
return tlv_type, tlv_val |
|||
|
|||
|
|||
def _write_tlv_record(*, fd: io.BytesIO, tlv_type: int, tlv_val: bytes) -> None: |
|||
if not fd: raise Exception() |
|||
tlv_len = len(tlv_val) |
|||
_write_field(fd=fd, field_type="varint", count=1, value=tlv_type) |
|||
_write_field(fd=fd, field_type="varint", count=1, value=tlv_len) |
|||
_write_field(fd=fd, field_type="byte", count=tlv_len, value=tlv_val) |
|||
|
|||
|
|||
def _resolve_field_count(field_count_str: str, *, vars_dict: dict, allow_any=False) -> Union[int, str]: |
|||
"""Returns an evaluated field count, typically an int. |
|||
If allow_any is True, the return value can be a str with value=="...". |
|||
""" |
|||
def handler(data: bytes) -> Tuple[str, dict]: |
|||
ma = {} # map of field name -> field data; after parsing msg |
|||
pos = 0 |
|||
for fieldname in v["payload"]: |
|||
poslenMap = v["payload"][fieldname] |
|||
if "feature" in poslenMap and pos == len(data): |
|||
continue |
|||
#assert pos == _eval_exp_with_ctx(poslenMap["position"], ma) # this assert is expensive... |
|||
length = poslenMap["length"] |
|||
length = _eval_exp_with_ctx(length, ma) |
|||
ma[fieldname] = data[pos:pos+length] |
|||
pos += length |
|||
# BOLT-01: "MUST ignore any additional data within a message beyond the length that it expects for that type." |
|||
assert pos <= len(data), (msg_name, pos, len(data)) |
|||
return msg_name, ma |
|||
return handler |
|||
if field_count_str == "": |
|||
field_count = 1 |
|||
elif field_count_str == "...": |
|||
if not allow_any: |
|||
raise Exception("field count is '...' but allow_any is False") |
|||
return field_count_str |
|||
else: |
|||
try: |
|||
field_count = int(field_count_str) |
|||
except ValueError: |
|||
field_count = vars_dict[field_count_str] |
|||
if isinstance(field_count, (bytes, bytearray)): |
|||
field_count = int.from_bytes(field_count, byteorder="big") |
|||
assert isinstance(field_count, int) |
|||
return field_count |
|||
|
|||
|
|||
def _parse_msgtype_intvalue_for_onion_wire(value: str) -> int: |
|||
msg_type_int = 0 |
|||
for component in value.split("|"): |
|||
try: |
|||
msg_type_int |= int(component) |
|||
except ValueError: |
|||
msg_type_int |= OnionFailureCodeMetaFlag[component] |
|||
return msg_type_int |
|||
|
|||
|
|||
class LNSerializer: |
|||
def __init__(self): |
|||
message_types = {} |
|||
path = os.path.join(os.path.dirname(__file__), 'lightning.json') |
|||
with open(path) as f: |
|||
structured = json.loads(f.read(), object_pairs_hook=OrderedDict) |
|||
|
|||
for msg_name in structured: |
|||
v = structured[msg_name] |
|||
# these message types are skipped since their types collide |
|||
# (for example with pong, which also uses type=19) |
|||
# we don't need them yet |
|||
if msg_name in ["final_incorrect_cltv_expiry", "final_incorrect_htlc_amount"]: |
|||
continue |
|||
if len(v["payload"]) == 0: |
|||
|
|||
    def __init__(self, *, for_onion_wire: bool = False):
        """Build message and TLV schemes from the wire-format CSV definitions.

        Reads ``lnwire/onion_wire.csv`` when *for_onion_wire* is true, otherwise
        ``lnwire/peer_wire.csv`` (these files are generated from the BOLT spec
        repository), and indexes the rows into lookup tables used by
        encode_msg/decode_msg and the TLV stream reader/writer.
        """
        # TODO msg_type could be 'int' everywhere...
        self.msg_scheme_from_type = {}  # type: Dict[bytes, List[Sequence[str]]]
        self.msg_type_from_name = {}  # type: Dict[str, bytes]

        # per-TLV-stream lookup tables, keyed first by stream name
        self.in_tlv_stream_get_tlv_record_scheme_from_type = {}  # type: Dict[str, Dict[int, List[Sequence[str]]]]
        self.in_tlv_stream_get_record_type_from_name = {}  # type: Dict[str, Dict[str, int]]
        self.in_tlv_stream_get_record_name_from_type = {}  # type: Dict[str, Dict[int, str]]

        if for_onion_wire:
            path = os.path.join(os.path.dirname(__file__), "lnwire", "onion_wire.csv")
        else:
            path = os.path.join(os.path.dirname(__file__), "lnwire", "peer_wire.csv")
        with open(path, newline='') as f:
            csvreader = csv.reader(f)
            for row in csvreader:
                #print(f">>> {row!r}")
                if row[0] == "msgtype":
                    # msgtype,<msgname>,<value>[,<option>]
                    msg_type_name = row[1]
                    if for_onion_wire:
                        # onion failure codes may be written as OR-ed flag names
                        msg_type_int = _parse_msgtype_intvalue_for_onion_wire(str(row[2]))
                    else:
                        msg_type_int = int(row[2])
                    msg_type_bytes = msg_type_int.to_bytes(2, 'big')
                    assert msg_type_bytes not in self.msg_scheme_from_type, f"type collision? for {msg_type_name}"
                    assert msg_type_name not in self.msg_type_from_name, f"type collision? for {msg_type_name}"
                    # replace the textual value in-place so the stored scheme carries an int
                    row[2] = msg_type_int
                    self.msg_scheme_from_type[msg_type_bytes] = [tuple(row)]
                    self.msg_type_from_name[msg_type_name] = msg_type_bytes
                elif row[0] == "msgdata":
                    # msgdata,<msgname>,<fieldname>,<typename>,[<count>][,<option>]
                    # field rows belong to the most recently seen msgtype row
                    assert msg_type_name == row[1]
                    self.msg_scheme_from_type[msg_type_bytes].append(tuple(row))
                elif row[0] == "tlvtype":
                    # tlvtype,<tlvstreamname>,<tlvname>,<value>[,<option>]
                    tlv_stream_name = row[1]
                    tlv_record_name = row[2]
                    tlv_record_type = int(row[3])
                    row[3] = tlv_record_type
                    if tlv_stream_name not in self.in_tlv_stream_get_tlv_record_scheme_from_type:
                        # first record for this stream: create the per-stream maps
                        self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name] = OrderedDict()
                        self.in_tlv_stream_get_record_type_from_name[tlv_stream_name] = {}
                        self.in_tlv_stream_get_record_name_from_type[tlv_stream_name] = {}
                    assert tlv_record_type not in self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name], f"type collision? for {tlv_stream_name}/{tlv_record_name}"
                    assert tlv_record_name not in self.in_tlv_stream_get_record_type_from_name[tlv_stream_name], f"type collision? for {tlv_stream_name}/{tlv_record_name}"
                    assert tlv_record_type not in self.in_tlv_stream_get_record_type_from_name[tlv_stream_name], f"type collision? for {tlv_stream_name}/{tlv_record_name}"
                    self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name][tlv_record_type] = [tuple(row)]
                    self.in_tlv_stream_get_record_type_from_name[tlv_stream_name][tlv_record_name] = tlv_record_type
                    self.in_tlv_stream_get_record_name_from_type[tlv_stream_name][tlv_record_type] = tlv_record_name
                    # the OrderedDict's insertion order doubles as type order,
                    # so the CSV must list types in increasing order per stream
                    if max(self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name].keys()) > tlv_record_type:
                        raise Exception(f"tlv record types must be listed in monotonically increasing order for stream. "
                                        f"stream={tlv_stream_name}")
                elif row[0] == "tlvdata":
                    # tlvdata,<tlvstreamname>,<tlvname>,<fieldname>,<typename>,[<count>][,<option>]
                    assert tlv_stream_name == row[1]
                    assert tlv_record_name == row[2]
                    self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name][tlv_record_type].append(tuple(row))
                else:
                    pass  # TODO
|||
|
|||
def write_tlv_stream(self, *, fd: io.BytesIO, tlv_stream_name: str, **kwargs) -> None: |
|||
scheme_map = self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name] |
|||
for tlv_record_type, scheme in scheme_map.items(): # note: tlv_record_type is monotonically increasing |
|||
tlv_record_name = self.in_tlv_stream_get_record_name_from_type[tlv_stream_name][tlv_record_type] |
|||
if tlv_record_name not in kwargs: |
|||
continue |
|||
with io.BytesIO() as tlv_record_fd: |
|||
for row in scheme: |
|||
if row[0] == "tlvtype": |
|||
pass |
|||
elif row[0] == "tlvdata": |
|||
# tlvdata,<tlvstreamname>,<tlvname>,<fieldname>,<typename>,[<count>][,<option>] |
|||
assert tlv_stream_name == row[1] |
|||
assert tlv_record_name == row[2] |
|||
field_name = row[3] |
|||
field_type = row[4] |
|||
field_count_str = row[5] |
|||
field_count = _resolve_field_count(field_count_str, |
|||
vars_dict=kwargs[tlv_record_name], |
|||
allow_any=True) |
|||
field_value = kwargs[tlv_record_name][field_name] |
|||
_write_field(fd=tlv_record_fd, |
|||
field_type=field_type, |
|||
count=field_count, |
|||
value=field_value) |
|||
else: |
|||
raise Exception(f"unexpected row in scheme: {row!r}") |
|||
_write_tlv_record(fd=fd, tlv_type=tlv_record_type, tlv_val=tlv_record_fd.getvalue()) |
|||
|
|||
def read_tlv_stream(self, *, fd: io.BytesIO, tlv_stream_name: str) -> Dict[str, Dict[str, Any]]: |
|||
parsed = {} # type: Dict[str, Dict[str, Any]] |
|||
scheme_map = self.in_tlv_stream_get_tlv_record_scheme_from_type[tlv_stream_name] |
|||
last_seen_tlv_record_type = -1 # type: int |
|||
while _num_remaining_bytes_to_read(fd) > 0: |
|||
tlv_record_type, tlv_record_val = _read_tlv_record(fd=fd) |
|||
if not (tlv_record_type > last_seen_tlv_record_type): |
|||
raise MsgInvalidFieldOrder(f"TLV records must be monotonically increasing by type. " |
|||
f"cur: {tlv_record_type}. prev: {last_seen_tlv_record_type}") |
|||
last_seen_tlv_record_type = tlv_record_type |
|||
try: |
|||
num = int(v["type"]) |
|||
except ValueError: |
|||
#print("skipping", k) |
|||
continue |
|||
byts = num.to_bytes(2, 'big') |
|||
assert byts not in message_types, (byts, message_types[byts].__name__, msg_name) |
|||
names = [x.__name__ for x in message_types.values()] |
|||
assert msg_name + "_handler" not in names, (msg_name, names) |
|||
message_types[byts] = _make_handler(msg_name, v) |
|||
message_types[byts].__name__ = msg_name + "_handler" |
|||
|
|||
assert message_types[b"\x00\x10"].__name__ == "init_handler" |
|||
self.structured = structured |
|||
self.message_types = message_types |
|||
|
|||
def encode_msg(self, msg_type : str, **kwargs) -> bytes: |
|||
scheme = scheme_map[tlv_record_type] |
|||
except KeyError: |
|||
if tlv_record_type % 2 == 0: |
|||
# unknown "even" type: hard fail |
|||
raise UnknownMandatoryTLVRecordType(f"{tlv_stream_name}/{tlv_record_type}") from None |
|||
else: |
|||
# unknown "odd" type: skip it |
|||
continue |
|||
tlv_record_name = self.in_tlv_stream_get_record_name_from_type[tlv_stream_name][tlv_record_type] |
|||
parsed[tlv_record_name] = {} |
|||
with io.BytesIO(tlv_record_val) as tlv_record_fd: |
|||
for row in scheme: |
|||
#print(f"row: {row!r}") |
|||
if row[0] == "tlvtype": |
|||
pass |
|||
elif row[0] == "tlvdata": |
|||
# tlvdata,<tlvstreamname>,<tlvname>,<fieldname>,<typename>,[<count>][,<option>] |
|||
assert tlv_stream_name == row[1] |
|||
assert tlv_record_name == row[2] |
|||
field_name = row[3] |
|||
field_type = row[4] |
|||
field_count_str = row[5] |
|||
field_count = _resolve_field_count(field_count_str, |
|||
vars_dict=parsed[tlv_record_name], |
|||
allow_any=True) |
|||
#print(f">> count={field_count}. parsed={parsed}") |
|||
parsed[tlv_record_name][field_name] = _read_field(fd=tlv_record_fd, |
|||
field_type=field_type, |
|||
count=field_count) |
|||
else: |
|||
raise Exception(f"unexpected row in scheme: {row!r}") |
|||
if _num_remaining_bytes_to_read(tlv_record_fd) > 0: |
|||
raise MsgTrailingGarbage(f"TLV record ({tlv_stream_name}/{tlv_record_name}) has extra trailing garbage") |
|||
return parsed |
|||
|
|||
def encode_msg(self, msg_type: str, **kwargs) -> bytes: |
|||
""" |
|||
Encode kwargs into a Lightning message (bytes) |
|||
of the type given in the msg_type string |
|||
""" |
|||
typ = self.structured[msg_type] |
|||
data = int(typ["type"]).to_bytes(2, 'big') |
|||
lengths = {} |
|||
for k in typ["payload"]: |
|||
poslenMap = typ["payload"][k] |
|||
if k not in kwargs and "feature" in poslenMap: |
|||
continue |
|||
param = kwargs.get(k, 0) |
|||
leng = _eval_exp_with_ctx(poslenMap["length"], lengths) |
|||
try: |
|||
clone = dict(lengths) |
|||
clone.update(kwargs) |
|||
leng = _eval_exp_with_ctx(poslenMap["length"], clone) |
|||
except KeyError: |
|||
pass |
|||
try: |
|||
if not isinstance(param, bytes): |
|||
assert isinstance(param, int), "field {} is neither bytes or int".format(k) |
|||
param = param.to_bytes(leng, 'big') |
|||
except ValueError: |
|||
raise Exception("{} does not fit in {} bytes".format(k, leng)) |
|||
lengths[k] = len(param) |
|||
if lengths[k] != leng: |
|||
raise Exception("field {} is {} bytes long, should be {} bytes long".format(k, lengths[k], leng)) |
|||
data += param |
|||
return data |
|||
|
|||
def decode_msg(self, data : bytes) -> Tuple[str, dict]: |
|||
#print(f">>> encode_msg. msg_type={msg_type}, payload={kwargs!r}") |
|||
msg_type_bytes = self.msg_type_from_name[msg_type] |
|||
scheme = self.msg_scheme_from_type[msg_type_bytes] |
|||
with io.BytesIO() as fd: |
|||
fd.write(msg_type_bytes) |
|||
for row in scheme: |
|||
if row[0] == "msgtype": |
|||
pass |
|||
elif row[0] == "msgdata": |
|||
# msgdata,<msgname>,<fieldname>,<typename>,[<count>][,<option>] |
|||
field_name = row[2] |
|||
field_type = row[3] |
|||
field_count_str = row[4] |
|||
#print(f">>> encode_msg. msgdata. field_name={field_name!r}. field_type={field_type!r}. field_count_str={field_count_str!r}") |
|||
field_count = _resolve_field_count(field_count_str, vars_dict=kwargs) |
|||
if field_name == "tlvs": |
|||
tlv_stream_name = field_type |
|||
if tlv_stream_name in kwargs: |
|||
self.write_tlv_stream(fd=fd, tlv_stream_name=tlv_stream_name, **(kwargs[tlv_stream_name])) |
|||
continue |
|||
try: |
|||
field_value = kwargs[field_name] |
|||
except KeyError: |
|||
if len(row) > 5: |
|||
break # optional feature field not present |
|||
else: |
|||
field_value = 0 # default mandatory fields to zero |
|||
#print(f">>> encode_msg. writing field: {field_name}. value={field_value!r}. field_type={field_type!r}. count={field_count!r}") |
|||
_write_field(fd=fd, |
|||
field_type=field_type, |
|||
count=field_count, |
|||
value=field_value) |
|||
#print(f">>> encode_msg. so far: {fd.getvalue().hex()}") |
|||
else: |
|||
raise Exception(f"unexpected row in scheme: {row!r}") |
|||
return fd.getvalue() |
|||
|
|||
def decode_msg(self, data: bytes) -> Tuple[str, dict]: |
|||
""" |
|||
Decode Lightning message by reading the first |
|||
two bytes to determine message type. |
|||
|
|||
Returns message type string and parsed message contents dict |
|||
""" |
|||
typ = data[:2] |
|||
k, parsed = self.message_types[typ](data[2:]) |
|||
return k, parsed |
|||
#print(f"decode_msg >>> {data.hex()}") |
|||
assert len(data) >= 2 |
|||
msg_type_bytes = data[:2] |
|||
msg_type_int = int.from_bytes(msg_type_bytes, byteorder="big", signed=False) |
|||
scheme = self.msg_scheme_from_type[msg_type_bytes] |
|||
assert scheme[0][2] == msg_type_int |
|||
msg_type_name = scheme[0][1] |
|||
parsed = {} |
|||
with io.BytesIO(data[2:]) as fd: |
|||
for row in scheme: |
|||
#print(f"row: {row!r}") |
|||
if row[0] == "msgtype": |
|||
pass |
|||
elif row[0] == "msgdata": |
|||
field_name = row[2] |
|||
field_type = row[3] |
|||
field_count_str = row[4] |
|||
field_count = _resolve_field_count(field_count_str, vars_dict=parsed) |
|||
if field_name == "tlvs": |
|||
tlv_stream_name = field_type |
|||
d = self.read_tlv_stream(fd=fd, tlv_stream_name=tlv_stream_name) |
|||
parsed[tlv_stream_name] = d |
|||
continue |
|||
#print(f">> count={field_count}. parsed={parsed}") |
|||
try: |
|||
parsed[field_name] = _read_field(fd=fd, |
|||
field_type=field_type, |
|||
count=field_count) |
|||
except UnexpectedEndOfStream as e: |
|||
if len(row) > 5: |
|||
break # optional feature field not present |
|||
else: |
|||
raise |
|||
else: |
|||
raise Exception(f"unexpected row in scheme: {row!r}") |
|||
return msg_type_name, parsed |
|||
|
|||
|
|||
# module-level singleton for the regular peer-to-peer wire format
# (loads peer_wire.csv once at import time)
_inst = LNSerializer()
encode_msg = _inst.encode_msg
decode_msg = _inst.decode_msg


# separate serializer for the onion/error wire format (onion_wire.csv)
OnionWireSerializer = LNSerializer(for_onion_wire=True)
|||
|
@ -0,0 +1,5 @@ |
|||
These files are generated from the BOLT repository: |
|||
``` |
|||
$ python3 tools/extract-formats.py 01-*.md 02-*.md 07-*.md > peer_wire.csv |
|||
$ python3 tools/extract-formats.py 04-*.md > onion_wire.csv |
|||
``` |
Can't render this file because it has a wrong number of fields in line 2.
|
Can't render this file because it has a wrong number of fields in line 2.
|
@ -0,0 +1,385 @@ |
|||
import io |
|||
|
|||
from electrum.lnmsg import (read_bigsize_int, write_bigsize_int, FieldEncodingNotMinimal, |
|||
UnexpectedEndOfStream, LNSerializer, UnknownMandatoryTLVRecordType, |
|||
MalformedMsg, MsgTrailingGarbage, MsgInvalidFieldOrder, encode_msg, |
|||
decode_msg, UnexpectedFieldSizeForEncoder) |
|||
from electrum.util import bfh |
|||
from electrum.lnutil import ShortChannelID, LnFeatures |
|||
from electrum import constants |
|||
|
|||
from . import TestCaseForTestnet |
|||
|
|||
|
|||
class TestLNMsg(TestCaseForTestnet): |
|||
|
|||
def test_write_bigsize_int(self): |
|||
self.assertEqual(bfh("00"), write_bigsize_int(0)) |
|||
self.assertEqual(bfh("fc"), write_bigsize_int(252)) |
|||
self.assertEqual(bfh("fd00fd"), write_bigsize_int(253)) |
|||
self.assertEqual(bfh("fdffff"), write_bigsize_int(65535)) |
|||
self.assertEqual(bfh("fe00010000"), write_bigsize_int(65536)) |
|||
self.assertEqual(bfh("feffffffff"), write_bigsize_int(4294967295)) |
|||
self.assertEqual(bfh("ff0000000100000000"), write_bigsize_int(4294967296)) |
|||
self.assertEqual(bfh("ffffffffffffffffff"), write_bigsize_int(18446744073709551615)) |
|||
|
|||
def test_read_bigsize_int(self): |
|||
self.assertEqual(0, read_bigsize_int(io.BytesIO(bfh("00")))) |
|||
self.assertEqual(252, read_bigsize_int(io.BytesIO(bfh("fc")))) |
|||
self.assertEqual(253, read_bigsize_int(io.BytesIO(bfh("fd00fd")))) |
|||
self.assertEqual(65535, read_bigsize_int(io.BytesIO(bfh("fdffff")))) |
|||
self.assertEqual(65536, read_bigsize_int(io.BytesIO(bfh("fe00010000")))) |
|||
self.assertEqual(4294967295, read_bigsize_int(io.BytesIO(bfh("feffffffff")))) |
|||
self.assertEqual(4294967296, read_bigsize_int(io.BytesIO(bfh("ff0000000100000000")))) |
|||
self.assertEqual(18446744073709551615, read_bigsize_int(io.BytesIO(bfh("ffffffffffffffffff")))) |
|||
|
|||
with self.assertRaises(FieldEncodingNotMinimal): |
|||
read_bigsize_int(io.BytesIO(bfh("fd00fc"))) |
|||
with self.assertRaises(FieldEncodingNotMinimal): |
|||
read_bigsize_int(io.BytesIO(bfh("fe0000ffff"))) |
|||
with self.assertRaises(FieldEncodingNotMinimal): |
|||
read_bigsize_int(io.BytesIO(bfh("ff00000000ffffffff"))) |
|||
with self.assertRaises(UnexpectedEndOfStream): |
|||
read_bigsize_int(io.BytesIO(bfh("fd00"))) |
|||
with self.assertRaises(UnexpectedEndOfStream): |
|||
read_bigsize_int(io.BytesIO(bfh("feffff"))) |
|||
with self.assertRaises(UnexpectedEndOfStream): |
|||
read_bigsize_int(io.BytesIO(bfh("ffffffffff"))) |
|||
self.assertEqual(None, read_bigsize_int(io.BytesIO(bfh("")))) |
|||
with self.assertRaises(UnexpectedEndOfStream): |
|||
read_bigsize_int(io.BytesIO(bfh("fd"))) |
|||
with self.assertRaises(UnexpectedEndOfStream): |
|||
read_bigsize_int(io.BytesIO(bfh("fe"))) |
|||
with self.assertRaises(UnexpectedEndOfStream): |
|||
read_bigsize_int(io.BytesIO(bfh("ff"))) |
|||
|
|||
    def test_read_tlv_stream_tests1(self):
        """Spec-mandated TLV decoding failure cases."""
        # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-failures
        lnser = LNSerializer()
        # cases that must fail identically for both test streams
        for tlv_stream_name in ("n1", "n2"):
            with self.subTest(tlv_stream_name=tlv_stream_name):
                # truncated type / length prefixes
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd01")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(FieldEncodingNotMinimal):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd000100")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd0101")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd26")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd2602")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(FieldEncodingNotMinimal):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd000100")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnexpectedEndOfStream):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0ffd0201000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")), tlv_stream_name="n1")
                # unknown even ("mandatory") record types must hard-fail
                with self.assertRaises(UnknownMandatoryTLVRecordType):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("1200")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnknownMandatoryTLVRecordType):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd010200")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnknownMandatoryTLVRecordType):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0100000200")), tlv_stream_name=tlv_stream_name)
                with self.assertRaises(UnknownMandatoryTLVRecordType):
                    lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff010000000000000200")), tlv_stream_name=tlv_stream_name)
        # n1-specific cases (records defined only in the n1 scheme)
        with self.assertRaises(MsgTrailingGarbage):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0109ffffffffffffffffff")), tlv_stream_name="n1")
        # tu64-style fields must be minimally encoded
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("010100")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020001")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103000100")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("010400010000")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050001000000")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106000100000000")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("010700010000000000")), tlv_stream_name="n1")
        with self.assertRaises(FieldEncodingNotMinimal):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080001000000000000")), tlv_stream_name="n1")
        # record value shorter/longer than its fields require
        with self.assertRaises(UnexpectedEndOfStream):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("020701010101010101")), tlv_stream_name="n1")
        with self.assertRaises(MsgTrailingGarbage):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0209010101010101010101")), tlv_stream_name="n1")
        with self.assertRaises(UnexpectedEndOfStream):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0321023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb")), tlv_stream_name="n1")
        with self.assertRaises(UnexpectedEndOfStream):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0329023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001")), tlv_stream_name="n1")
        with self.assertRaises(UnexpectedEndOfStream):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0330023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001")), tlv_stream_name="n1")
        # check if ECC point is valid?... skip for now.
        #with self.assertRaises(Exception):
        #    lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331043da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1")
        with self.assertRaises(MsgTrailingGarbage):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0332023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001")), tlv_stream_name="n1")
        with self.assertRaises(UnexpectedEndOfStream):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe00")), tlv_stream_name="n1")
        with self.assertRaises(UnexpectedEndOfStream):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe0101")), tlv_stream_name="n1")
        with self.assertRaises(MsgTrailingGarbage):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe03010101")), tlv_stream_name="n1")
        with self.assertRaises(UnknownMandatoryTLVRecordType):
            lnser.read_tlv_stream(fd=io.BytesIO(bfh("0000")), tlv_stream_name="n1")
|||
|
|||
    def test_read_tlv_stream_tests2(self):
        """Spec-mandated TLV decoding success cases."""
        # from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-decoding-successes
        lnser = LNSerializer()
        # empty streams and unknown *odd* record types decode to an empty dict
        for tlv_stream_name in ("n1", "n2"):
            with self.subTest(tlv_stream_name=tlv_stream_name):
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("")), tlv_stream_name=tlv_stream_name))
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("2100")), tlv_stream_name=tlv_stream_name))
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd020100")), tlv_stream_name=tlv_stream_name))
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fd00")), tlv_stream_name=tlv_stream_name))
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00ff00")), tlv_stream_name=tlv_stream_name))
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("fe0200000100")), tlv_stream_name=tlv_stream_name))
                self.assertEqual({}, lnser.read_tlv_stream(fd=io.BytesIO(bfh("ff020000000000000100")), tlv_stream_name=tlv_stream_name))

        # n1-specific successes: tlv1 is a truncated uint of every length 0..8
        self.assertEqual({"tlv1": {"amount_msat": 0}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0100")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 1}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010101")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 256}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01020100")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 65536}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0103010000")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 16777216}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010401000000")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 4294967296}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01050100000000")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 1099511627776}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0106010000000000")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 281474976710656}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("010701000000000000")), tlv_stream_name="n1"))
        self.assertEqual({"tlv1": {"amount_msat": 72057594037927936}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("01080100000000000000")), tlv_stream_name="n1"))
        # tlv2: short_channel_id
        self.assertEqual({"tlv2": {"scid": ShortChannelID.from_components(0, 0, 550)}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("02080000000000000226")), tlv_stream_name="n1"))
        # tlv3: multiple fields in a single record
        self.assertEqual({"tlv3": {"node_id": bfh("023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb"),
                                   "amount_msat_1": 1,
                                   "amount_msat_2": 2}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("0331023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002")), tlv_stream_name="n1"))
        # tlv4: record type that needs a two-byte (0x00fe) BigSize type
        self.assertEqual({"tlv4": {"cltv_delta": 550}},
                         lnser.read_tlv_stream(fd=io.BytesIO(bfh("fd00fe020226")), tlv_stream_name="n1"))
|||
|
|||
def test_read_tlv_stream_tests3(self): |
|||
# from https://github.com/lightningnetwork/lightning-rfc/blob/452a0eb916fedf4c954137b4fd0b61b5002b34ad/01-messaging.md#tlv-stream-decoding-failure |
|||
lnser = LNSerializer() |
|||
with self.assertRaises(MsgInvalidFieldOrder): |
|||
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0208000000000000022601012a")), tlv_stream_name="n1") |
|||
with self.assertRaises(MsgInvalidFieldOrder): |
|||
lnser.read_tlv_stream(fd=io.BytesIO(bfh("0208000000000000023102080000000000000451")), tlv_stream_name="n1") |
|||
with self.assertRaises(MsgInvalidFieldOrder): |
|||
lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f000f012a")), tlv_stream_name="n1") |
|||
with self.assertRaises(MsgInvalidFieldOrder): |
|||
lnser.read_tlv_stream(fd=io.BytesIO(bfh("1f001f012a")), tlv_stream_name="n1") |
|||
with self.assertRaises(MsgInvalidFieldOrder): |
|||
lnser.read_tlv_stream(fd=io.BytesIO(bfh("ffffffffffffffffff000000")), tlv_stream_name="n2") |
|||
|
|||
    def test_encode_decode_msg__missing_mandatory_field_gets_set_to_zeroes(self):
        """Omitting a mandatory field ("signature") encodes it as all-zero bytes."""
        # "channel_update": "signature" missing -> gets set to zeroes
        self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
                         encode_msg(
                             "channel_update",
                             short_channel_id=ShortChannelID.from_components(54321, 111, 2),
                             channel_flags=b'\x00',
                             message_flags=b'\x01',
                             cltv_expiry_delta=144,
                             htlc_minimum_msat=200,
                             htlc_maximum_msat=1_000_000_000,
                             fee_base_msat=500,
                             fee_proportional_millionths=35,
                             chain_hash=constants.net.rev_genesis_bytes(),
                             timestamp=1584320643,
                         ))
        # round-trip: decoding yields the zeroed signature explicitly
        self.assertEqual(('channel_update',
                          {'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
                           'channel_flags': b'\x00',
                           'cltv_expiry_delta': 144,
                           'fee_base_msat': 500,
                           'fee_proportional_millionths': 35,
                           'htlc_maximum_msat': 1000000000,
                           'htlc_minimum_msat': 200,
                           'message_flags': b'\x01',
                           'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
                           'signature': bytes(64),
                           'timestamp': 1584320643}
                          ),
                         decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00")))
|||
|
|||
    def test_encode_decode_msg__missing_optional_field_will_not_appear_in_decoded_dict(self):
        """An omitted optional field ("htlc_maximum_msat") is simply not encoded or decoded."""
        # "channel_update": optional field "htlc_maximum_msat" missing -> does not get put into dict
        self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023"),
                         encode_msg(
                             "channel_update",
                             short_channel_id=ShortChannelID.from_components(54321, 111, 2),
                             channel_flags=b'\x00',
                             message_flags=b'\x01',
                             cltv_expiry_delta=144,
                             htlc_minimum_msat=200,
                             fee_base_msat=500,
                             fee_proportional_millionths=35,
                             chain_hash=constants.net.rev_genesis_bytes(),
                             timestamp=1584320643,
                         ))
        # round-trip: note the absence of 'htlc_maximum_msat' in the decoded dict
        self.assertEqual(('channel_update',
                          {'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
                           'channel_flags': b'\x00',
                           'cltv_expiry_delta': 144,
                           'fee_base_msat': 500,
                           'fee_proportional_millionths': 35,
                           'htlc_minimum_msat': 200,
                           'message_flags': b'\x01',
                           'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
                           'signature': bytes(64),
                           'timestamp': 1584320643}
                          ),
                         decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023")))
|||
|
|||
    def test_encode_decode_msg__ints_can_be_passed_as_bytes(self):
        """Integer fields may be supplied pre-serialized as big-endian bytes,
        but only with exactly the field's wire length."""
        self.assertEqual(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00"),
                         encode_msg(
                             "channel_update",
                             short_channel_id=ShortChannelID.from_components(54321, 111, 2),
                             channel_flags=b'\x00',
                             message_flags=b'\x01',
                             cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
                             htlc_minimum_msat=int.to_bytes(200, length=8, byteorder="big", signed=False),
                             htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
                             fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
                             fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
                             chain_hash=constants.net.rev_genesis_bytes(),
                             timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
                         ))
        # decoding still yields plain ints for those fields
        self.assertEqual(('channel_update',
                          {'chain_hash': b'CI\x7f\xd7\xf8&\x95q\x08\xf4\xa3\x0f\xd9\xce\xc3\xae\xbay\x97 \x84\xe9\x0e\xad\x01\xea3\t\x00\x00\x00\x00',
                           'channel_flags': b'\x00',
                           'cltv_expiry_delta': 144,
                           'fee_base_msat': 500,
                           'fee_proportional_millionths': 35,
                           'htlc_maximum_msat': 1000000000,
                           'htlc_minimum_msat': 200,
                           'message_flags': b'\x01',
                           'short_channel_id': b'\x00\xd41\x00\x00o\x00\x02',
                           'signature': bytes(64),
                           'timestamp': 1584320643}
                          ),
                         decode_msg(bfh("01020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea33090000000000d43100006f00025e6ed0830100009000000000000000c8000001f400000023000000003b9aca00")))
        # "htlc_minimum_msat" is passed as bytes but with incorrect length
        with self.assertRaises(UnexpectedFieldSizeForEncoder):
            encode_msg(
                "channel_update",
                short_channel_id=ShortChannelID.from_components(54321, 111, 2),
                channel_flags=b'\x00',
                message_flags=b'\x01',
                cltv_expiry_delta=int.to_bytes(144, length=2, byteorder="big", signed=False),
                htlc_minimum_msat=int.to_bytes(200, length=4, byteorder="big", signed=False),
                htlc_maximum_msat=int.to_bytes(1_000_000_000, length=8, byteorder="big", signed=False),
                fee_base_msat=int.to_bytes(500, length=4, byteorder="big", signed=False),
                fee_proportional_millionths=int.to_bytes(35, length=4, byteorder="big", signed=False),
                chain_hash=constants.net.rev_genesis_bytes(),
                timestamp=int.to_bytes(1584320643, length=4, byteorder="big", signed=False),
            )
|||
|
|||
def test_encode_decode_msg__commitment_signed(self):
    # "commitment_signed" is interesting because of the "htlc_signature" field,
    # which is a concatenation of multiple ("num_htlcs") signatures.
    channel_id = bytes([0x01]) * 32
    # Each case: (num_htlcs, commitment signature hex, concatenated htlc signatures hex).
    cases = [
        # 5 htlcs
        (5,
         "06112951d0a6d7fc1dbca3bd1cdbda9acfee7f668b3c0a36bd944f7e2f305b274ba46a61279e15163b2d376c664bb3481d7c5e107a5b268301e39aebbda27d2d",
         "6548bd093a2bd2f4f053f0c6eb2c5f541d55eb8a2ede4d35fe974e5d3cd0eec3138bfd4115f4483c3b14e7988b48811d2da75f29f5e6eee691251fb4fba5a2610ba8fe7007117fe1c9fa1a6b01805c84cfffbb0eba674b64342c7cac567dea50728c1bb1aadc6d23fc2f4145027eafca82d6072cc9ce6529542099f728a0521e4b2044df5d02f7f2cdf84404762b1979528aa689a3e060a2a90ba8ef9a83d24d31ffb0d95c71d9fb9049b24ecf2c949c1486e7eb3ae160d70d54e441dc785dc57f7f3c9901b9537398c66f546cfc1d65e0748895d14699342c407fe119ac17db079b103720124a5ba22d4ba14c12832324dea9cb60c61ee74376ee7dcffdd1836e354aa8838ce3b37854fa91465cc40c73b702915e3580bfebaace805d52373b57ac755ebe4a8fe97e5fc21669bea124b809c79968479148f7174f39b8014542"),
        # single htlc
        (1,
         "3b14af0c549dfb1fb287ff57c012371b3932996db5929eda5f251704751fb49d0dc2dcb88e5021575cb572fb71693758543f97d89e9165f913bfb7488d7cc265",
         "2d31103b9f6e71131e4fee86fdfbdeba90e52b43fcfd11e8e53811cd4d59b2575ae6c3c82f85bea144c88cc35e568f1e6bdd0c57337e86de0b5da7cd9994067a"),
        # zero htlcs
        (0,
         "4e206ecf904d9237b1c5b4e08513555e9a5932c45b5f68be8764ce998df635ae04f6ce7bbcd3b4fd08e2daab7f9059b287ecab4155367b834682633497173f45",
         ""),
    ]
    for num_htlcs, sig_hex, htlc_sigs_hex in cases:
        # The raw wire message is: type (0x0084) || channel_id || signature
        # || num_htlcs (u16) || num_htlcs concatenated 64-byte signatures.
        raw_msg = bfh("0084"
                      + channel_id.hex()
                      + sig_hex
                      + num_htlcs.to_bytes(2, "big").hex()
                      + htlc_sigs_hex)
        self.assertEqual(raw_msg,
                         encode_msg(
                             "commitment_signed",
                             channel_id=channel_id,
                             signature=bfh(sig_hex),
                             num_htlcs=num_htlcs,
                             htlc_signature=bfh(htlc_sigs_hex),
                         ))
        self.assertEqual(('commitment_signed',
                          {'channel_id': channel_id,
                           'signature': bfh(sig_hex),
                           'num_htlcs': num_htlcs,
                           'htlc_signature': bfh(htlc_sigs_hex)}),
                         decode_msg(raw_msg))
|||
|
|||
def test_encode_decode_msg__init(self):
    # "init" is interesting because it has TLVs optionally
    features = (LnFeatures.OPTION_STATIC_REMOTEKEY_OPT
                | LnFeatures.GOSSIP_QUERIES_OPT
                | LnFeatures.GOSSIP_QUERIES_REQ
                | LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT)
    # 32-byte chain hash used inside the "networks" TLV record
    chain_hash = bfh("43497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000")
    # encode without any TLVs; features given as the IntFlag
    self.assertEqual(bfh("00100000000220c2"),
                     encode_msg("init", gflen=0, flen=2, features=features))
    # same message, but features passed as raw bytes
    self.assertEqual(bfh("00100000000220c2"),
                     encode_msg("init", gflen=0, flen=2, features=bfh("20c2")))
    # encode with the "networks" TLV appended
    self.assertEqual(bfh("00100000000220c2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000"),
                     encode_msg(
                         "init",
                         gflen=0,
                         flen=2,
                         features=features,
                         init_tlvs={'networks': {'chains': chain_hash}},
                     ))
    # decode a message that carries no TLV stream
    self.assertEqual(('init',
                      {'gflen': 2,
                       'globalfeatures': bfh("2200"),
                       'flen': 3,
                       'features': bfh("02a2a1"),
                       'init_tlvs': {}}),
                     decode_msg(bfh("001000022200000302a2a1")))
    # decode a message that includes the "networks" TLV
    self.assertEqual(('init',
                      {'gflen': 2,
                       'globalfeatures': bfh("2200"),
                       'flen': 3,
                       'features': bfh("02aaa2"),
                       'init_tlvs': {'networks': {'chains': chain_hash}}}),
                     decode_msg(bfh("001000022200000302aaa2012043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000")))
Loading…
Reference in new issue