
pyln.spec.bolt*: make these separate packages, so versions can change indep.

They're almost entirely autogenerated, and we use symlinks into the
top directory to reduce replication.

They can't be under pyln.spec.message, because a package can't also
be a namespace.

We also add fulltext and desc fields, and exclude our "gen" files from
flake8, since the spec quotes contain weird whitespace.

Changelog-Added: Python: pyln.spec.bolt{1,2,4,7} packages.
Signed-off-by: Rusty Russell <rusty@rustcorp.com.au>
paymod-01
Rusty Russell, 5 years ago; committed by Christian Decker
parent commit 3882e8bdf7
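
To show what the new packages expose, here is a minimal usage sketch based only on the exports visible in this diff (`csv`, `text`, `desc`, `namespace`, `__version__`, `__gitversion__`); the printed values are illustrative:

```python
# Sketch only: exercising the fields the new pyln.spec.bolt1 package exports.
import pyln.spec.bolt1 as bolt1

print(bolt1.desc)           # "BOLT #1: Base Protocol"
print(bolt1.__version__)    # derived from the spec's git describe, e.g. "1.0.post137"
print(len(bolt1.csv))       # raw CSV lines extracted from 01-messaging.md
print(bolt1.text[:40])      # the full spec text shipped in gen.py
print(bolt1.namespace)      # a MessageNamespace built from those CSV lines
```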
  1. Makefile (2)
  2. contrib/pyln-proto/pyln/proto/message/bolt1/Makefile (7)
  3. contrib/pyln-proto/pyln/proto/message/bolt1/__init__.py (16)
  4. contrib/pyln-proto/pyln/proto/message/bolt1/bolt.py (5)
  5. contrib/pyln-proto/pyln/proto/message/bolt1/csv.py (35)
  6. contrib/pyln-proto/pyln/proto/message/bolt2/Makefile (7)
  7. contrib/pyln-proto/pyln/proto/message/bolt2/__init__.py (16)
  8. contrib/pyln-proto/pyln/proto/message/bolt2/csv.py (100)
  9. contrib/pyln-proto/pyln/proto/message/bolt4/Makefile (7)
  10. contrib/pyln-proto/pyln/proto/message/bolt4/bolt.py (5)
  11. contrib/pyln-proto/pyln/proto/message/bolt4/csv.py (55)
  12. contrib/pyln-proto/pyln/proto/message/bolt7/Makefile (7)
  13. contrib/pyln-proto/pyln/proto/message/bolt7/__init__.py (16)
  14. contrib/pyln-proto/pyln/proto/message/bolt7/bolt.py (5)
  15. contrib/pyln-proto/pyln/proto/message/bolt7/csv.py (83)
  16. contrib/pyln-spec/Makefile (47)
  17. contrib/pyln-spec/bolt.py (2)
  18. contrib/pyln-spec/bolt1/pyln/spec/bolt1/__init__.py (1)
  19. contrib/pyln-spec/bolt1/pyln/spec/bolt1/bolt.py (1)
  20. contrib/pyln-spec/bolt1/pyln/spec/bolt1/gen.py (976)
  21. contrib/pyln-spec/bolt1/pyln/spec/bolt1/gen_version.py (2)
  22. contrib/pyln-spec/bolt1/requirements.txt (1)
  23. contrib/pyln-spec/bolt1/setup.py (23)
  24. contrib/pyln-spec/bolt1/tests/test_bolt1.py (2)
  25. contrib/pyln-spec/bolt2/pyln/spec/bolt2/__init__.py (1)
  26. contrib/pyln-spec/bolt2/pyln/spec/bolt2/bolt.py (1)
  27. contrib/pyln-spec/bolt2/pyln/spec/bolt2/gen.py (1471)
  28. contrib/pyln-spec/bolt2/pyln/spec/bolt2/gen_version.py (2)
  29. contrib/pyln-spec/bolt2/requirements.txt (1)
  30. contrib/pyln-spec/bolt2/setup.py (23)
  31. contrib/pyln-spec/bolt2/tests/test_bolt2.py (2)
  32. contrib/pyln-spec/bolt4/pyln/spec/bolt4/__init__.py (1)
  33. contrib/pyln-spec/bolt4/pyln/spec/bolt4/bolt.py (1)
  34. contrib/pyln-spec/bolt4/pyln/spec/bolt4/gen.py (1267)
  35. contrib/pyln-spec/bolt4/pyln/spec/bolt4/gen_version.py (2)
  36. contrib/pyln-spec/bolt4/requirements.txt (1)
  37. contrib/pyln-spec/bolt4/setup.py (23)
  38. contrib/pyln-spec/bolt4/tests/test_bolt4.py (2)
  39. contrib/pyln-spec/bolt7/pyln/spec/bolt7/__init__.py (1)
  40. contrib/pyln-spec/bolt7/pyln/spec/bolt7/bolt.py (1)
  41. contrib/pyln-spec/bolt7/pyln/spec/bolt7/gen.py (1211)
  42. contrib/pyln-spec/bolt7/pyln/spec/bolt7/gen_version.py (2)
  43. contrib/pyln-spec/bolt7/requirements.txt (1)
  44. contrib/pyln-spec/bolt7/setup.py (23)
  45. contrib/pyln-spec/bolt7/tests/test_bolt7.py (2)
  46. contrib/pyln-spec/requirements.txt (1)
  47. contrib/pyln-spec/subinit.py (10)

2
Makefile

@@ -340,7 +340,7 @@ check-markdown:
 check-spelling:
 	@tools/check-spelling.sh
 
-PYSRC=$(shell git ls-files "*.py") contrib/pylightning/lightning-pay
+PYSRC=$(shell git ls-files "*.py" | grep -v /text.py) contrib/pylightning/lightning-pay
 
 check-python:
 	@# E501 line too long (N > 79 characters)

7
contrib/pyln-proto/pyln/proto/message/bolt1/Makefile

@@ -1,7 +0,0 @@
#! /usr/bin/make
SPECDIR := ../../../../../../../lightning-rfc

csv.py: $(SPECDIR)/01-messaging.md Makefile
	SPECNUM=`basename $< | sed 's/-.*//'`; (echo csv = '['; python3 $(SPECDIR)/tools/extract-formats.py $< | sed 's/\(.*\)/ "\1",/'; echo ']') > $@
	chmod a+x $@

16
contrib/pyln-proto/pyln/proto/message/bolt1/__init__.py

@@ -1,16 +0,0 @@
from .csv import csv
from .bolt import namespace
import sys

__version__ = '0.0.1'

__all__ = [
    'csv',
    'namespace',
]

mod = sys.modules[__name__]
for d in namespace.subtypes, namespace.tlvtypes, namespace.messagetypes:
    for name in d:
        setattr(mod, name, d[name])
        __all__.append(name)

5
contrib/pyln-proto/pyln/proto/message/bolt1/bolt.py

@@ -1,5 +0,0 @@
from pyln.proto.message import MessageNamespace
from .csv import csv

namespace = MessageNamespace(csv_lines=csv)

35
contrib/pyln-proto/pyln/proto/message/bolt1/csv.py

@@ -1,35 +0,0 @@
csv = [
"msgtype,init,16",
"msgdata,init,gflen,u16,",
"msgdata,init,globalfeatures,byte,gflen",
"msgdata,init,flen,u16,",
"msgdata,init,features,byte,flen",
"msgdata,init,tlvs,init_tlvs,",
"tlvtype,init_tlvs,networks,1",
"tlvdata,init_tlvs,networks,chains,chain_hash,...",
"msgtype,error,17",
"msgdata,error,channel_id,channel_id,",
"msgdata,error,len,u16,",
"msgdata,error,data,byte,len",
"msgtype,ping,18",
"msgdata,ping,num_pong_bytes,u16,",
"msgdata,ping,byteslen,u16,",
"msgdata,ping,ignored,byte,byteslen",
"msgtype,pong,19",
"msgdata,pong,byteslen,u16,",
"msgdata,pong,ignored,byte,byteslen",
"tlvtype,n1,tlv1,1",
"tlvdata,n1,tlv1,amount_msat,tu64,",
"tlvtype,n1,tlv2,2",
"tlvdata,n1,tlv2,scid,short_channel_id,",
"tlvtype,n1,tlv3,3",
"tlvdata,n1,tlv3,node_id,point,",
"tlvdata,n1,tlv3,amount_msat_1,u64,",
"tlvdata,n1,tlv3,amount_msat_2,u64,",
"tlvtype,n1,tlv4,254",
"tlvdata,n1,tlv4,cltv_delta,u16,",
"tlvtype,n2,tlv1,0",
"tlvdata,n2,tlv1,amount_msat,tu64,",
"tlvtype,n2,tlv2,11",
"tlvdata,n2,tlv2,cltv_expiry,tu32,",
]

7
contrib/pyln-proto/pyln/proto/message/bolt2/Makefile

@@ -1,7 +0,0 @@
#! /usr/bin/make
SPECDIR := ../../../../../../../lightning-rfc

csv.py: $(SPECDIR)/02-peer-protocol.md Makefile
	SPECNUM=`basename $< | sed 's/-.*//'`; (echo csv = '['; python3 $(SPECDIR)/tools/extract-formats.py $< | sed 's/\(.*\)/ "\1",/'; echo ']') > $@
	chmod a+x $@

16
contrib/pyln-proto/pyln/proto/message/bolt2/__init__.py

@@ -1,16 +0,0 @@
from .csv import csv
from .bolt import namespace
import sys

__version__ = '0.0.1'

__all__ = [
    'csv',
    'namespace',
]

mod = sys.modules[__name__]
for d in namespace.subtypes, namespace.tlvtypes, namespace.messagetypes:
    for name in d:
        setattr(mod, name, d[name])
        __all__.append(name)

100
contrib/pyln-proto/pyln/proto/message/bolt2/csv.py

@@ -1,100 +0,0 @@
csv = [
"msgtype,open_channel,32",
"msgdata,open_channel,chain_hash,chain_hash,",
"msgdata,open_channel,temporary_channel_id,byte,32",
"msgdata,open_channel,funding_satoshis,u64,",
"msgdata,open_channel,push_msat,u64,",
"msgdata,open_channel,dust_limit_satoshis,u64,",
"msgdata,open_channel,max_htlc_value_in_flight_msat,u64,",
"msgdata,open_channel,channel_reserve_satoshis,u64,",
"msgdata,open_channel,htlc_minimum_msat,u64,",
"msgdata,open_channel,feerate_per_kw,u32,",
"msgdata,open_channel,to_self_delay,u16,",
"msgdata,open_channel,max_accepted_htlcs,u16,",
"msgdata,open_channel,funding_pubkey,point,",
"msgdata,open_channel,revocation_basepoint,point,",
"msgdata,open_channel,payment_basepoint,point,",
"msgdata,open_channel,delayed_payment_basepoint,point,",
"msgdata,open_channel,htlc_basepoint,point,",
"msgdata,open_channel,first_per_commitment_point,point,",
"msgdata,open_channel,channel_flags,byte,",
"msgdata,open_channel,tlvs,open_channel_tlvs,",
"tlvtype,open_channel_tlvs,upfront_shutdown_script,0",
"tlvdata,open_channel_tlvs,upfront_shutdown_script,shutdown_scriptpubkey,byte,...",
"msgtype,accept_channel,33",
"msgdata,accept_channel,temporary_channel_id,byte,32",
"msgdata,accept_channel,dust_limit_satoshis,u64,",
"msgdata,accept_channel,max_htlc_value_in_flight_msat,u64,",
"msgdata,accept_channel,channel_reserve_satoshis,u64,",
"msgdata,accept_channel,htlc_minimum_msat,u64,",
"msgdata,accept_channel,minimum_depth,u32,",
"msgdata,accept_channel,to_self_delay,u16,",
"msgdata,accept_channel,max_accepted_htlcs,u16,",
"msgdata,accept_channel,funding_pubkey,point,",
"msgdata,accept_channel,revocation_basepoint,point,",
"msgdata,accept_channel,payment_basepoint,point,",
"msgdata,accept_channel,delayed_payment_basepoint,point,",
"msgdata,accept_channel,htlc_basepoint,point,",
"msgdata,accept_channel,first_per_commitment_point,point,",
"msgdata,accept_channel,tlvs,accept_channel_tlvs,",
"tlvtype,accept_channel_tlvs,upfront_shutdown_script,0",
"tlvdata,accept_channel_tlvs,upfront_shutdown_script,shutdown_scriptpubkey,byte,...",
"msgtype,funding_created,34",
"msgdata,funding_created,temporary_channel_id,byte,32",
"msgdata,funding_created,funding_txid,sha256,",
"msgdata,funding_created,funding_output_index,u16,",
"msgdata,funding_created,signature,signature,",
"msgtype,funding_signed,35",
"msgdata,funding_signed,channel_id,channel_id,",
"msgdata,funding_signed,signature,signature,",
"msgtype,funding_locked,36",
"msgdata,funding_locked,channel_id,channel_id,",
"msgdata,funding_locked,next_per_commitment_point,point,",
"msgtype,shutdown,38",
"msgdata,shutdown,channel_id,channel_id,",
"msgdata,shutdown,len,u16,",
"msgdata,shutdown,scriptpubkey,byte,len",
"msgtype,closing_signed,39",
"msgdata,closing_signed,channel_id,channel_id,",
"msgdata,closing_signed,fee_satoshis,u64,",
"msgdata,closing_signed,signature,signature,",
"msgtype,update_add_htlc,128",
"msgdata,update_add_htlc,channel_id,channel_id,",
"msgdata,update_add_htlc,id,u64,",
"msgdata,update_add_htlc,amount_msat,u64,",
"msgdata,update_add_htlc,payment_hash,sha256,",
"msgdata,update_add_htlc,cltv_expiry,u32,",
"msgdata,update_add_htlc,onion_routing_packet,byte,1366",
"msgtype,update_fulfill_htlc,130",
"msgdata,update_fulfill_htlc,channel_id,channel_id,",
"msgdata,update_fulfill_htlc,id,u64,",
"msgdata,update_fulfill_htlc,payment_preimage,byte,32",
"msgtype,update_fail_htlc,131",
"msgdata,update_fail_htlc,channel_id,channel_id,",
"msgdata,update_fail_htlc,id,u64,",
"msgdata,update_fail_htlc,len,u16,",
"msgdata,update_fail_htlc,reason,byte,len",
"msgtype,update_fail_malformed_htlc,135",
"msgdata,update_fail_malformed_htlc,channel_id,channel_id,",
"msgdata,update_fail_malformed_htlc,id,u64,",
"msgdata,update_fail_malformed_htlc,sha256_of_onion,sha256,",
"msgdata,update_fail_malformed_htlc,failure_code,u16,",
"msgtype,commitment_signed,132",
"msgdata,commitment_signed,channel_id,channel_id,",
"msgdata,commitment_signed,signature,signature,",
"msgdata,commitment_signed,num_htlcs,u16,",
"msgdata,commitment_signed,htlc_signature,signature,num_htlcs",
"msgtype,revoke_and_ack,133",
"msgdata,revoke_and_ack,channel_id,channel_id,",
"msgdata,revoke_and_ack,per_commitment_secret,byte,32",
"msgdata,revoke_and_ack,next_per_commitment_point,point,",
"msgtype,update_fee,134",
"msgdata,update_fee,channel_id,channel_id,",
"msgdata,update_fee,feerate_per_kw,u32,",
"msgtype,channel_reestablish,136",
"msgdata,channel_reestablish,channel_id,channel_id,",
"msgdata,channel_reestablish,next_commitment_number,u64,",
"msgdata,channel_reestablish,next_revocation_number,u64,",
"msgdata,channel_reestablish,your_last_per_commitment_secret,byte,32",
"msgdata,channel_reestablish,my_current_per_commitment_point,point,",
]

7
contrib/pyln-proto/pyln/proto/message/bolt4/Makefile

@@ -1,7 +0,0 @@
#! /usr/bin/make
SPECDIR := ../../../../../../../lightning-rfc

csv.py: $(SPECDIR)/04-onion-routing.md Makefile
	SPECNUM=`basename $< | sed 's/-.*//'`; (echo csv = '['; python3 $(SPECDIR)/tools/extract-formats.py $< | sed 's/\(.*\)/ "\1",/'; echo ']') > $@
	chmod a+x $@

5
contrib/pyln-proto/pyln/proto/message/bolt4/bolt.py

@@ -1,5 +0,0 @@
from pyln.proto.message import MessageNamespace
from .csv import csv

namespace = MessageNamespace(csv_lines=csv)

55
contrib/pyln-proto/pyln/proto/message/bolt4/csv.py

@@ -1,55 +0,0 @@
csv = [
"tlvtype,tlv_payload,amt_to_forward,2",
"tlvdata,tlv_payload,amt_to_forward,amt_to_forward,tu64,",
"tlvtype,tlv_payload,outgoing_cltv_value,4",
"tlvdata,tlv_payload,outgoing_cltv_value,outgoing_cltv_value,tu32,",
"tlvtype,tlv_payload,short_channel_id,6",
"tlvdata,tlv_payload,short_channel_id,short_channel_id,short_channel_id,",
"tlvtype,tlv_payload,payment_data,8",
"tlvdata,tlv_payload,payment_data,payment_secret,byte,32",
"tlvdata,tlv_payload,payment_data,total_msat,tu64,",
"msgtype,invalid_realm,PERM|1",
"msgtype,temporary_node_failure,NODE|2",
"msgtype,permanent_node_failure,PERM|NODE|2",
"msgtype,required_node_feature_missing,PERM|NODE|3",
"msgtype,invalid_onion_version,BADONION|PERM|4",
"msgdata,invalid_onion_version,sha256_of_onion,sha256,",
"msgtype,invalid_onion_hmac,BADONION|PERM|5",
"msgdata,invalid_onion_hmac,sha256_of_onion,sha256,",
"msgtype,invalid_onion_key,BADONION|PERM|6",
"msgdata,invalid_onion_key,sha256_of_onion,sha256,",
"msgtype,temporary_channel_failure,UPDATE|7",
"msgdata,temporary_channel_failure,len,u16,",
"msgdata,temporary_channel_failure,channel_update,byte,len",
"msgtype,permanent_channel_failure,PERM|8",
"msgtype,required_channel_feature_missing,PERM|9",
"msgtype,unknown_next_peer,PERM|10",
"msgtype,amount_below_minimum,UPDATE|11",
"msgdata,amount_below_minimum,htlc_msat,u64,",
"msgdata,amount_below_minimum,len,u16,",
"msgdata,amount_below_minimum,channel_update,byte,len",
"msgtype,fee_insufficient,UPDATE|12",
"msgdata,fee_insufficient,htlc_msat,u64,",
"msgdata,fee_insufficient,len,u16,",
"msgdata,fee_insufficient,channel_update,byte,len",
"msgtype,incorrect_cltv_expiry,UPDATE|13",
"msgdata,incorrect_cltv_expiry,cltv_expiry,u32,",
"msgdata,incorrect_cltv_expiry,len,u16,",
"msgdata,incorrect_cltv_expiry,channel_update,byte,len",
"msgtype,expiry_too_soon,UPDATE|14",
"msgdata,expiry_too_soon,len,u16,",
"msgdata,expiry_too_soon,channel_update,byte,len",
"msgtype,incorrect_or_unknown_payment_details,PERM|15",
"msgdata,incorrect_or_unknown_payment_details,htlc_msat,u64,",
"msgdata,incorrect_or_unknown_payment_details,height,u32,",
"msgtype,final_incorrect_cltv_expiry,18",
"msgdata,final_incorrect_cltv_expiry,cltv_expiry,u32,",
"msgtype,final_incorrect_htlc_amount,19",
"msgdata,final_incorrect_htlc_amount,incoming_htlc_amt,u64,",
"msgtype,channel_disabled,UPDATE|20",
"msgtype,expiry_too_far,21",
"msgtype,invalid_onion_payload,PERM|22",
"msgdata,invalid_onion_payload,type,varint,",
"msgdata,invalid_onion_payload,offset,u16,",
"msgtype,mpp_timeout,23",
]

7
contrib/pyln-proto/pyln/proto/message/bolt7/Makefile

@@ -1,7 +0,0 @@
#! /usr/bin/make
SPECDIR := ../../../../../../../lightning-rfc

csv.py: $(SPECDIR)/07-routing-gossip.md Makefile
	SPECNUM=`basename $< | sed 's/-.*//'`; (echo csv = '['; python3 $(SPECDIR)/tools/extract-formats.py $< | sed 's/\(.*\)/ "\1",/'; echo ']') > $@
	chmod a+x $@

16
contrib/pyln-proto/pyln/proto/message/bolt7/__init__.py

@@ -1,16 +0,0 @@
from .csv import csv
from .bolt import namespace
import sys

__version__ = '0.0.1'

__all__ = [
    'csv',
    'namespace',
]

mod = sys.modules[__name__]
for d in namespace.subtypes, namespace.tlvtypes, namespace.messagetypes:
    for name in d:
        setattr(mod, name, d[name])
        __all__.append(name)

5
contrib/pyln-proto/pyln/proto/message/bolt7/bolt.py

@@ -1,5 +0,0 @@
from pyln.proto.message import MessageNamespace
from .csv import csv

namespace = MessageNamespace(csv_lines=csv)

83
contrib/pyln-proto/pyln/proto/message/bolt7/csv.py

@@ -1,83 +0,0 @@
csv = [
"msgtype,announcement_signatures,259",
"msgdata,announcement_signatures,channel_id,channel_id,",
"msgdata,announcement_signatures,short_channel_id,short_channel_id,",
"msgdata,announcement_signatures,node_signature,signature,",
"msgdata,announcement_signatures,bitcoin_signature,signature,",
"msgtype,channel_announcement,256",
"msgdata,channel_announcement,node_signature_1,signature,",
"msgdata,channel_announcement,node_signature_2,signature,",
"msgdata,channel_announcement,bitcoin_signature_1,signature,",
"msgdata,channel_announcement,bitcoin_signature_2,signature,",
"msgdata,channel_announcement,len,u16,",
"msgdata,channel_announcement,features,byte,len",
"msgdata,channel_announcement,chain_hash,chain_hash,",
"msgdata,channel_announcement,short_channel_id,short_channel_id,",
"msgdata,channel_announcement,node_id_1,point,",
"msgdata,channel_announcement,node_id_2,point,",
"msgdata,channel_announcement,bitcoin_key_1,point,",
"msgdata,channel_announcement,bitcoin_key_2,point,",
"msgtype,node_announcement,257",
"msgdata,node_announcement,signature,signature,",
"msgdata,node_announcement,flen,u16,",
"msgdata,node_announcement,features,byte,flen",
"msgdata,node_announcement,timestamp,u32,",
"msgdata,node_announcement,node_id,point,",
"msgdata,node_announcement,rgb_color,byte,3",
"msgdata,node_announcement,alias,byte,32",
"msgdata,node_announcement,addrlen,u16,",
"msgdata,node_announcement,addresses,byte,addrlen",
"msgtype,channel_update,258",
"msgdata,channel_update,signature,signature,",
"msgdata,channel_update,chain_hash,chain_hash,",
"msgdata,channel_update,short_channel_id,short_channel_id,",
"msgdata,channel_update,timestamp,u32,",
"msgdata,channel_update,message_flags,byte,",
"msgdata,channel_update,channel_flags,byte,",
"msgdata,channel_update,cltv_expiry_delta,u16,",
"msgdata,channel_update,htlc_minimum_msat,u64,",
"msgdata,channel_update,fee_base_msat,u32,",
"msgdata,channel_update,fee_proportional_millionths,u32,",
"msgdata,channel_update,htlc_maximum_msat,u64,,option_channel_htlc_max",
"msgtype,query_short_channel_ids,261,gossip_queries",
"msgdata,query_short_channel_ids,chain_hash,chain_hash,",
"msgdata,query_short_channel_ids,len,u16,",
"msgdata,query_short_channel_ids,encoded_short_ids,byte,len",
"msgdata,query_short_channel_ids,tlvs,query_short_channel_ids_tlvs,",
"tlvtype,query_short_channel_ids_tlvs,query_flags,1",
"tlvdata,query_short_channel_ids_tlvs,query_flags,encoding_type,u8,",
"tlvdata,query_short_channel_ids_tlvs,query_flags,encoded_query_flags,byte,...",
"msgtype,reply_short_channel_ids_end,262,gossip_queries",
"msgdata,reply_short_channel_ids_end,chain_hash,chain_hash,",
"msgdata,reply_short_channel_ids_end,full_information,byte,",
"msgtype,query_channel_range,263,gossip_queries",
"msgdata,query_channel_range,chain_hash,chain_hash,",
"msgdata,query_channel_range,first_blocknum,u32,",
"msgdata,query_channel_range,number_of_blocks,u32,",
"msgdata,query_channel_range,tlvs,query_channel_range_tlvs,",
"tlvtype,query_channel_range_tlvs,query_option,1",
"tlvdata,query_channel_range_tlvs,query_option,query_option_flags,varint,",
"msgtype,reply_channel_range,264,gossip_queries",
"msgdata,reply_channel_range,chain_hash,chain_hash,",
"msgdata,reply_channel_range,first_blocknum,u32,",
"msgdata,reply_channel_range,number_of_blocks,u32,",
"msgdata,reply_channel_range,full_information,byte,",
"msgdata,reply_channel_range,len,u16,",
"msgdata,reply_channel_range,encoded_short_ids,byte,len",
"msgdata,reply_channel_range,tlvs,reply_channel_range_tlvs,",
"tlvtype,reply_channel_range_tlvs,timestamps_tlv,1",
"tlvdata,reply_channel_range_tlvs,timestamps_tlv,encoding_type,u8,",
"tlvdata,reply_channel_range_tlvs,timestamps_tlv,encoded_timestamps,byte,...",
"tlvtype,reply_channel_range_tlvs,checksums_tlv,3",
"tlvdata,reply_channel_range_tlvs,checksums_tlv,checksums,channel_update_checksums,...",
"subtype,channel_update_timestamps",
"subtypedata,channel_update_timestamps,timestamp_node_id_1,u32,",
"subtypedata,channel_update_timestamps,timestamp_node_id_2,u32,",
"subtype,channel_update_checksums",
"subtypedata,channel_update_checksums,checksum_node_id_1,u32,",
"subtypedata,channel_update_checksums,checksum_node_id_2,u32,",
"msgtype,gossip_timestamp_filter,265,gossip_queries",
"msgdata,gossip_timestamp_filter,chain_hash,chain_hash,",
"msgdata,gossip_timestamp_filter,first_timestamp,u32,",
"msgdata,gossip_timestamp_filter,timestamp_range,u32,",
]

47
contrib/pyln-spec/Makefile

@@ -0,0 +1,47 @@
#! /usr/bin/make
SPECDIR := ../../../lightning-rfc

# This gives us something like 'v1.0-137-gae2d248b7ad8b0965f224c303019ba04c661008f'
GITDESCRIBE := $(shell git -C $(SPECDIR) describe --abbrev=40)
# PEP 440 requires numbers only, but allows -post (setuptools prefers .post though):
VERSION := $(shell echo $(GITDESCRIBE) | sed 's/^v//' | sed 's/-/.post/' | sed 's/-g.*//')
# This maintains -dirty, if present.
GITVERSION := $(shell echo $(GITDESCRIBE) | sed 's/.*-g//')

BOLTS := 1 2 4 7
DIRS := $(foreach b,$(BOLTS),bolt$b)
CODE_DIRS := $(foreach b,$(BOLTS),bolt$b/pyln/spec/bolt$b)

check: $(DIRS:%=check-pytest-%)

check-pytest-%:
	cd $* && pytest

check-source: check-source-flake8 check-source-mypy
check-source-flake8: $(DIRS:%=check-source-flake8-%)
check-source-mypy: $(DIRS:%=check-source-mypy-%)

check-source-flake8-%:
	cd $* && flake8 --ignore=E501,E731,W503 --exclude=gen.py

# mypy . does not recurse. I have no idea why...
check-source-mypy-%:
	cd $* && mypy --ignore-missing-imports `find * -name '*.py'`

refresh: $(CODE_DIRS:%=%/gen_version.py)

bolt1/pyln/spec/bolt1/gen.py: $(SPECDIR)/01-messaging.md Makefile
bolt2/pyln/spec/bolt2/gen.py: $(SPECDIR)/02-peer-protocol.md Makefile
bolt4/pyln/spec/bolt4/gen.py: $(SPECDIR)/04-onion-routing.md Makefile
bolt7/pyln/spec/bolt7/gen.py: $(SPECDIR)/07-routing-gossip.md Makefile

%/gen_version.py: %/gen.py
	echo '__version__ = "$(VERSION)"' > $@
	echo '__gitversion__ = "$(GITVERSION)"' >> $@

# We update iff it has changed.
$(CODE_DIRS:%=%/gen.py):
	@(echo csv = '['; python3 $(SPECDIR)/tools/extract-formats.py $< | sed 's/\(.*\)/ "\1",/'; echo ']') > $@.tmp
	@echo 'desc = "'`head -n1 $< | cut -c3-`'"' >> $@.tmp
	@(echo -n 'text = """'; sed 's,\\,\\\\,g' < $<; echo '"""') >> $@.tmp
	@if cmp $@ $@.tmp >/dev/null 2>&1; then rm $@.tmp; else mv $@.tmp $@; fi
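
For concreteness, a hedged Python rendering of the VERSION/GITVERSION mangling done by the `sed` pipeline above, using the describe string quoted in the Makefile comment:

```python
# Illustration of the sed pipeline above: s/^v//, s/-/.post/ (first '-' only), s/-g.*//
import re

gitdescribe = "v1.0-137-gae2d248b7ad8b0965f224c303019ba04c661008f"

version = re.sub(r"-g.*$", "",
                 gitdescribe.lstrip("v").replace("-", ".post", 1))
gitversion = re.sub(r"^.*-g", "", gitdescribe)   # s/.*-g//

assert version == "1.0.post137"   # a PEP 440 post-release, as the comment notes
assert gitversion == "ae2d248b7ad8b0965f224c303019ba04c661008f"
```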

2
contrib/pyln-proto/pyln/proto/message/bolt2/bolt.py → contrib/pyln-spec/bolt.py

@@ -1,5 +1,5 @@
 from pyln.proto.message import MessageNamespace
-from .csv import csv
+from .gen import csv
 
 namespace = MessageNamespace(csv_lines=csv)

1
contrib/pyln-spec/bolt1/pyln/spec/bolt1/__init__.py

@@ -0,0 +1 @@
../../../../subinit.py

1
contrib/pyln-spec/bolt1/pyln/spec/bolt1/bolt.py

@@ -0,0 +1 @@
../../../../bolt.py

976
contrib/pyln-spec/bolt1/pyln/spec/bolt1/gen.py

@@ -0,0 +1,976 @@
csv = [
"msgtype,init,16",
"msgdata,init,gflen,u16,",
"msgdata,init,globalfeatures,byte,gflen",
"msgdata,init,flen,u16,",
"msgdata,init,features,byte,flen",
"msgdata,init,tlvs,init_tlvs,",
"tlvtype,init_tlvs,networks,1",
"tlvdata,init_tlvs,networks,chains,chain_hash,...",
"msgtype,error,17",
"msgdata,error,channel_id,channel_id,",
"msgdata,error,len,u16,",
"msgdata,error,data,byte,len",
"msgtype,ping,18",
"msgdata,ping,num_pong_bytes,u16,",
"msgdata,ping,byteslen,u16,",
"msgdata,ping,ignored,byte,byteslen",
"msgtype,pong,19",
"msgdata,pong,byteslen,u16,",
"msgdata,pong,ignored,byte,byteslen",
"tlvtype,n1,tlv1,1",
"tlvdata,n1,tlv1,amount_msat,tu64,",
"tlvtype,n1,tlv2,2",
"tlvdata,n1,tlv2,scid,short_channel_id,",
"tlvtype,n1,tlv3,3",
"tlvdata,n1,tlv3,node_id,point,",
"tlvdata,n1,tlv3,amount_msat_1,u64,",
"tlvdata,n1,tlv3,amount_msat_2,u64,",
"tlvtype,n1,tlv4,254",
"tlvdata,n1,tlv4,cltv_delta,u16,",
"tlvtype,n2,tlv1,0",
"tlvdata,n2,tlv1,amount_msat,tu64,",
"tlvtype,n2,tlv2,11",
"tlvdata,n2,tlv2,cltv_expiry,tu32,",
]
desc = "BOLT #1: Base Protocol"
text = """# BOLT #1: Base Protocol
## Overview
This protocol assumes an underlying authenticated and ordered transport mechanism that takes care of framing individual messages.
[BOLT #8](08-transport.md) specifies the canonical transport layer used in Lightning, though it can be replaced by any transport that fulfills the above guarantees.
The default TCP port is 9735. This corresponds to hexadecimal `0x2607`: the Unicode code point for LIGHTNING.<sup>[1](#reference-1)</sup>
All data fields are unsigned big-endian unless otherwise specified.
## Table of Contents
* [Connection Handling and Multiplexing](#connection-handling-and-multiplexing)
* [Lightning Message Format](#lightning-message-format)
* [Type-Length-Value Format](#type-length-value-format)
* [Fundamental Types](#fundamental-types)
* [Setup Messages](#setup-messages)
* [The `init` Message](#the-init-message)
* [The `error` Message](#the-error-message)
* [Control Messages](#control-messages)
* [The `ping` and `pong` Messages](#the-ping-and-pong-messages)
* [Appendix A: BigSize Test Vectors](#appendix-a-bigsize-test-vectors)
* [Appendix B: Type-Length-Value Test Vectors](#appendix-b-type-length-value-test-vectors)
* [Appendix C: Message Extension](#appendix-c-message-extension)
* [Acknowledgments](#acknowledgments)
* [References](#references)
* [Authors](#authors)
## Connection Handling and Multiplexing
Implementations MUST use a single connection per peer; channel messages (which include a channel ID) are multiplexed over this single connection.
## Lightning Message Format
After decryption, all Lightning messages are of the form:
1. `type`: a 2-byte big-endian field indicating the type of message
2. `payload`: a variable-length payload that comprises the remainder of
the message and that conforms to a format matching the `type`
3. `extension`: an optional [TLV stream](#type-length-value-format)
The `type` field indicates how to interpret the `payload` field.
The format for each individual type is defined by a specification in this repository.
The type follows the _it's ok to be odd_ rule, so nodes MAY send _odd_-numbered types without ascertaining that the recipient understands it.
The messages are grouped logically into five groups, ordered by the most significant bit that is set:
- Setup & Control (types `0`-`31`): messages related to connection setup, control, supported features, and error reporting (described below)
- Channel (types `32`-`127`): messages used to setup and tear down micropayment channels (described in [BOLT #2](02-peer-protocol.md))
- Commitment (types `128`-`255`): messages related to updating the current commitment transaction, which includes adding, revoking, and settling HTLCs as well as updating fees and exchanging signatures (described in [BOLT #2](02-peer-protocol.md))
- Routing (types `256`-`511`): messages containing node and channel announcements, as well as any active route exploration (described in [BOLT #7](07-routing-gossip.md))
- Custom (types `32768`-`65535`): experimental and application-specific messages
The size of the message is required by the transport layer to fit into a 2-byte unsigned int; therefore, the maximum possible size is 65535 bytes.
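A minimal Python sketch of this layout (`split_message` is an illustrative helper, not part of the specification), checked against the base `init` message from Appendix C:
```python
# Sketch: split a decrypted Lightning message into its 2-byte big-endian type
# and the remaining payload (which may end in an optional TLV extension).
import struct

def split_message(msg: bytes):
    if len(msg) < 2:
        raise ValueError("too short for the 2-byte type field")
    (msg_type,) = struct.unpack(">H", msg[:2])
    return msg_type, msg[2:]

t, payload = split_message(bytes.fromhex("001000000000"))  # base `init` from Appendix C
assert t == 16                                  # `init`
assert payload == bytes.fromhex("00000000")     # gflen=0, flen=0
```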
A sending node:
- MUST NOT send an evenly-typed message not listed here without prior negotiation.
- MUST NOT send evenly-typed TLV records in the `extension` without prior negotiation.
- that negotiates an option in this specification:
- MUST include all the fields annotated with that option.
- When defining custom messages:
- SHOULD pick a random `type` to avoid collision with other custom types.
- SHOULD pick a `type` that doesn't conflict with other experiments listed in [this issue](https://github.com/lightningnetwork/lightning-rfc/issues/716).
- SHOULD pick odd `type` identifiers when regular nodes should ignore the
additional data.
- SHOULD pick even `type` identifiers when regular nodes should reject
the message and close the connection.
A receiving node:
- upon receiving a message of _odd_, unknown type:
- MUST ignore the received message.
- upon receiving a message of _even_, unknown type:
- MUST close the connection.
- MAY fail the channels.
- upon receiving a known message with insufficient length for the contents:
- MUST close the connection.
- MAY fail the channels.
- upon receiving a message with an `extension`:
- MAY ignore the `extension`.
- Otherwise, if the `extension` is invalid:
- MUST close the connection.
- MAY fail the channels.
### Rationale
By default `SHA2` and Bitcoin public keys are both encoded as
big endian, thus it would be unusual to use a different endian for
other fields.
Length is limited to 65535 bytes by the cryptographic wrapping, and
messages in the protocol are never more than that length anyway.
The _it's ok to be odd_ rule allows for future optional extensions
without negotiation or special coding in clients. The _extension_ field
similarly allows for future expansion by letting senders include additional
TLV data. Note that an _extension_ field can only be added when the message
`payload` doesn't already fill the 65535 bytes maximum length.
Implementations may prefer to have message data aligned on an 8-byte
boundary (the largest natural alignment requirement of any type here);
however, adding a 6-byte padding after the type field was considered
wasteful: alignment may be achieved by decrypting the message into
a buffer with 6-bytes of pre-padding.
## Type-Length-Value Format
Throughout the protocol, a TLV (Type-Length-Value) format is used to allow for
the backwards-compatible addition of new fields to existing message types.
A `tlv_record` represents a single field, encoded in the form:
* [`bigsize`: `type`]
* [`bigsize`: `length`]
* [`length`: `value`]
A `tlv_stream` is a series of (possibly zero) `tlv_record`s, represented as the
concatenation of the encoded `tlv_record`s. When used to extend existing
messages, a `tlv_stream` is typically placed after all currently defined fields.
The `type` is encoded using the BigSize format. It functions as a
message-specific, 64-bit identifier for the `tlv_record` determining how the
contents of `value` should be decoded. `type` identifiers below 2^16 are
reserved for use in this specification. `type` identifiers greater than or equal
to 2^16 are available for custom records. Any record not defined in this
specification is considered a custom record. This includes experimental and
application-specific messages.
The `length` is encoded using the BigSize format signaling the size of
`value` in bytes.
The `value` depends entirely on the `type`, and should be encoded or decoded
according to the message-specific format determined by `type`.
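A minimal sketch of this record layout (`bigsize` and `tlv_record` are illustrative helpers; the BigSize rules are those of Appendix A):
```python
# Sketch: a tlv_record is [bigsize type][bigsize length][value].
def bigsize(x: int) -> bytes:
    if x < 0xfd:
        return bytes([x])
    if x < 0x10000:
        return b"\xfd" + x.to_bytes(2, "big")
    if x < 0x100000000:
        return b"\xfe" + x.to_bytes(4, "big")
    return b"\xff" + x.to_bytes(8, "big")

def tlv_record(t: int, value: bytes) -> bytes:
    return bigsize(t) + bigsize(len(value)) + value

# Matches the `n1` test vector in Appendix B: type 1 (`tlv1`), amount_msat=1 -> 0x01 01 01.
assert tlv_record(1, b"\x01") == bytes.fromhex("010101")
```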
### Requirements
The sending node:
- MUST order `tlv_record`s in a `tlv_stream` by monotonically-increasing `type`.
- MUST minimally encode `type` and `length`.
- When defining custom record `type` identifiers:
- SHOULD pick random `type` identifiers to avoid collision with other
custom types.
- SHOULD pick odd `type` identifiers when regular nodes should ignore the
additional data.
- SHOULD pick even `type` identifiers when regular nodes should reject the
full tlv stream containing the custom record.
- SHOULD NOT use redundant, variable-length encodings in a `tlv_record`.
The receiving node:
- if zero bytes remain before parsing a `type`:
- MUST stop parsing the `tlv_stream`.
- if a `type` or `length` is not minimally encoded:
- MUST fail to parse the `tlv_stream`.
- if decoded `type`s are not monotonically-increasing:
- MUST fail to parse the `tlv_stream`.
- if `length` exceeds the number of bytes remaining in the message:
- MUST fail to parse the `tlv_stream`.
- if `type` is known:
- MUST decode the next `length` bytes using the known encoding for `type`.
- if `length` is not exactly equal to that required for the known encoding for `type`:
- MUST fail to parse the `tlv_stream`.
- if variable-length fields within the known encoding for `type` are not minimal:
- MUST fail to parse the `tlv_stream`.
- otherwise, if `type` is unknown:
- if `type` is even:
- MUST fail to parse the `tlv_stream`.
- otherwise, if `type` is odd:
- MUST discard the next `length` bytes.
### Rationale
The primary advantage in using TLV is that a reader is able to ignore new fields
that it does not understand, since each field carries the exact size of the
encoded element. Without TLV, even if a node does not wish to use a particular
field, the node is forced to add parsing logic for that field in order to
determine the offset of any fields that follow.
The monotonicity constraint ensures that all `type`s are unique and can appear
at most once. Fields that map to complex objects, e.g. vectors, maps, or
structs, should do so by defining the encoding such that the object is
serialized within a single `tlv_record`. The uniqueness constraint, among other
things, enables the following optimizations:
- canonical ordering is defined independent of the encoded `value`s.
- canonical ordering can be known at compile-time, rather than being determined
dynamically at the time of encoding.
- verifying canonical ordering requires less state and is less-expensive.
- variable-size fields can reserve their expected size up front, rather than
appending elements sequentially and incurring double-and-copy overhead.
The use of a bigsize for `type` and `length` permits a space savings for small
`type`s or short `value`s. This potentially leaves more space for application
data over the wire or in an onion payload.
All `type`s must appear in increasing order to create a canonical encoding of
the underlying `tlv_record`s. This is crucial when computing signatures over a
`tlv_stream`, as it ensures verifiers will be able to recompute the same message
digest as the signer. Note that the canonical ordering over the set of fields
can be enforced even if the verifier does not understand what the fields
contain.
Writers should avoid using redundant, variable-length encodings in a
`tlv_record` since this results in encoding the length twice and complicates
computing the outer length. As an example, when writing a variable length byte
array, the `value` should contain only the raw bytes and forgo an additional
internal length since the `tlv_record` already carries the number of bytes that
follow. On the other hand, if a `tlv_record` contains multiple, variable-length
elements then this would not be considered redundant, and is needed to allow the
receiver to parse individual elements from `value`.
## Fundamental Types
Various fundamental types are referred to in the message specifications:
* `byte`: an 8-bit byte
* `u16`: a 2 byte unsigned integer
* `u32`: a 4 byte unsigned integer
* `u64`: an 8 byte unsigned integer
Inside TLV records which contain a single value, leading zeros in
integers can be omitted:
* `tu16`: a 0 to 2 byte unsigned integer
* `tu32`: a 0 to 4 byte unsigned integer
* `tu64`: a 0 to 8 byte unsigned integer
The following convenience types are also defined:
* `chain_hash`: a 32-byte chain identifier (see [BOLT #0](00-introduction.md#glossary-and-terminology-guide))
* `channel_id`: a 32-byte channel_id (see [BOLT #2](02-peer-protocol.md#definition-of-channel-id))
* `sha256`: a 32-byte SHA2-256 hash
* `signature`: a 64-byte bitcoin Elliptic Curve signature
* `point`: a 33-byte Elliptic Curve point (compressed encoding as per [SEC 1 standard](http://www.secg.org/sec1-v2.pdf#subsubsection.2.3.3))
* `short_channel_id`: an 8 byte value identifying a channel (see [BOLT #7](07-routing-gossip.md#definition-of-short-channel-id))
* `bigsize`: a variable-length, unsigned integer similar to Bitcoin's CompactSize encoding, but big-endian. Described in [BigSize](#appendix-a-bigsize-test-vectors).
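As a small illustration of the truncated-integer rule above (`encode_truncated` is an illustrative helper), leading zero bytes are simply dropped:
```python
# Sketch: tu16/tu32/tu64 encode as the plain big-endian integer minus leading zeros.
def encode_truncated(value: int, max_bytes: int) -> bytes:
    return value.to_bytes(max_bytes, "big").lstrip(b"\x00")

assert encode_truncated(0, 8) == b""             # matches Appendix B vector 0x01 00
assert encode_truncated(256, 8) == b"\x01\x00"   # matches Appendix B vector 0x01 02 0100
```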
## Setup Messages
### The `init` Message
Once authentication is complete, the first message reveals the features supported or required by this node, even if this is a reconnection.
[BOLT #9](09-features.md) specifies lists of features. Each feature is generally represented by 2 bits. The least-significant bit is numbered 0, which is _even_, and the next most significant bit is numbered 1, which is _odd_. For historical reasons, features are divided into global and local feature bitmasks.
The `features` field MUST be padded to bytes with 0s.
1. type: 16 (`init`)
2. data:
* [`u16`:`gflen`]
* [`gflen*byte`:`globalfeatures`]
* [`u16`:`flen`]
* [`flen*byte`:`features`]
* [`init_tlvs`:`tlvs`]
1. tlvs: `init_tlvs`
2. types:
1. type: 1 (`networks`)
2. data:
* [`...*chain_hash`:`chains`]
The optional `networks` indicates the chains the node is interested in.
#### Requirements
The sending node:
- MUST send `init` as the first Lightning message for any connection.
- MUST set feature bits as defined in [BOLT #9](09-features.md).
- MUST set any undefined feature bits to 0.
- SHOULD NOT set features greater than 13 in `globalfeatures`.
- SHOULD use the minimum length required to represent the `features` field.
- SHOULD set `networks` to all chains it will gossip or open channels for.
The receiving node:
- MUST wait to receive `init` before sending any other messages.
- MUST combine (logical OR) the two feature bitmaps into one logical `features` map.
- MUST respond to known feature bits as specified in [BOLT #9](09-features.md).
- upon receiving unknown _odd_ feature bits that are non-zero:
- MUST ignore the bit.
- upon receiving unknown _even_ feature bits that are non-zero:
- MUST fail the connection.
- upon receiving `networks` containing no common chains
- MAY fail the connection.
- if the feature vector does not set all known, transitive dependencies:
- MUST fail the connection.
#### Rationale
There used to be two feature bitfields here, but for backwards compatibility they're now
combined into one.
This semantic allows both future incompatible changes and future backward compatible changes. Bits should generally be assigned in pairs, in order that optional features may later become compulsory.
Nodes wait for receipt of the other's features to simplify error
diagnosis when features are incompatible.
Since all networks share the same port, but most implementations only
support a single network, the `networks` field avoids nodes
erroneously believing they will receive updates about their preferred
network, or that they can open channels.
### The `error` Message
For simplicity of diagnosis, it's often useful to tell a peer that something is incorrect.
1. type: 17 (`error`)
2. data:
* [`channel_id`:`channel_id`]
* [`u16`:`len`]
* [`len*byte`:`data`]
The 2-byte `len` field indicates the number of bytes in the immediately following field.
#### Requirements
The channel is referred to by `channel_id`, unless `channel_id` is 0 (i.e. all bytes are 0), in which case it refers to all channels.
The funding node:
- for all error messages sent before (and including) the `funding_created` message:
- MUST use `temporary_channel_id` in lieu of `channel_id`.
The fundee node:
- for all error messages sent before (and not including) the `funding_signed` message:
- MUST use `temporary_channel_id` in lieu of `channel_id`.
A sending node:
- when sending `error`:
- MUST fail the channel referred to by the error message.
- SHOULD send `error` for protocol violations or internal errors that make channels unusable or that make further communication unusable.
- SHOULD send `error` with the unknown `channel_id` in reply to messages of type `32`-`255` related to unknown channels.
- MAY send an empty `data` field.
- when failure was caused by an invalid signature check:
- SHOULD include the raw, hex-encoded transaction in reply to a `funding_created`, `funding_signed`, `closing_signed`, or `commitment_signed` message.
- when `channel_id` is 0:
- MUST fail all channels with the receiving node.
- MUST close the connection.
- MUST set `len` equal to the length of `data`.
The receiving node:
- upon receiving `error`:
- MUST fail the channel referred to by the error message, if that channel is with the sending node.
- if no existing channel is referred to by the message:
- MUST ignore the message.
- MUST truncate `len` to the remainder of the packet (if it's larger).
- if `data` is not composed solely of printable ASCII characters (For reference: the printable character set includes byte values 32 through 126, inclusive):
- SHOULD NOT print out `data` verbatim.
#### Rationale
There are unrecoverable errors that require an abort of conversations;
if the connection is simply dropped, then the peer may retry the
connection. It's also useful to describe protocol violations for
diagnosis, as this indicates that one peer has a bug.
It may be wise not to distinguish errors in production settings, lest it
leak information; hence, the optional `data` field.
## Control Messages
### The `ping` and `pong` Messages
In order to allow for the existence of long-lived TCP connections, at
times it may be required that both ends keep alive the TCP connection at the
application level. Such messages also allow obfuscation of traffic patterns.
1. type: 18 (`ping`)
2. data:
* [`u16`:`num_pong_bytes`]
* [`u16`:`byteslen`]
* [`byteslen*byte`:`ignored`]
The `pong` message is to be sent whenever a `ping` message is received. It
serves as a reply and also serves to keep the connection alive, while
explicitly notifying the other end that the receiver is still active. Within
the received `ping` message, the sender will specify the number of bytes to be
included within the data payload of the `pong` message.
1. type: 19 (`pong`)
2. data:
* [`u16`:`byteslen`]
* [`byteslen*byte`:`ignored`]
#### Requirements
A node sending a `ping` message:
- SHOULD set `ignored` to 0s.
- MUST NOT set `ignored` to sensitive data such as secrets or portions of initialized
memory.
- if it doesn't receive a corresponding `pong`:
- MAY terminate the network connection,
- and MUST NOT fail the channels in this case.
- SHOULD NOT send `ping` messages more often than once every 30 seconds.
A node sending a `pong` message:
- SHOULD set `ignored` to 0s.
- MUST NOT set `ignored` to sensitive data such as secrets or portions of initialized
memory.
A node receiving a `ping` message:
- SHOULD fail the channels if it has received significantly in excess of one `ping` per 30 seconds.
- if `num_pong_bytes` is less than 65532:
- MUST respond by sending a `pong` message, with `byteslen` equal to `num_pong_bytes`.
- otherwise (`num_pong_bytes` is **not** less than 65532):
- MUST ignore the `ping`.
A node receiving a `pong` message:
- if `byteslen` does not correspond to any `ping`'s `num_pong_bytes` value it has sent:
- MAY fail the channels.
### Rationale
The largest possible message is 65535 bytes; thus, the maximum sensible `byteslen`
is 65531 in order to account for the type field (`pong`) and the `byteslen` itself. This allows
a convenient cutoff for `num_pong_bytes` to indicate that no reply should be sent.
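A tiny sketch of that cutoff (`pong_byteslen` is an illustrative helper):
```python
# Sketch: reply with byteslen = num_pong_bytes, unless the sender asked for a
# size that could not fit (>= 65532), which means "no reply wanted".
def pong_byteslen(num_pong_bytes: int):
    return num_pong_bytes if num_pong_bytes < 65532 else None

assert pong_byteslen(65531) == 65531   # largest sensible reply
assert pong_byteslen(65532) is None    # ignore the ping
```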
Connections between nodes within the network may be long lived, as payment
channels have an indefinite lifetime. However, it's likely that no new data will
be exchanged for a significant portion of a connection's lifetime. Also, on
several platforms it's possible that Lightning clients will be put to sleep
without prior warning. Hence, a distinct `ping` message is used, in order to
probe for the liveness of the connection on the other side, as well as to keep
the established connection active.
Additionally, the ability for a sender to request that the receiver send a
response with a particular number of bytes enables nodes on the network to
create _synthetic_ traffic. Such traffic can be used to partially defend
against packet and timing analysis as nodes can fake the traffic patterns of
typical exchanges without applying any true updates to their respective
channels.
When combined with the onion routing protocol defined in
[BOLT #4](04-onion-routing.md),
careful statistically driven synthetic traffic can serve to further bolster the
privacy of participants within the network.
Limited precautions are recommended against `ping` flooding, however some
latitude is given because of network delays. Note that there are other methods
of incoming traffic flooding (e.g. sending _odd_ unknown message types, or padding
every message maximally).
Finally, the usage of periodic `ping` messages serves to promote frequent key
rotations as specified within [BOLT #8](08-transport.md).
## Appendix A: BigSize Test Vectors
The following test vectors can be used to assert the correctness of a BigSize
implementation used in the TLV format. The format is identical to the
CompactSize encoding used in bitcoin, but replaces the little-endian encoding of
multi-byte values with big-endian.
Values encoded with BigSize will produce an encoding of either 1, 3, 5, or 9
bytes depending on the size of the integer. The encoding is a piece-wise
function that takes a `uint64` value `x` and produces:
```
uint8(x) if x < 0xfd
0xfd + be16(uint16(x)) if x < 0x10000
0xfe + be32(uint32(x)) if x < 0x100000000
0xff + be64(x) otherwise.
```
Here `+` denotes concatenation and `be16`, `be32`, and `be64` produce a
big-endian encoding of the input for 16, 32, and 64-bit integers, respectively.
A value is said to be _minimally encoded_ if it could not be encoded using
fewer bytes. For example, a BigSize encoding that occupies 5 bytes
but whose value is less than 0x10000 is not minimally encoded. All values
decoded with BigSize should be checked to ensure they are minimally encoded.
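A minimal sketch of a decoder that enforces this minimal-encoding rule (`read_bigsize` is an illustrative helper returning the value and the number of bytes consumed), checked against two of the vectors below:
```python
# Sketch: decode a BigSize, rejecting non-minimal ("not canonical") encodings.
def read_bigsize(data: bytes):
    if len(data) == 0:
        raise ValueError("EOF")
    prefix = data[0]
    if prefix < 0xfd:
        return prefix, 1
    width, minimum = {0xfd: (2, 0xfd), 0xfe: (4, 0x10000), 0xff: (8, 0x100000000)}[prefix]
    if len(data) < 1 + width:
        raise ValueError("unexpected EOF")
    value = int.from_bytes(data[1:1 + width], "big")
    if value < minimum:
        raise ValueError("decoded bigsize is not canonical")
    return value, 1 + width

assert read_bigsize(bytes.fromhex("fd00fd")) == (253, 3)        # "two byte low"
assert read_bigsize(bytes.fromhex("fe00010000")) == (65536, 5)  # "four byte low"
```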
### BigSize Decoding Tests
The following is an example of how to execute the BigSize decoding tests.
```golang
func testReadBigSize(t *testing.T, test bigSizeTest) {
var buf [8]byte
r := bytes.NewReader(test.Bytes)
val, err := tlv.ReadBigSize(r, &buf)
if err != nil && err.Error() != test.ExpErr {
t.Fatalf("expected decoding error: %v, got: %v",
test.ExpErr, err)
}
// If we expected a decoding error, there's no point checking the value.
if test.ExpErr != "" {
return
}
if val != test.Value {
t.Fatalf("expected value: %d, got %d", test.Value, val)
}
}
```
A correct implementation should pass against these test vectors:
```json
[
{
"name": "zero",
"value": 0,
"bytes": "00"
},
{
"name": "one byte high",
"value": 252,
"bytes": "fc"
},
{
"name": "two byte low",
"value": 253,
"bytes": "fd00fd"
},
{
"name": "two byte high",
"value": 65535,
"bytes": "fdffff"
},
{
"name": "four byte low",
"value": 65536,
"bytes": "fe00010000"
},
{
"name": "four byte high",
"value": 4294967295,
"bytes": "feffffffff"
},
{
"name": "eight byte low",
"value": 4294967296,
"bytes": "ff0000000100000000"
},
{
"name": "eight byte high",
"value": 18446744073709551615,
"bytes": "ffffffffffffffffff"
},
{
"name": "two byte not canonical",
"value": 0,
"bytes": "fd00fc",
"exp_error": "decoded bigsize is not canonical"
},
{
"name": "four byte not canonical",
"value": 0,
"bytes": "fe0000ffff",
"exp_error": "decoded bigsize is not canonical"
},
{
"name": "eight byte not canonical",
"value": 0,
"bytes": "ff00000000ffffffff",
"exp_error": "decoded bigsize is not canonical"
},
{
"name": "two byte short read",
"value": 0,
"bytes": "fd00",
"exp_error": "unexpected EOF"
},
{
"name": "four byte short read",
"value": 0,
"bytes": "feffff",
"exp_error": "unexpected EOF"
},
{
"name": "eight byte short read",
"value": 0,
"bytes": "ffffffffff",
"exp_error": "unexpected EOF"
},
{
"name": "one byte no read",
"value": 0,
"bytes": "",
"exp_error": "EOF"
},
{
"name": "two byte no read",
"value": 0,
"bytes": "fd",
"exp_error": "unexpected EOF"
},
{
"name": "four byte no read",
"value": 0,
"bytes": "fe",
"exp_error": "unexpected EOF"
},
{
"name": "eight byte no read",
"value": 0,
"bytes": "ff",
"exp_error": "unexpected EOF"
}
]
```
### BigSize Encoding Tests
The following is an example of how to execute the BigSize encoding tests.
```golang
func testWriteBigSize(t *testing.T, test bigSizeTest) {
var (
w bytes.Buffer
buf [8]byte
)
err := tlv.WriteBigSize(&w, test.Value, &buf)
if err != nil {
t.Fatalf("unable to encode %d as bigsize: %v",
test.Value, err)
}
if bytes.Compare(w.Bytes(), test.Bytes) != 0 {
t.Fatalf("expected bytes: %v, got %v",
test.Bytes, w.Bytes())
}
}
```
A correct implementation should pass against the following test vectors:
```json
[
{
"name": "zero",
"value": 0,
"bytes": "00"
},
{
"name": "one byte high",
"value": 252,
"bytes": "fc"
},
{
"name": "two byte low",
"value": 253,
"bytes": "fd00fd"
},
{
"name": "two byte high",
"value": 65535,
"bytes": "fdffff"
},
{
"name": "four byte low",
"value": 65536,
"bytes": "fe00010000"
},
{
"name": "four byte high",
"value": 4294967295,
"bytes": "feffffffff"
},
{
"name": "eight byte low",
"value": 4294967296,
"bytes": "ff0000000100000000"
},
{
"name": "eight byte high",
"value": 18446744073709551615,
"bytes": "ffffffffffffffffff"
}
]
```
## Appendix B: Type-Length-Value Test Vectors
The following tests assume that two separate TLV namespaces exist: n1 and n2.
The n1 namespace supports the following TLV types:
1. tlvs: `n1`
2. types:
1. type: 1 (`tlv1`)
2. data:
* [`tu64`:`amount_msat`]
1. type: 2 (`tlv2`)
2. data:
* [`short_channel_id`:`scid`]
1. type: 3 (`tlv3`)
2. data:
* [`point`:`node_id`]
* [`u64`:`amount_msat_1`]
* [`u64`:`amount_msat_2`]
1. type: 254 (`tlv4`)
2. data:
* [`u16`:`cltv_delta`]
The n2 namespace supports the following TLV types:
1. tlvs: `n2`
2. types:
1. type: 0 (`tlv1`)
2. data:
* [`tu64`:`amount_msat`]
1. type: 11 (`tlv2`)
2. data:
* [`tu32`:`cltv_expiry`]
### TLV Decoding Failures
The following TLV streams in any namespace should trigger a decoding failure:
1. Invalid stream: 0xfd
2. Reason: type truncated
1. Invalid stream: 0xfd01
2. Reason: type truncated
1. Invalid stream: 0xfd0001 00
2. Reason: not minimally encoded type
1. Invalid stream: 0xfd0101
2. Reason: missing length
1. Invalid stream: 0x0f fd
2. Reason: (length truncated)
1. Invalid stream: 0x0f fd26
2. Reason: (length truncated)
1. Invalid stream: 0x0f fd2602
2. Reason: missing value
1. Invalid stream: 0x0f fd0001 00
2. Reason: not minimally encoded length
1. Invalid stream: 0x0f fd0201 000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
2. Reason: value truncated
The following TLV streams in either namespace should trigger a
decoding failure:
1. Invalid stream: 0x12 00
2. Reason: unknown even type.
1. Invalid stream: 0xfd0102 00
2. Reason: unknown even type.
1. Invalid stream: 0xfe01000002 00
2. Reason: unknown even type.
1. Invalid stream: 0xff0100000000000002 00
2. Reason: unknown even type.
The following TLV streams in namespace `n1` should trigger a decoding
failure:
1. Invalid stream: 0x01 09 ffffffffffffffffff
2. Reason: greater than encoding length for `n1`s `tlv1`.
1. Invalid stream: 0x01 01 00
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 02 0001
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 03 000100
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 04 00010000
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 05 0001000000
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 06 000100000000
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 07 00010000000000
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x01 08 0001000000000000
2. Reason: encoding for `n1`s `tlv1`s `amount_msat` is not minimal
1. Invalid stream: 0x02 07 01010101010101
2. Reason: less than encoding length for `n1`s `tlv2`.
1. Invalid stream: 0x02 09 010101010101010101
2. Reason: greater than encoding length for `n1`s `tlv2`.
1. Invalid stream: 0x03 21 023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb
2. Reason: less than encoding length for `n1`s `tlv3`.
1. Invalid stream: 0x03 29 023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001
2. Reason: less than encoding length for `n1`s `tlv3`.
1. Invalid stream: 0x03 30 023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb000000000000000100000000000001
2. Reason: less than encoding length for `n1`s `tlv3`.
1. Invalid stream: 0x03 31 043da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002
2. Reason: `n1`s `node_id` is not a valid point.
1. Invalid stream: 0x03 32 023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb0000000000000001000000000000000001
2. Reason: greater than encoding length for `n1`s `tlv3`.
1. Invalid stream: 0xfd00fe 00
2. Reason: less than encoding length for `n1`s `tlv4`.
1. Invalid stream: 0xfd00fe 01 01
2. Reason: less than encoding length for `n1`s `tlv4`.
1. Invalid stream: 0xfd00fe 03 010101
2. Reason: greater than encoding length for `n1`s `tlv4`.
1. Invalid stream: 0x00 00
2. Reason: unknown even field for `n1`s namespace.
### TLV Decoding Successes
The following TLV streams in either namespace should correctly decode,
and be ignored:
1. Valid stream: 0x
2. Explanation: empty message
1. Valid stream: 0x21 00
2. Explanation: Unknown odd type.
1. Valid stream: 0xfd0201 00
2. Explanation: Unknown odd type.
1. Valid stream: 0xfd00fd 00
2. Explanation: Unknown odd type.
1. Valid stream: 0xfd00ff 00
2. Explanation: Unknown odd type.
1. Valid stream: 0xfe02000001 00
2. Explanation: Unknown odd type.
1. Valid stream: 0xff0200000000000001 00
2. Explanation: Unknown odd type.
The following TLV streams in `n1` namespace should correctly decode,
with the values given here:
1. Valid stream: 0x01 00
2. Values: `tlv1` `amount_msat`=0
1. Valid stream: 0x01 01 01
2. Values: `tlv1` `amount_msat`=1
1. Valid stream: 0x01 02 0100
2. Values: `tlv1` `amount_msat`=256
1. Valid stream: 0x01 03 010000
2. Values: `tlv1` `amount_msat`=65536
1. Valid stream: 0x01 04 01000000
2. Values: `tlv1` `amount_msat`=16777216
1. Valid stream: 0x01 05 0100000000
2. Values: `tlv1` `amount_msat`=4294967296
1. Valid stream: 0x01 06 010000000000
2. Values: `tlv1` `amount_msat`=1099511627776
1. Valid stream: 0x01 07 01000000000000
2. Values: `tlv1` `amount_msat`=281474976710656
1. Valid stream: 0x01 08 0100000000000000
2. Values: `tlv1` `amount_msat`=72057594037927936
1. Valid stream: 0x02 08 0000000000000226
2. Values: `tlv2` `scid`=0x0x550
1. Valid stream: 0x03 31 023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb00000000000000010000000000000002
2. Values: `tlv3` `node_id`=023da092f6980e58d2c037173180e9a465476026ee50f96695963e8efe436f54eb `amount_msat_1`=1 `amount_msat_2`=2
1. Valid stream: 0xfd00fe 02 0226
2. Values: `tlv4` `cltv_delta`=550
### TLV Stream Decoding Failure
Any appending of an invalid stream to a valid stream should trigger
a decoding failure.
Any appending of a higher-numbered valid stream to a lower-numbered
valid stream should not trigger a decoding failure.
In addition, the following TLV streams in namespace `n1` should
trigger a decoding failure:
1. Invalid stream: 0x02 08 0000000000000226 01 01 2a
2. Reason: valid TLV records but invalid ordering
1. Invalid stream: 0x02 08 0000000000000231 02 08 0000000000000451
2. Reason: duplicate TLV type
1. Invalid stream: 0x1f 00 0f 01 2a
2. Reason: valid (ignored) TLV records but invalid ordering
1. Invalid stream: 0x1f 00 1f 01 2a
2. Reason: duplicate TLV type (ignored)
The following TLV stream in namespace `n2` should trigger a decoding
failure:
1. Invalid stream: 0xffffffffffffffffff 00 00 00
2. Reason: valid TLV records but invalid ordering
## Appendix C: Message Extension
This section contains examples of valid and invalid extensions on the `init`
message. The base `init` message (without extensions) for these examples is
`0x001000000000` (all features turned off).
The following `init` messages are valid:
- `0x001000000000`: no extension provided
- `0x00100000000001012a030104`: the extension contains two _odd_ TLV records (with types `0x01` and `0x03`)
The following `init` messages are invalid:
- `0x00100000000001`: the extension is present but truncated
- `0x00100000000002012a`: the extension contains unknown _even_ TLV records (assuming that TLV type `0x02` is unknown)
- `0x001000000000010101010102`: the extension TLV stream is invalid (duplicate TLV record type `0x01`)
Note that when messages are signed, the _extension_ is part of the signed bytes.
Nodes should store the _extension_ bytes even if they don't understand them to
be able to correctly verify signatures.
## Acknowledgments
[ TODO: (roasbeef); fin ]
## References
1. <a id="reference-1">http://www.unicode.org/charts/PDF/U2600.pdf</a>
## Authors
[ FIXME: Insert Author List ]
![Creative Commons License](https://i.creativecommons.org/l/by/4.0/88x31.png "License CC-BY")
<br>
This work is licensed under a [Creative Commons Attribution 4.0 International License](http://creativecommons.org/licenses/by/4.0/).
"""

2
contrib/pyln-spec/bolt1/pyln/spec/bolt1/gen_version.py

@@ -0,0 +1,2 @@
__version__ = "1.0.post137"
__gitversion__ = "9e8e29af9b9a922eb114b2c716205d0772946e56"

1
contrib/pyln-spec/bolt1/requirements.txt

@@ -0,0 +1 @@
../requirements.txt

23
contrib/pyln-spec/bolt1/setup.py

@@ -0,0 +1,23 @@
from pyln.spec.bolt1 import __version__, desc
from setuptools import setup
import io

with io.open('requirements.txt', encoding='utf-8') as f:
    requirements = [r for r in f.read().split('\n') if len(r)]


def do_setup(boltnum: int, version: str, desc: str):
    setup(name='pyln-bolt{}'.format(boltnum),
          version=version,
          description=desc,
          url='http://github.com/ElementsProject/lightning',
          author='Rusty Russell',
          author_email='rusty@rustcorp.com.au',
          license='MIT',
          packages=['pyln.spec.bolt{}'.format(boltnum)],
          scripts=[],
          zip_safe=True,
          install_requires=requirements)


do_setup(1, __version__, desc)

2
contrib/pyln-proto/tests/test_bolt1.py → contrib/pyln-spec/bolt1/tests/test_bolt1.py

@@ -1,6 +1,6 @@
 #! /usr/bin/python3
 from pyln.proto.message import Message, MessageNamespace
-import pyln.proto.message.bolt1 as bolt1
+import pyln.spec.bolt1 as bolt1
 import io

1
contrib/pyln-spec/bolt2/pyln/spec/bolt2/__init__.py

@@ -0,0 +1 @@
../../../../subinit.py

1
contrib/pyln-spec/bolt2/pyln/spec/bolt2/bolt.py

@@ -0,0 +1 @@
../../../../bolt.py

1471
contrib/pyln-spec/bolt2/pyln/spec/bolt2/gen.py

File diff suppressed because it is too large

2
contrib/pyln-spec/bolt2/pyln/spec/bolt2/gen_version.py

@@ -0,0 +1,2 @@
__version__ = "1.0.post137"
__gitversion__ = "ae2d248b7ad8b0965f224c303019ba04c661008f"

1
contrib/pyln-spec/bolt2/requirements.txt

@@ -0,0 +1 @@
../requirements.txt

23
contrib/pyln-spec/bolt2/setup.py

@@ -0,0 +1,23 @@
from pyln.spec.bolt2 import __version__, desc
from setuptools import setup
import io

with io.open('requirements.txt', encoding='utf-8') as f:
    requirements = [r for r in f.read().split('\n') if len(r)]


def do_setup(boltnum: int, version: str, desc: str):
    setup(name='pyln-bolt{}'.format(boltnum),
          version=version,
          description=desc,
          url='http://github.com/ElementsProject/lightning',
          author='Rusty Russell',
          author_email='rusty@rustcorp.com.au',
          license='MIT',
          packages=['pyln.spec.bolt{}'.format(boltnum)],
          scripts=[],
          zip_safe=True,
          install_requires=requirements)


do_setup(2, __version__, desc)

2
contrib/pyln-proto/tests/test_bolt2.py → contrib/pyln-spec/bolt2/tests/test_bolt2.py

@@ -1,6 +1,6 @@
 #! /usr/bin/python3
 from pyln.proto.message import MessageNamespace
-import pyln.proto.message.bolt2 as bolt2
+import pyln.spec.bolt2 as bolt2
 
 # FIXME: more tests

1
contrib/pyln-spec/bolt4/pyln/spec/bolt4/__init__.py

@@ -0,0 +1 @@
../../../../subinit.py

1
contrib/pyln-spec/bolt4/pyln/spec/bolt4/bolt.py

@@ -0,0 +1 @@
../../../../bolt.py

1267
contrib/pyln-spec/bolt4/pyln/spec/bolt4/gen.py

File diff suppressed because it is too large

2
contrib/pyln-spec/bolt4/pyln/spec/bolt4/gen_version.py

@@ -0,0 +1,2 @@
__version__ = "1.0.post137"
__gitversion__ = "9e8e29af9b9a922eb114b2c716205d0772946e56"

1
contrib/pyln-spec/bolt4/requirements.txt

@@ -0,0 +1 @@
../requirements.txt

23
contrib/pyln-spec/bolt4/setup.py

@@ -0,0 +1,23 @@
from pyln.spec.bolt4 import __version__, desc
from setuptools import setup
import io

with io.open('requirements.txt', encoding='utf-8') as f:
    requirements = [r for r in f.read().split('\n') if len(r)]


def do_setup(boltnum: int, version: str, desc: str):
    setup(name='pyln-bolt{}'.format(boltnum),
          version=version,
          description=desc,
          url='http://github.com/ElementsProject/lightning',
          author='Rusty Russell',
          author_email='rusty@rustcorp.com.au',
          license='MIT',
          packages=['pyln.spec.bolt{}'.format(boltnum)],
          scripts=[],
          zip_safe=True,
          install_requires=requirements)


do_setup(4, __version__, desc)

2
contrib/pyln-proto/tests/test_bolt4.py → contrib/pyln-spec/bolt4/tests/test_bolt4.py

@@ -1,6 +1,6 @@
 #! /usr/bin/python3
 from pyln.proto.message import MessageNamespace
-import pyln.proto.message.bolt4 as bolt4
+import pyln.spec.bolt4 as bolt4
 
 # FIXME: more tests

1
contrib/pyln-spec/bolt7/pyln/spec/bolt7/__init__.py

@@ -0,0 +1 @@
../../../../subinit.py

1
contrib/pyln-spec/bolt7/pyln/spec/bolt7/bolt.py

@@ -0,0 +1 @@
../../../../bolt.py

1211
contrib/pyln-spec/bolt7/pyln/spec/bolt7/gen.py

File diff suppressed because it is too large

2
contrib/pyln-spec/bolt7/pyln/spec/bolt7/gen_version.py

@@ -0,0 +1,2 @@
__version__ = "1.0.post137"
__gitversion__ = "9e8e29af9b9a922eb114b2c716205d0772946e56"

1
contrib/pyln-spec/bolt7/requirements.txt

@@ -0,0 +1 @@
../requirements.txt

23
contrib/pyln-spec/bolt7/setup.py

@@ -0,0 +1,23 @@
from pyln.spec.bolt7 import __version__, desc
from setuptools import setup
import io

with io.open('requirements.txt', encoding='utf-8') as f:
    requirements = [r for r in f.read().split('\n') if len(r)]


def do_setup(boltnum: int, version: str, desc: str):
    setup(name='pyln-bolt{}'.format(boltnum),
          version=version,
          description=desc,
          url='http://github.com/ElementsProject/lightning',
          author='Rusty Russell',
          author_email='rusty@rustcorp.com.au',
          license='MIT',
          packages=['pyln.spec.bolt{}'.format(boltnum)],
          scripts=[],
          zip_safe=True,
          install_requires=requirements)


do_setup(7, __version__, desc)

2
contrib/pyln-proto/tests/test_bolt7.py → contrib/pyln-spec/bolt7/tests/test_bolt7.py

@@ -1,6 +1,6 @@
 #! /usr/bin/python3
 from pyln.proto.message import MessageNamespace
-import pyln.proto.message.bolt7 as bolt7
+import pyln.spec.bolt7 as bolt7
 
 # FIXME: more tests

1
contrib/pyln-spec/requirements.txt

@@ -0,0 +1 @@
pyln.proto.message

10
contrib/pyln-proto/pyln/proto/message/bolt4/__init__.py → contrib/pyln-spec/subinit.py

@@ -1,12 +1,16 @@
-from .csv import csv
+# This is the same __init__.py for all bolt dirs.
+from .gen import csv, text, desc
+from .gen_version import __version__, __gitversion__
 from .bolt import namespace
 import sys
 
-__version__ = '0.0.1'
-
 __all__ = [
     'csv',
+    'text',
+    'desc',
     'namespace',
+    '__version__',
+    '__gitversion__',
 ]
 
 mod = sys.modules[__name__]
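
As a hedged illustration of what the shared `__init__.py` ends up exporting (the attribute names come straight from the namespace dictionaries iterated by the loop):

```python
# Sketch: after the setattr loop runs, every subtype, tlvtype and messagetype
# name in the BOLT is available as a module-level attribute and listed in __all__.
import pyln.spec.bolt1 as bolt1

assert 'init' in bolt1.__all__
assert bolt1.init is bolt1.namespace.messagetypes['init']
```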