From 081dc51143f317bce8e7f1e23b893e84bb6177e0 Mon Sep 17 00:00:00 2001
From: bomanaps
Date: Sat, 13 Sep 2025 22:23:14 +0100
Subject: [PATCH] Implement complete libp2p Noise protocol extensions

---
 libp2p/crypto/pb/crypto_pb2.py                |  35 +-
 libp2p/host/autonat/pb/autonat_pb2.py         |  51 +-
 libp2p/identity/identify/pb/identify_pb2.py   |  27 +-
 libp2p/kad_dht/pb/kademlia_pb2.py             |  16 +-
 libp2p/kad_dht/pb/kademlia_pb2.pyi            | 214 +++++---
 libp2p/pubsub/pb/rpc_pb2.py                   |  16 +-
 libp2p/pubsub/pb/rpc_pb2.pyi                  | 458 +++++++++++++-----
 libp2p/relay/circuit_v2/pb/circuit_pb2.py     |  55 ++-
 libp2p/relay/circuit_v2/pb/dcutr_pb2.py       |  31 +-
 libp2p/security/insecure/pb/plaintext_pb2.py  |  27 +-
 libp2p/security/noise/early_data.py           | 282 +++++++++++
 libp2p/security/noise/messages.py             | 199 +++++++-
 libp2p/security/noise/patterns.py             |  47 +-
 libp2p/security/noise/patterns_ik.py          | 280 +++++++++++
 libp2p/security/noise/pb/noise.proto          |  14 +
 libp2p/security/noise/pb/noise_pb2.py         |  31 +-
 libp2p/security/noise/pb/noise_pb2.pyi        |  40 +-
 libp2p/security/noise/rekey.py                | 258 ++++++++++
 libp2p/security/noise/transport.py            | 148 +++++-
 libp2p/security/noise/webtransport.py         | 151 ++++++
 libp2p/security/secio/pb/spipe_pb2.py         |  31 +-
 pyproject.toml                                |   2 +-
 .../noise/test_early_data_extensions.py       | 336 +++++++++++++
 .../noise/test_phase_advanced_features.p      | 443 +++++++++++++++++
 24 files changed, 2863 insertions(+), 329 deletions(-)
 create mode 100644 libp2p/security/noise/early_data.py
 create mode 100644 libp2p/security/noise/patterns_ik.py
 create mode 100644 libp2p/security/noise/rekey.py
 create mode 100644 libp2p/security/noise/webtransport.py
 create mode 100644 tests/core/security/noise/test_early_data_extensions.py
 create mode 100644 tests/core/security/noise/test_phase_advanced_features.p

diff --git a/libp2p/crypto/pb/crypto_pb2.py b/libp2p/crypto/pb/crypto_pb2.py
index 99d472022..c4552bfa9 100644
--- a/libp2p/crypto/pb/crypto_pb2.py
+++ b/libp2p/crypto/pb/crypto_pb2.py
@@ -1,11 +1,22 @@
 # -*- coding: utf-8 -*-
 # Generated by the protocol buffer compiler. DO NOT EDIT!
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/crypto/pb/crypto.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/crypto/pb/crypto.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,15 +26,15 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dlibp2p/crypto/pb/crypto.proto\x12\tcrypto.pb\"?\n\tPublicKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c\"@\n\nPrivateKey\x12$\n\x08key_type\x18\x01 \x02(\x0e\x32\x12.crypto.pb.KeyType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x02(\x0c*S\n\x07KeyType\x12\x07\n\x03RSA\x10\x00\x12\x0b\n\x07\x45\x64\x32\x35\x35\x31\x39\x10\x01\x12\r\n\tSecp256k1\x10\x02\x12\t\n\x05\x45\x43\x44SA\x10\x03\x12\x0c\n\x08\x45\x43\x43_P256\x10\x04\x12\n\n\x06X25519\x10\x05') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.crypto.pb.crypto_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _KEYTYPE._serialized_start=175 - _KEYTYPE._serialized_end=258 - _PUBLICKEY._serialized_start=44 - _PUBLICKEY._serialized_end=107 - _PRIVATEKEY._serialized_start=109 - _PRIVATEKEY._serialized_end=173 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.crypto.pb.crypto_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_KEYTYPE']._serialized_start=175 + _globals['_KEYTYPE']._serialized_end=258 + _globals['_PUBLICKEY']._serialized_start=44 + _globals['_PUBLICKEY']._serialized_end=107 + _globals['_PRIVATEKEY']._serialized_start=109 + _globals['_PRIVATEKEY']._serialized_end=173 # @@protoc_insertion_point(module_scope) diff --git a/libp2p/host/autonat/pb/autonat_pb2.py b/libp2p/host/autonat/pb/autonat_pb2.py index 983a89c6e..bd3c61a13 100644 --- a/libp2p/host/autonat/pb/autonat_pb2.py +++ b/libp2p/host/autonat/pb/autonat_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/host/autonat/pb/autonat.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/host/autonat/pb/autonat.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,23 +26,23 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$libp2p/host/autonat/pb/autonat.proto\x12\nautonat.pb\"\x81\x01\n\x07Message\x12\x1e\n\x04type\x18\x01 \x01(\x0e\x32\x10.autonat.pb.Type\x12%\n\x04\x64ial\x18\x02 \x01(\x0b\x32\x17.autonat.pb.DialRequest\x12/\n\rdial_response\x18\x03 \x01(\x0b\x32\x18.autonat.pb.DialResponse\"2\n\x0b\x44ialRequest\x12#\n\x05peers\x18\x01 \x03(\x0b\x32\x14.autonat.pb.PeerInfo\"W\n\x0c\x44ialResponse\x12\"\n\x06status\x18\x01 \x01(\x0e\x32\x12.autonat.pb.Status\x12#\n\x05peers\x18\x02 \x03(\x0b\x32\x14.autonat.pb.PeerInfo\"6\n\x08PeerInfo\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\r\n\x05\x61\x64\x64rs\x18\x02 \x03(\x0c\x12\x0f\n\x07success\x18\x03 \x01(\x08*0\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04\x44IAL\x10\x01\x12\x11\n\rDIAL_RESPONSE\x10\x02*_\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\x10\n\x0c\x45_DIAL_ERROR\x10\x01\x12\x12\n\x0e\x45_DIAL_REFUSED\x10\x02\x12\x11\n\rE_DIAL_FAILED\x10\x03\x12\x14\n\x10\x45_INTERNAL_ERROR\x10\x64\x32=\n\x07\x41utoNAT\x12\x32\n\x04\x44ial\x12\x13.autonat.pb.Message\x1a\x13.autonat.pb.Message\"\x00\x62\x06proto3') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.host.autonat.pb.autonat_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _TYPE._serialized_start=381 - _TYPE._serialized_end=429 - _STATUS._serialized_start=431 - _STATUS._serialized_end=526 - _MESSAGE._serialized_start=53 - _MESSAGE._serialized_end=182 - _DIALREQUEST._serialized_start=184 - _DIALREQUEST._serialized_end=234 - _DIALRESPONSE._serialized_start=236 - _DIALRESPONSE._serialized_end=323 - _PEERINFO._serialized_start=325 - _PEERINFO._serialized_end=379 - _AUTONAT._serialized_start=528 - _AUTONAT._serialized_end=589 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.host.autonat.pb.autonat_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_TYPE']._serialized_start=381 + _globals['_TYPE']._serialized_end=429 + _globals['_STATUS']._serialized_start=431 + _globals['_STATUS']._serialized_end=526 + _globals['_MESSAGE']._serialized_start=53 + _globals['_MESSAGE']._serialized_end=182 + _globals['_DIALREQUEST']._serialized_start=184 + _globals['_DIALREQUEST']._serialized_end=234 + _globals['_DIALRESPONSE']._serialized_start=236 + _globals['_DIALRESPONSE']._serialized_end=323 + _globals['_PEERINFO']._serialized_start=325 + _globals['_PEERINFO']._serialized_end=379 + _globals['_AUTONAT']._serialized_start=528 + _globals['_AUTONAT']._serialized_end=589 # @@protoc_insertion_point(module_scope) diff --git 
a/libp2p/identity/identify/pb/identify_pb2.py b/libp2p/identity/identify/pb/identify_pb2.py index d582d68a0..3063f1097 100644 --- a/libp2p/identity/identify/pb/identify_pb2.py +++ b/libp2p/identity/identify/pb/identify_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/identity/identify/pb/identify.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/identity/identify/pb/identify.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,11 +26,11 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*libp2p/identity/identify/pb/identify.proto\x12\x0bidentify.pb\"\xa9\x01\n\x08Identify\x12\x18\n\x10protocol_version\x18\x05 \x01(\t\x12\x15\n\ragent_version\x18\x06 \x01(\t\x12\x12\n\npublic_key\x18\x01 \x01(\x0c\x12\x14\n\x0clisten_addrs\x18\x02 \x03(\x0c\x12\x15\n\robserved_addr\x18\x04 \x01(\x0c\x12\x11\n\tprotocols\x18\x03 \x03(\t\x12\x18\n\x10signedPeerRecord\x18\x08 \x01(\x0c') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.identity.identify.pb.identify_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _IDENTIFY._serialized_start=60 - _IDENTIFY._serialized_end=229 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.identity.identify.pb.identify_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_IDENTIFY']._serialized_start=60 + _globals['_IDENTIFY']._serialized_end=229 # @@protoc_insertion_point(module_scope) diff --git a/libp2p/kad_dht/pb/kademlia_pb2.py b/libp2p/kad_dht/pb/kademlia_pb2.py index ac23169cf..e41bb5292 100644 --- a/libp2p/kad_dht/pb/kademlia_pb2.py +++ b/libp2p/kad_dht/pb/kademlia_pb2.py @@ -1,12 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/kad_dht/pb/kademlia.proto -# Protobuf Python Version: 4.25.3 +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/kad_dht/pb/kademlia.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -19,8 +29,8 @@ _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.kad_dht.pb.kademlia_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None _globals['_RECORD']._serialized_start=36 _globals['_RECORD']._serialized_end=94 _globals['_MESSAGE']._serialized_start=97 diff --git a/libp2p/kad_dht/pb/kademlia_pb2.pyi b/libp2p/kad_dht/pb/kademlia_pb2.pyi index 6d80d77db..641ae66ae 100644 --- a/libp2p/kad_dht/pb/kademlia_pb2.pyi +++ b/libp2p/kad_dht/pb/kademlia_pb2.pyi @@ -1,70 +1,144 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class Record(_message.Message): - __slots__ = ("key", "value", "timeReceived") - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - TIMERECEIVED_FIELD_NUMBER: _ClassVar[int] - key: bytes - value: bytes - timeReceived: str - def __init__(self, key: _Optional[bytes] = ..., value: _Optional[bytes] = ..., timeReceived: _Optional[str] = ...) -> None: ... 
- -class Message(_message.Message): - __slots__ = ("type", "clusterLevelRaw", "key", "record", "closerPeers", "providerPeers", "senderRecord") - class MessageType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - PUT_VALUE: _ClassVar[Message.MessageType] - GET_VALUE: _ClassVar[Message.MessageType] - ADD_PROVIDER: _ClassVar[Message.MessageType] - GET_PROVIDERS: _ClassVar[Message.MessageType] - FIND_NODE: _ClassVar[Message.MessageType] - PING: _ClassVar[Message.MessageType] - PUT_VALUE: Message.MessageType - GET_VALUE: Message.MessageType - ADD_PROVIDER: Message.MessageType - GET_PROVIDERS: Message.MessageType - FIND_NODE: Message.MessageType - PING: Message.MessageType - class ConnectionType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - NOT_CONNECTED: _ClassVar[Message.ConnectionType] - CONNECTED: _ClassVar[Message.ConnectionType] - CAN_CONNECT: _ClassVar[Message.ConnectionType] - CANNOT_CONNECT: _ClassVar[Message.ConnectionType] - NOT_CONNECTED: Message.ConnectionType - CONNECTED: Message.ConnectionType - CAN_CONNECT: Message.ConnectionType - CANNOT_CONNECT: Message.ConnectionType - class Peer(_message.Message): - __slots__ = ("id", "addrs", "connection", "signedRecord") - ID_FIELD_NUMBER: _ClassVar[int] - ADDRS_FIELD_NUMBER: _ClassVar[int] - CONNECTION_FIELD_NUMBER: _ClassVar[int] - SIGNEDRECORD_FIELD_NUMBER: _ClassVar[int] - id: bytes - addrs: _containers.RepeatedScalarFieldContainer[bytes] - connection: Message.ConnectionType - signedRecord: bytes - def __init__(self, id: _Optional[bytes] = ..., addrs: _Optional[_Iterable[bytes]] = ..., connection: _Optional[_Union[Message.ConnectionType, str]] = ..., signedRecord: _Optional[bytes] = ...) -> None: ... - TYPE_FIELD_NUMBER: _ClassVar[int] - CLUSTERLEVELRAW_FIELD_NUMBER: _ClassVar[int] - KEY_FIELD_NUMBER: _ClassVar[int] - RECORD_FIELD_NUMBER: _ClassVar[int] - CLOSERPEERS_FIELD_NUMBER: _ClassVar[int] - PROVIDERPEERS_FIELD_NUMBER: _ClassVar[int] - SENDERRECORD_FIELD_NUMBER: _ClassVar[int] - type: Message.MessageType - clusterLevelRaw: int - key: bytes - record: Record - closerPeers: _containers.RepeatedCompositeFieldContainer[Message.Peer] - providerPeers: _containers.RepeatedCompositeFieldContainer[Message.Peer] - senderRecord: bytes - def __init__(self, type: _Optional[_Union[Message.MessageType, str]] = ..., clusterLevelRaw: _Optional[int] = ..., key: _Optional[bytes] = ..., record: _Optional[_Union[Record, _Mapping]] = ..., closerPeers: _Optional[_Iterable[_Union[Message.Peer, _Mapping]]] = ..., providerPeers: _Optional[_Iterable[_Union[Message.Peer, _Mapping]]] = ..., senderRecord: _Optional[bytes] = ...) -> None: ... # type: ignore +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class Record(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + TIMERECEIVED_FIELD_NUMBER: builtins.int + key: builtins.bytes + value: builtins.bytes + timeReceived: builtins.str + def __init__( + self, + *, + key: builtins.bytes = ..., + value: builtins.bytes = ..., + timeReceived: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "timeReceived", b"timeReceived", "value", b"value"]) -> None: ... + +global___Record = Record + +@typing.final +class Message(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _MessageType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _MessageTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Message._MessageType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PUT_VALUE: Message._MessageType.ValueType # 0 + GET_VALUE: Message._MessageType.ValueType # 1 + ADD_PROVIDER: Message._MessageType.ValueType # 2 + GET_PROVIDERS: Message._MessageType.ValueType # 3 + FIND_NODE: Message._MessageType.ValueType # 4 + PING: Message._MessageType.ValueType # 5 + + class MessageType(_MessageType, metaclass=_MessageTypeEnumTypeWrapper): ... + PUT_VALUE: Message.MessageType.ValueType # 0 + GET_VALUE: Message.MessageType.ValueType # 1 + ADD_PROVIDER: Message.MessageType.ValueType # 2 + GET_PROVIDERS: Message.MessageType.ValueType # 3 + FIND_NODE: Message.MessageType.ValueType # 4 + PING: Message.MessageType.ValueType # 5 + + class _ConnectionType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ConnectionTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Message._ConnectionType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NOT_CONNECTED: Message._ConnectionType.ValueType # 0 + CONNECTED: Message._ConnectionType.ValueType # 1 + CAN_CONNECT: Message._ConnectionType.ValueType # 2 + CANNOT_CONNECT: Message._ConnectionType.ValueType # 3 + + class ConnectionType(_ConnectionType, metaclass=_ConnectionTypeEnumTypeWrapper): ... + NOT_CONNECTED: Message.ConnectionType.ValueType # 0 + CONNECTED: Message.ConnectionType.ValueType # 1 + CAN_CONNECT: Message.ConnectionType.ValueType # 2 + CANNOT_CONNECT: Message.ConnectionType.ValueType # 3 + + @typing.final + class Peer(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ID_FIELD_NUMBER: builtins.int + ADDRS_FIELD_NUMBER: builtins.int + CONNECTION_FIELD_NUMBER: builtins.int + SIGNEDRECORD_FIELD_NUMBER: builtins.int + id: builtins.bytes + connection: global___Message.ConnectionType.ValueType + signedRecord: builtins.bytes + """Envelope(PeerRecord) encoded""" + @property + def addrs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ... 
+ def __init__( + self, + *, + id: builtins.bytes = ..., + addrs: collections.abc.Iterable[builtins.bytes] | None = ..., + connection: global___Message.ConnectionType.ValueType = ..., + signedRecord: builtins.bytes | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_signedRecord", b"_signedRecord", "signedRecord", b"signedRecord"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_signedRecord", b"_signedRecord", "addrs", b"addrs", "connection", b"connection", "id", b"id", "signedRecord", b"signedRecord"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_signedRecord", b"_signedRecord"]) -> typing.Literal["signedRecord"] | None: ... + + TYPE_FIELD_NUMBER: builtins.int + CLUSTERLEVELRAW_FIELD_NUMBER: builtins.int + KEY_FIELD_NUMBER: builtins.int + RECORD_FIELD_NUMBER: builtins.int + CLOSERPEERS_FIELD_NUMBER: builtins.int + PROVIDERPEERS_FIELD_NUMBER: builtins.int + SENDERRECORD_FIELD_NUMBER: builtins.int + type: global___Message.MessageType.ValueType + clusterLevelRaw: builtins.int + key: builtins.bytes + senderRecord: builtins.bytes + """Envelope(PeerRecord) encoded""" + @property + def record(self) -> global___Record: ... + @property + def closerPeers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Message.Peer]: ... + @property + def providerPeers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Message.Peer]: ... + def __init__( + self, + *, + type: global___Message.MessageType.ValueType = ..., + clusterLevelRaw: builtins.int = ..., + key: builtins.bytes = ..., + record: global___Record | None = ..., + closerPeers: collections.abc.Iterable[global___Message.Peer] | None = ..., + providerPeers: collections.abc.Iterable[global___Message.Peer] | None = ..., + senderRecord: builtins.bytes | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_senderRecord", b"_senderRecord", "record", b"record", "senderRecord", b"senderRecord"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_senderRecord", b"_senderRecord", "closerPeers", b"closerPeers", "clusterLevelRaw", b"clusterLevelRaw", "key", b"key", "providerPeers", b"providerPeers", "record", b"record", "senderRecord", b"senderRecord", "type", b"type"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_senderRecord", b"_senderRecord"]) -> typing.Literal["senderRecord"] | None: ... + +global___Message = Message diff --git a/libp2p/pubsub/pb/rpc_pb2.py b/libp2p/pubsub/pb/rpc_pb2.py index e4a35745a..55086c791 100644 --- a/libp2p/pubsub/pb/rpc_pb2.py +++ b/libp2p/pubsub/pb/rpc_pb2.py @@ -1,12 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/pubsub/pb/rpc.proto -# Protobuf Python Version: 4.25.3 +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/pubsub/pb/rpc.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -19,8 +29,8 @@ _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.pubsub.pb.rpc_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None _globals['_RPC']._serialized_start=42 _globals['_RPC']._serialized_end=244 _globals['_RPC_SUBOPTS']._serialized_start=199 diff --git a/libp2p/pubsub/pb/rpc_pb2.pyi b/libp2p/pubsub/pb/rpc_pb2.pyi index 2609fd11f..7d846c9a4 100644 --- a/libp2p/pubsub/pb/rpc_pb2.pyi +++ b/libp2p/pubsub/pb/rpc_pb2.pyi @@ -1,132 +1,326 @@ -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class RPC(_message.Message): - __slots__ = ("subscriptions", "publish", "control", "senderRecord") - class SubOpts(_message.Message): - __slots__ = ("subscribe", "topicid") - SUBSCRIBE_FIELD_NUMBER: _ClassVar[int] - TOPICID_FIELD_NUMBER: _ClassVar[int] - subscribe: bool - topicid: str - def __init__(self, subscribe: bool = ..., topicid: _Optional[str] = ...) -> None: ... - SUBSCRIPTIONS_FIELD_NUMBER: _ClassVar[int] - PUBLISH_FIELD_NUMBER: _ClassVar[int] - CONTROL_FIELD_NUMBER: _ClassVar[int] - SENDERRECORD_FIELD_NUMBER: _ClassVar[int] - subscriptions: _containers.RepeatedCompositeFieldContainer[RPC.SubOpts] - publish: _containers.RepeatedCompositeFieldContainer[Message] - control: ControlMessage - senderRecord: bytes - def __init__(self, subscriptions: _Optional[_Iterable[_Union[RPC.SubOpts, _Mapping]]] = ..., publish: _Optional[_Iterable[_Union[Message, _Mapping]]] = ..., control: _Optional[_Union[ControlMessage, _Mapping]] = ..., senderRecord: _Optional[bytes] = ...) -> None: ... # type: ignore - -class Message(_message.Message): - __slots__ = ("from_id", "data", "seqno", "topicIDs", "signature", "key") - FROM_ID_FIELD_NUMBER: _ClassVar[int] - DATA_FIELD_NUMBER: _ClassVar[int] - SEQNO_FIELD_NUMBER: _ClassVar[int] - TOPICIDS_FIELD_NUMBER: _ClassVar[int] - SIGNATURE_FIELD_NUMBER: _ClassVar[int] - KEY_FIELD_NUMBER: _ClassVar[int] - from_id: bytes - data: bytes - seqno: bytes - topicIDs: _containers.RepeatedScalarFieldContainer[str] - signature: bytes - key: bytes - def __init__(self, from_id: _Optional[bytes] = ..., data: _Optional[bytes] = ..., seqno: _Optional[bytes] = ..., topicIDs: _Optional[_Iterable[str]] = ..., signature: _Optional[bytes] = ..., key: _Optional[bytes] = ...) -> None: ... 
- -class ControlMessage(_message.Message): - __slots__ = ("ihave", "iwant", "graft", "prune") - IHAVE_FIELD_NUMBER: _ClassVar[int] - IWANT_FIELD_NUMBER: _ClassVar[int] - GRAFT_FIELD_NUMBER: _ClassVar[int] - PRUNE_FIELD_NUMBER: _ClassVar[int] - ihave: _containers.RepeatedCompositeFieldContainer[ControlIHave] - iwant: _containers.RepeatedCompositeFieldContainer[ControlIWant] - graft: _containers.RepeatedCompositeFieldContainer[ControlGraft] - prune: _containers.RepeatedCompositeFieldContainer[ControlPrune] - def __init__(self, ihave: _Optional[_Iterable[_Union[ControlIHave, _Mapping]]] = ..., iwant: _Optional[_Iterable[_Union[ControlIWant, _Mapping]]] = ..., graft: _Optional[_Iterable[_Union[ControlGraft, _Mapping]]] = ..., prune: _Optional[_Iterable[_Union[ControlPrune, _Mapping]]] = ...) -> None: ... # type: ignore - -class ControlIHave(_message.Message): - __slots__ = ("topicID", "messageIDs") - TOPICID_FIELD_NUMBER: _ClassVar[int] - MESSAGEIDS_FIELD_NUMBER: _ClassVar[int] - topicID: str - messageIDs: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, topicID: _Optional[str] = ..., messageIDs: _Optional[_Iterable[str]] = ...) -> None: ... - -class ControlIWant(_message.Message): - __slots__ = ("messageIDs",) - MESSAGEIDS_FIELD_NUMBER: _ClassVar[int] - messageIDs: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, messageIDs: _Optional[_Iterable[str]] = ...) -> None: ... - -class ControlGraft(_message.Message): - __slots__ = ("topicID",) - TOPICID_FIELD_NUMBER: _ClassVar[int] - topicID: str - def __init__(self, topicID: _Optional[str] = ...) -> None: ... - -class ControlPrune(_message.Message): - __slots__ = ("topicID", "peers", "backoff") - TOPICID_FIELD_NUMBER: _ClassVar[int] - PEERS_FIELD_NUMBER: _ClassVar[int] - BACKOFF_FIELD_NUMBER: _ClassVar[int] - topicID: str - peers: _containers.RepeatedCompositeFieldContainer[PeerInfo] - backoff: int - def __init__(self, topicID: _Optional[str] = ..., peers: _Optional[_Iterable[_Union[PeerInfo, _Mapping]]] = ..., backoff: _Optional[int] = ...) -> None: ... # type: ignore - -class PeerInfo(_message.Message): - __slots__ = ("peerID", "signedPeerRecord") - PEERID_FIELD_NUMBER: _ClassVar[int] - SIGNEDPEERRECORD_FIELD_NUMBER: _ClassVar[int] - peerID: bytes - signedPeerRecord: bytes - def __init__(self, peerID: _Optional[bytes] = ..., signedPeerRecord: _Optional[bytes] = ...) -> None: ... - -class TopicDescriptor(_message.Message): - __slots__ = ("name", "auth", "enc") - class AuthOpts(_message.Message): - __slots__ = ("mode", "keys") - class AuthMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - NONE: _ClassVar[TopicDescriptor.AuthOpts.AuthMode] - KEY: _ClassVar[TopicDescriptor.AuthOpts.AuthMode] - WOT: _ClassVar[TopicDescriptor.AuthOpts.AuthMode] - NONE: TopicDescriptor.AuthOpts.AuthMode - KEY: TopicDescriptor.AuthOpts.AuthMode - WOT: TopicDescriptor.AuthOpts.AuthMode - MODE_FIELD_NUMBER: _ClassVar[int] - KEYS_FIELD_NUMBER: _ClassVar[int] - mode: TopicDescriptor.AuthOpts.AuthMode - keys: _containers.RepeatedScalarFieldContainer[bytes] - def __init__(self, mode: _Optional[_Union[TopicDescriptor.AuthOpts.AuthMode, str]] = ..., keys: _Optional[_Iterable[bytes]] = ...) -> None: ... 
- class EncOpts(_message.Message): - __slots__ = ("mode", "keyHashes") - class EncMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - NONE: _ClassVar[TopicDescriptor.EncOpts.EncMode] - SHAREDKEY: _ClassVar[TopicDescriptor.EncOpts.EncMode] - WOT: _ClassVar[TopicDescriptor.EncOpts.EncMode] - NONE: TopicDescriptor.EncOpts.EncMode - SHAREDKEY: TopicDescriptor.EncOpts.EncMode - WOT: TopicDescriptor.EncOpts.EncMode - MODE_FIELD_NUMBER: _ClassVar[int] - KEYHASHES_FIELD_NUMBER: _ClassVar[int] - mode: TopicDescriptor.EncOpts.EncMode - keyHashes: _containers.RepeatedScalarFieldContainer[bytes] - def __init__(self, mode: _Optional[_Union[TopicDescriptor.EncOpts.EncMode, str]] = ..., keyHashes: _Optional[_Iterable[bytes]] = ...) -> None: ... - NAME_FIELD_NUMBER: _ClassVar[int] - AUTH_FIELD_NUMBER: _ClassVar[int] - ENC_FIELD_NUMBER: _ClassVar[int] - name: str - auth: TopicDescriptor.AuthOpts - enc: TopicDescriptor.EncOpts - def __init__(self, name: _Optional[str] = ..., auth: _Optional[_Union[TopicDescriptor.AuthOpts, _Mapping]] = ..., enc: _Optional[_Union[TopicDescriptor.EncOpts, _Mapping]] = ...) -> None: ... # type: ignore +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Modified from https://github.com/libp2p/go-libp2p-pubsub/blob/master/pb/rpc.proto""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class RPC(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class SubOpts(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SUBSCRIBE_FIELD_NUMBER: builtins.int + TOPICID_FIELD_NUMBER: builtins.int + subscribe: builtins.bool + """subscribe or unsubscribe""" + topicid: builtins.str + def __init__( + self, + *, + subscribe: builtins.bool | None = ..., + topicid: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["subscribe", b"subscribe", "topicid", b"topicid"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["subscribe", b"subscribe", "topicid", b"topicid"]) -> None: ... + + SUBSCRIPTIONS_FIELD_NUMBER: builtins.int + PUBLISH_FIELD_NUMBER: builtins.int + CONTROL_FIELD_NUMBER: builtins.int + SENDERRECORD_FIELD_NUMBER: builtins.int + senderRecord: builtins.bytes + @property + def subscriptions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RPC.SubOpts]: ... + @property + def publish(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Message]: ... + @property + def control(self) -> global___ControlMessage: ... + def __init__( + self, + *, + subscriptions: collections.abc.Iterable[global___RPC.SubOpts] | None = ..., + publish: collections.abc.Iterable[global___Message] | None = ..., + control: global___ControlMessage | None = ..., + senderRecord: builtins.bytes | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["control", b"control", "senderRecord", b"senderRecord"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing.Literal["control", b"control", "publish", b"publish", "senderRecord", b"senderRecord", "subscriptions", b"subscriptions"]) -> None: ... + +global___RPC = RPC + +@typing.final +class Message(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FROM_ID_FIELD_NUMBER: builtins.int + DATA_FIELD_NUMBER: builtins.int + SEQNO_FIELD_NUMBER: builtins.int + TOPICIDS_FIELD_NUMBER: builtins.int + SIGNATURE_FIELD_NUMBER: builtins.int + KEY_FIELD_NUMBER: builtins.int + from_id: builtins.bytes + data: builtins.bytes + seqno: builtins.bytes + signature: builtins.bytes + key: builtins.bytes + @property + def topicIDs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + from_id: builtins.bytes | None = ..., + data: builtins.bytes | None = ..., + seqno: builtins.bytes | None = ..., + topicIDs: collections.abc.Iterable[builtins.str] | None = ..., + signature: builtins.bytes | None = ..., + key: builtins.bytes | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["data", b"data", "from_id", b"from_id", "key", b"key", "seqno", b"seqno", "signature", b"signature"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["data", b"data", "from_id", b"from_id", "key", b"key", "seqno", b"seqno", "signature", b"signature", "topicIDs", b"topicIDs"]) -> None: ... + +global___Message = Message + +@typing.final +class ControlMessage(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + IHAVE_FIELD_NUMBER: builtins.int + IWANT_FIELD_NUMBER: builtins.int + GRAFT_FIELD_NUMBER: builtins.int + PRUNE_FIELD_NUMBER: builtins.int + @property + def ihave(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlIHave]: ... + @property + def iwant(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlIWant]: ... + @property + def graft(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlGraft]: ... + @property + def prune(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlPrune]: ... + def __init__( + self, + *, + ihave: collections.abc.Iterable[global___ControlIHave] | None = ..., + iwant: collections.abc.Iterable[global___ControlIWant] | None = ..., + graft: collections.abc.Iterable[global___ControlGraft] | None = ..., + prune: collections.abc.Iterable[global___ControlPrune] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["graft", b"graft", "ihave", b"ihave", "iwant", b"iwant", "prune", b"prune"]) -> None: ... + +global___ControlMessage = ControlMessage + +@typing.final +class ControlIHave(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TOPICID_FIELD_NUMBER: builtins.int + MESSAGEIDS_FIELD_NUMBER: builtins.int + topicID: builtins.str + @property + def messageIDs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + topicID: builtins.str | None = ..., + messageIDs: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["topicID", b"topicID"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["messageIDs", b"messageIDs", "topicID", b"topicID"]) -> None: ... 
+ +global___ControlIHave = ControlIHave + +@typing.final +class ControlIWant(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MESSAGEIDS_FIELD_NUMBER: builtins.int + @property + def messageIDs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + messageIDs: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["messageIDs", b"messageIDs"]) -> None: ... + +global___ControlIWant = ControlIWant + +@typing.final +class ControlGraft(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TOPICID_FIELD_NUMBER: builtins.int + topicID: builtins.str + def __init__( + self, + *, + topicID: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["topicID", b"topicID"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["topicID", b"topicID"]) -> None: ... + +global___ControlGraft = ControlGraft + +@typing.final +class ControlPrune(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TOPICID_FIELD_NUMBER: builtins.int + PEERS_FIELD_NUMBER: builtins.int + BACKOFF_FIELD_NUMBER: builtins.int + topicID: builtins.str + backoff: builtins.int + @property + def peers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PeerInfo]: ... + def __init__( + self, + *, + topicID: builtins.str | None = ..., + peers: collections.abc.Iterable[global___PeerInfo] | None = ..., + backoff: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["backoff", b"backoff", "topicID", b"topicID"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["backoff", b"backoff", "peers", b"peers", "topicID", b"topicID"]) -> None: ... + +global___ControlPrune = ControlPrune + +@typing.final +class PeerInfo(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PEERID_FIELD_NUMBER: builtins.int + SIGNEDPEERRECORD_FIELD_NUMBER: builtins.int + peerID: builtins.bytes + signedPeerRecord: builtins.bytes + def __init__( + self, + *, + peerID: builtins.bytes | None = ..., + signedPeerRecord: builtins.bytes | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["peerID", b"peerID", "signedPeerRecord", b"signedPeerRecord"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["peerID", b"peerID", "signedPeerRecord", b"signedPeerRecord"]) -> None: ... 
+ +global___PeerInfo = PeerInfo + +@typing.final +class TopicDescriptor(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class AuthOpts(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _AuthMode: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _AuthModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TopicDescriptor.AuthOpts._AuthMode.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NONE: TopicDescriptor.AuthOpts._AuthMode.ValueType # 0 + """no authentication, anyone can publish""" + KEY: TopicDescriptor.AuthOpts._AuthMode.ValueType # 1 + """only messages signed by keys in the topic descriptor are accepted""" + WOT: TopicDescriptor.AuthOpts._AuthMode.ValueType # 2 + """web of trust, certificates can allow publisher set to grow""" + + class AuthMode(_AuthMode, metaclass=_AuthModeEnumTypeWrapper): ... + NONE: TopicDescriptor.AuthOpts.AuthMode.ValueType # 0 + """no authentication, anyone can publish""" + KEY: TopicDescriptor.AuthOpts.AuthMode.ValueType # 1 + """only messages signed by keys in the topic descriptor are accepted""" + WOT: TopicDescriptor.AuthOpts.AuthMode.ValueType # 2 + """web of trust, certificates can allow publisher set to grow""" + + MODE_FIELD_NUMBER: builtins.int + KEYS_FIELD_NUMBER: builtins.int + mode: global___TopicDescriptor.AuthOpts.AuthMode.ValueType + @property + def keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: + """root keys to trust""" + + def __init__( + self, + *, + mode: global___TopicDescriptor.AuthOpts.AuthMode.ValueType | None = ..., + keys: collections.abc.Iterable[builtins.bytes] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["mode", b"mode"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["keys", b"keys", "mode", b"mode"]) -> None: ... + + @typing.final + class EncOpts(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _EncMode: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _EncModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TopicDescriptor.EncOpts._EncMode.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NONE: TopicDescriptor.EncOpts._EncMode.ValueType # 0 + """no encryption, anyone can read""" + SHAREDKEY: TopicDescriptor.EncOpts._EncMode.ValueType # 1 + """messages are encrypted with shared key""" + WOT: TopicDescriptor.EncOpts._EncMode.ValueType # 2 + """web of trust, certificates can allow publisher set to grow""" + + class EncMode(_EncMode, metaclass=_EncModeEnumTypeWrapper): ... 
+ NONE: TopicDescriptor.EncOpts.EncMode.ValueType # 0 + """no encryption, anyone can read""" + SHAREDKEY: TopicDescriptor.EncOpts.EncMode.ValueType # 1 + """messages are encrypted with shared key""" + WOT: TopicDescriptor.EncOpts.EncMode.ValueType # 2 + """web of trust, certificates can allow publisher set to grow""" + + MODE_FIELD_NUMBER: builtins.int + KEYHASHES_FIELD_NUMBER: builtins.int + mode: global___TopicDescriptor.EncOpts.EncMode.ValueType + @property + def keyHashes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: + """the hashes of the shared keys used (salted)""" + + def __init__( + self, + *, + mode: global___TopicDescriptor.EncOpts.EncMode.ValueType | None = ..., + keyHashes: collections.abc.Iterable[builtins.bytes] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["mode", b"mode"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["keyHashes", b"keyHashes", "mode", b"mode"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + AUTH_FIELD_NUMBER: builtins.int + ENC_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def auth(self) -> global___TopicDescriptor.AuthOpts: ... + @property + def enc(self) -> global___TopicDescriptor.EncOpts: ... + def __init__( + self, + *, + name: builtins.str | None = ..., + auth: global___TopicDescriptor.AuthOpts | None = ..., + enc: global___TopicDescriptor.EncOpts | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["auth", b"auth", "enc", b"enc", "name", b"name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["auth", b"auth", "enc", b"enc", "name", b"name"]) -> None: ... + +global___TopicDescriptor = TopicDescriptor diff --git a/libp2p/relay/circuit_v2/pb/circuit_pb2.py b/libp2p/relay/circuit_v2/pb/circuit_pb2.py index 946bff732..706718dae 100644 --- a/libp2p/relay/circuit_v2/pb/circuit_pb2.py +++ b/libp2p/relay/circuit_v2/pb/circuit_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/relay/circuit_v2/pb/circuit.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/relay/circuit_v2/pb/circuit.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,25 +26,25 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(libp2p/relay/circuit_v2/pb/circuit.proto\x12\rcircuit.pb.v2\"\xf3\x01\n\nHopMessage\x12,\n\x04type\x18\x01 \x01(\x0e\x32\x1e.circuit.pb.v2.HopMessage.Type\x12\x0c\n\x04peer\x18\x02 \x01(\x0c\x12/\n\x0breservation\x18\x03 \x01(\x0b\x32\x1a.circuit.pb.v2.Reservation\x12#\n\x05limit\x18\x04 \x01(\x0b\x32\x14.circuit.pb.v2.Limit\x12%\n\x06status\x18\x05 \x01(\x0b\x32\x15.circuit.pb.v2.Status\",\n\x04Type\x12\x0b\n\x07RESERVE\x10\x00\x12\x0b\n\x07\x43ONNECT\x10\x01\x12\n\n\x06STATUS\x10\x02\"\x92\x01\n\x0bStopMessage\x12-\n\x04type\x18\x01 \x01(\x0e\x32\x1f.circuit.pb.v2.StopMessage.Type\x12\x0c\n\x04peer\x18\x02 \x01(\x0c\x12%\n\x06status\x18\x03 \x01(\x0b\x32\x15.circuit.pb.v2.Status\"\x1f\n\x04Type\x12\x0b\n\x07\x43ONNECT\x10\x00\x12\n\n\x06STATUS\x10\x01\"A\n\x0bReservation\x12\x0f\n\x07voucher\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\x12\x0e\n\x06\x65xpire\x18\x03 \x01(\x03\"\'\n\x05Limit\x12\x10\n\x08\x64uration\x18\x01 \x01(\x03\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x03\"\xf6\x01\n\x06Status\x12(\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x1a.circuit.pb.v2.Status.Code\x12\x0f\n\x07message\x18\x02 \x01(\t\"\xb0\x01\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\x17\n\x13RESERVATION_REFUSED\x10\x64\x12\x1b\n\x17RESOURCE_LIMIT_EXCEEDED\x10\x65\x12\x15\n\x11PERMISSION_DENIED\x10\x66\x12\x16\n\x11\x43ONNECTION_FAILED\x10\xc8\x01\x12\x11\n\x0c\x44IAL_REFUSED\x10\xc9\x01\x12\x10\n\x0bSTOP_FAILED\x10\xac\x02\x12\x16\n\x11MALFORMED_MESSAGE\x10\x90\x03\x62\x06proto3') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.relay.circuit_v2.pb.circuit_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _HOPMESSAGE._serialized_start=60 - _HOPMESSAGE._serialized_end=303 - _HOPMESSAGE_TYPE._serialized_start=259 - _HOPMESSAGE_TYPE._serialized_end=303 - _STOPMESSAGE._serialized_start=306 - _STOPMESSAGE._serialized_end=452 - _STOPMESSAGE_TYPE._serialized_start=421 - _STOPMESSAGE_TYPE._serialized_end=452 - _RESERVATION._serialized_start=454 - _RESERVATION._serialized_end=519 - _LIMIT._serialized_start=521 - _LIMIT._serialized_end=560 - _STATUS._serialized_start=563 - _STATUS._serialized_end=809 - _STATUS_CODE._serialized_start=633 - _STATUS_CODE._serialized_end=809 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.relay.circuit_v2.pb.circuit_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_HOPMESSAGE']._serialized_start=60 + _globals['_HOPMESSAGE']._serialized_end=303 + _globals['_HOPMESSAGE_TYPE']._serialized_start=259 + 
_globals['_HOPMESSAGE_TYPE']._serialized_end=303 + _globals['_STOPMESSAGE']._serialized_start=306 + _globals['_STOPMESSAGE']._serialized_end=452 + _globals['_STOPMESSAGE_TYPE']._serialized_start=421 + _globals['_STOPMESSAGE_TYPE']._serialized_end=452 + _globals['_RESERVATION']._serialized_start=454 + _globals['_RESERVATION']._serialized_end=519 + _globals['_LIMIT']._serialized_start=521 + _globals['_LIMIT']._serialized_end=560 + _globals['_STATUS']._serialized_start=563 + _globals['_STATUS']._serialized_end=809 + _globals['_STATUS_CODE']._serialized_start=633 + _globals['_STATUS_CODE']._serialized_end=809 # @@protoc_insertion_point(module_scope) diff --git a/libp2p/relay/circuit_v2/pb/dcutr_pb2.py b/libp2p/relay/circuit_v2/pb/dcutr_pb2.py index 59e49a79b..efa32a46c 100644 --- a/libp2p/relay/circuit_v2/pb/dcutr_pb2.py +++ b/libp2p/relay/circuit_v2/pb/dcutr_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/relay/circuit_v2/pb/dcutr.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/relay/circuit_v2/pb/dcutr.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,13 +26,13 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&libp2p/relay/circuit_v2/pb/dcutr.proto\x12\x0cholepunch.pb\"i\n\tHolePunch\x12*\n\x04type\x18\x01 \x02(\x0e\x32\x1c.holepunch.pb.HolePunch.Type\x12\x10\n\x08ObsAddrs\x18\x02 \x03(\x0c\"\x1e\n\x04Type\x12\x0b\n\x07\x43ONNECT\x10\x64\x12\t\n\x04SYNC\x10\xac\x02') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.relay.circuit_v2.pb.dcutr_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _HOLEPUNCH._serialized_start=56 - _HOLEPUNCH._serialized_end=161 - _HOLEPUNCH_TYPE._serialized_start=131 - _HOLEPUNCH_TYPE._serialized_end=161 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.relay.circuit_v2.pb.dcutr_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_HOLEPUNCH']._serialized_start=56 + _globals['_HOLEPUNCH']._serialized_end=161 + _globals['_HOLEPUNCH_TYPE']._serialized_start=131 + _globals['_HOLEPUNCH_TYPE']._serialized_end=161 # @@protoc_insertion_point(module_scope) diff --git a/libp2p/security/insecure/pb/plaintext_pb2.py b/libp2p/security/insecure/pb/plaintext_pb2.py index 005a3d600..421a6150f 100644 --- a/libp2p/security/insecure/pb/plaintext_pb2.py +++ b/libp2p/security/insecure/pb/plaintext_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE
 # source: libp2p/security/insecure/pb/plaintext.proto
+# Protobuf Python Version: 5.29.3
 """Generated protocol buffer code."""
-from google.protobuf.internal import builder as _builder
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import runtime_version as _runtime_version
 from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+_runtime_version.ValidateProtobufRuntimeVersion(
+    _runtime_version.Domain.PUBLIC,
+    5,
+    29,
+    3,
+    '',
+    'libp2p/security/insecure/pb/plaintext.proto'
+)
 # @@protoc_insertion_point(imports)
 
 _sym_db = _symbol_database.Default()
 
@@ -16,11 +27,11 @@
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+libp2p/security/insecure/pb/plaintext.proto\x12\x0cplaintext.pb\x1a\x1dlibp2p/crypto/pb/crypto.proto\"<\n\x08\x45xchange\x12\n\n\x02id\x18\x01 \x01(\x0c\x12$\n\x06pubkey\x18\x02 \x01(\x0b\x32\x14.crypto.pb.PublicKey')
 
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.insecure.pb.plaintext_pb2', globals())
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  _EXCHANGE._serialized_start=92
-  _EXCHANGE._serialized_end=152
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.insecure.pb.plaintext_pb2', _globals)
+if not _descriptor._USE_C_DESCRIPTORS:
+  DESCRIPTOR._loaded_options = None
+  _globals['_EXCHANGE']._serialized_start=92
+  _globals['_EXCHANGE']._serialized_end=152
 # @@protoc_insertion_point(module_scope)
diff --git a/libp2p/security/noise/early_data.py b/libp2p/security/noise/early_data.py
new file mode 100644
index 000000000..00e08116c
--- /dev/null
+++ b/libp2p/security/noise/early_data.py
@@ -0,0 +1,282 @@
+"""Early data handlers for the Noise protocol."""
+
+from abc import ABC, abstractmethod
+import asyncio
+from typing import Optional, Protocol, runtime_checkable
+
+
+@runtime_checkable
+class EarlyDataHandler(Protocol):
+    """Protocol for handling early data in the Noise handshake."""
+
+    async def handle_early_data(self, data: bytes) -> None:
+        """
+        Handle early data received during the handshake.
+
+        Args:
+            data: The early data received
+
+        Raises:
+            Exception: If early data cannot be processed
+
+        """
+        ...
+
+
+class AsyncEarlyDataHandler(ABC):
+    """Abstract base class for async early data handlers."""
+
+    @abstractmethod
+    async def handle_early_data(self, data: bytes) -> None:
+        """
+        Handle early data received during the handshake.
+
+        Args:
+            data: The early data received
+
+        Raises:
+            Exception: If early data cannot be processed
+
+        """
+        pass
+
+
+class SyncEarlyDataHandler(ABC):
+    """Abstract base class for synchronous early data handlers."""
+
+    @abstractmethod
+    def handle_early_data(self, data: bytes) -> None:
+        """
+        Handle early data received during the handshake.
+
+        Args:
+            data: The early data received
+
+        Raises:
+            Exception: If early data cannot be processed
+
+        """
+        pass
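Because EarlyDataHandler is a runtime-checkable Protocol, any object exposing a matching async handle_early_data method satisfies it structurally, without subclassing the ABCs above. A minimal sketch (TokenHandler is a hypothetical handler introduced only for illustration):

import asyncio

from libp2p.security.noise.early_data import EarlyDataHandler


class TokenHandler:
    """Hypothetical handler treating the early-data payload as an auth token."""

    def __init__(self) -> None:
        self.token: bytes | None = None

    async def handle_early_data(self, data: bytes) -> None:
        self.token = data


handler = TokenHandler()
assert isinstance(handler, EarlyDataHandler)  # satisfied structurally
asyncio.run(handler.handle_early_data(b"bearer-xyz"))
assert handler.token == b"bearer-xyz"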
+
+
+class LoggingEarlyDataHandler(AsyncEarlyDataHandler):
+    """Early data handler that logs received data."""
+
+    def __init__(self, logger_name: str = "early_data"):
+        self.logger_name = logger_name
+
+    async def handle_early_data(self, data: bytes) -> None:
+        """
+        Log the received early data.
+
+        Args:
+            data: The early data received
+
+        """
+        import logging
+        logger = logging.getLogger(self.logger_name)
+        logger.info(f"Received early data: {len(data)} bytes")
+        logger.debug(f"Early data content: {data}")
+
+
+class BufferingEarlyDataHandler(AsyncEarlyDataHandler):
+    """Early data handler that buffers received data."""
+
+    def __init__(self, max_buffer_size: int = 1024 * 1024):  # 1MB default
+        self.max_buffer_size = max_buffer_size
+        self.buffer: list[bytes] = []
+        self.total_size = 0
+
+    async def handle_early_data(self, data: bytes) -> None:
+        """
+        Buffer the received early data.
+
+        Args:
+            data: The early data received
+
+        Raises:
+            ValueError: If buffer size would exceed maximum
+
+        """
+        if self.total_size + len(data) > self.max_buffer_size:
+            raise ValueError(
+                f"Early data buffer size would exceed maximum of "
+                f"{self.max_buffer_size} bytes"
+            )
+
+        self.buffer.append(data)
+        self.total_size += len(data)
+
+    def get_buffered_data(self) -> bytes:
+        """
+        Get all buffered early data.
+
+        Returns:
+            bytes: All buffered early data concatenated
+
+        """
+        return b"".join(self.buffer)
+
+    def clear_buffer(self) -> None:
+        """Clear the early data buffer."""
+        self.buffer.clear()
+        self.total_size = 0
+
+    def __len__(self) -> int:
+        """Get the number of buffered data chunks."""
+        return len(self.buffer)
+
+    @property
+    def size(self) -> int:
+        """Get the total size of buffered data."""
+        return self.total_size
+
+
+class CallbackEarlyDataHandler(AsyncEarlyDataHandler):
+    """Early data handler that calls a user-provided callback."""
+
+    def __init__(self, callback):
+        """
+        Initialize with a callback function.
+
+        Args:
+            callback: Function to call with early data
+
+        """
+        self.callback = callback
+
+    async def handle_early_data(self, data: bytes) -> None:
+        """
+        Call the user-provided callback with early data.
+
+        Args:
+            data: The early data received
+
+        Raises:
+            Exception: If the callback raises an exception
+
+        """
+        if asyncio.iscoroutinefunction(self.callback):
+            await self.callback(data)
+        else:
+            self.callback(data)
+
+
+class CompositeEarlyDataHandler(AsyncEarlyDataHandler):
+    """Early data handler that delegates to multiple handlers."""
+
+    def __init__(self, handlers: list[EarlyDataHandler]):
+        """
+        Initialize with a list of handlers.
+
+        Args:
+            handlers: List of early data handlers
+
+        """
+        self.handlers = handlers
+
+    async def handle_early_data(self, data: bytes) -> None:
+        """
+        Handle early data by delegating to all handlers.
+
+        Args:
+            data: The early data received
+
+        Raises:
+            Exception: If any handler raises an exception
+
+        """
+        for handler in self.handlers:
+            if asyncio.iscoroutinefunction(handler.handle_early_data):
+                await handler.handle_early_data(data)
+            else:
+                handler.handle_early_data(data)
+
+    def add_handler(self, handler: EarlyDataHandler) -> None:
+        """
+        Add a handler to the composite.
+
+        Args:
+            handler: Early data handler to add
+
+        """
+        self.handlers.append(handler)
+
+    def remove_handler(self, handler: EarlyDataHandler) -> None:
+        """
+        Remove a handler from the composite.
+
+        Args:
+            handler: Early data handler to remove
+
+        """
+        if handler in self.handlers:
+            self.handlers.remove(handler)
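A usage sketch of the concrete handlers above, composing logging and buffering; asyncio.run is used here only to drive the coroutines:

import asyncio

from libp2p.security.noise.early_data import (
    BufferingEarlyDataHandler,
    CompositeEarlyDataHandler,
    LoggingEarlyDataHandler,
)

buffering = BufferingEarlyDataHandler(max_buffer_size=64 * 1024)
composite = CompositeEarlyDataHandler([LoggingEarlyDataHandler(), buffering])

# Two early-data chunks arriving during a handshake.
asyncio.run(composite.handle_early_data(b"chunk-1"))
asyncio.run(composite.handle_early_data(b"chunk-2"))

assert buffering.get_buffered_data() == b"chunk-1chunk-2"
assert buffering.size == 14 and len(buffering) == 2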
+ + Args: + handler: Early data handler to use + + """ + self.handler = handler + self._early_data_received = False + self._early_data_buffer: Optional[bytes] = None + + async def handle_early_data(self, data: bytes) -> None: + """ + Handle early data using the configured handler. + + Args: + data: The early data received + + """ + self._early_data_received = True + self._early_data_buffer = data + + if self.handler is not None: + if asyncio.iscoroutinefunction(self.handler.handle_early_data): + await self.handler.handle_early_data(data) + else: + self.handler.handle_early_data(data) + + def has_early_data(self) -> bool: + """ + Check if early data has been received. + + Returns: + bool: True if early data has been received + + """ + return self._early_data_received + + def get_early_data(self) -> Optional[bytes]: + """ + Get the received early data. + + Returns: + Optional[bytes]: The early data if received, None otherwise + + """ + return self._early_data_buffer + + def clear_early_data(self) -> None: + """Clear the early data buffer.""" + self._early_data_received = False + self._early_data_buffer = None + + def set_handler(self, handler: EarlyDataHandler) -> None: + """ + Set a new early data handler. + + Args: + handler: Early data handler to use + + """ + self.handler = handler diff --git a/libp2p/security/noise/messages.py b/libp2p/security/noise/messages.py index 309b24b06..f11c870b2 100644 --- a/libp2p/security/noise/messages.py +++ b/libp2p/security/noise/messages.py @@ -1,5 +1,6 @@ from dataclasses import ( dataclass, + field, ) from libp2p.crypto.keys import ( @@ -15,29 +16,219 @@ SIGNED_DATA_PREFIX = "noise-libp2p-static-key:" +@dataclass +class NoiseExtensions: + """ + Noise protocol extensions for advanced features like WebTransport and early data. + + This class provides support for: + - WebTransport certificate hashes for WebTransport support + - Early data payload for 0-RTT support + """ + + webtransport_certhashes: list[bytes] = field(default_factory=list) + early_data: bytes | None = None + + def to_protobuf(self) -> noise_pb.NoiseExtensions: + """ + Convert to protobuf message. + + Returns: + noise_pb.NoiseExtensions: The protobuf message representation + + """ + ext = noise_pb.NoiseExtensions() + ext.webtransport_certhashes.extend(self.webtransport_certhashes) + if self.early_data is not None: + ext.early_data = self.early_data + return ext + + @classmethod + def from_protobuf(cls, pb_ext: noise_pb.NoiseExtensions) -> "NoiseExtensions": + """ + Create from protobuf message. + + Args: + pb_ext: The protobuf message to convert + + Returns: + NoiseExtensions: The Python dataclass representation + + """ + early_data = None + if pb_ext.early_data != b"": + early_data = pb_ext.early_data + return cls( + webtransport_certhashes=list(pb_ext.webtransport_certhashes), + early_data=early_data, + ) + + def is_empty(self) -> bool: + """ + Check if extensions are empty (no data). + + Returns: + bool: True if no extensions data is present + + """ + return not self.webtransport_certhashes and self.early_data is None + + def has_webtransport_certhashes(self) -> bool: + """ + Check if WebTransport certificate hashes are present. + + Returns: + bool: True if WebTransport certificate hashes are present + + """ + return bool(self.webtransport_certhashes) + + def has_early_data(self) -> bool: + """ + Check if early data is present. 
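# Sketch of the NoiseExtensions protobuf round-trip defined above. The
# certificate hashes are dummy 32-byte strings; imports assume this
# patch is applied.
from libp2p.security.noise.messages import NoiseExtensions

ext = NoiseExtensions(
    webtransport_certhashes=[b"\x01" * 32, b"\x02" * 32],
    early_data=b"0-rtt payload",
)
roundtrip = NoiseExtensions.from_protobuf(ext.to_protobuf())
assert roundtrip == ext                       # dataclass field equality
assert roundtrip.has_webtransport_certhashes()
assert not roundtrip.is_empty()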
+ + Returns: + bool: True if early data is present + + """ + return self.early_data is not None + + @dataclass class NoiseHandshakePayload: + """ + Noise handshake payload containing peer identity and optional extensions. + + This class represents the payload sent during Noise handshake and provides: + - Peer identity verification through public key and signature + - Optional early data for 0-RTT support + - Optional extensions for advanced features like WebTransport + """ + id_pubkey: PublicKey id_sig: bytes early_data: bytes | None = None + extensions: NoiseExtensions | None = None def serialize(self) -> bytes: + """ + Serialize the handshake payload to protobuf bytes. + + Returns: + bytes: The serialized protobuf message + + Raises: + ValueError: If the payload is invalid + + """ + if not self.id_pubkey or not self.id_sig: + raise ValueError("Invalid handshake payload: missing required fields") + msg = noise_pb.NoiseHandshakePayload( identity_key=self.id_pubkey.serialize(), identity_sig=self.id_sig ) - if self.early_data is not None: + + # Handle early data: prefer extensions over legacy data field + if self.extensions is not None and self.extensions.early_data is not None: + # Early data is in extensions + msg.extensions.CopyFrom(self.extensions.to_protobuf()) + elif self.early_data is not None: + # Legacy early data in data field (for backward compatibility) msg.data = self.early_data + if self.extensions is not None: + # Still include extensions even if early data is in legacy field + msg.extensions.CopyFrom(self.extensions.to_protobuf()) + elif self.extensions is not None: + # Extensions without early data + msg.extensions.CopyFrom(self.extensions.to_protobuf()) + return msg.SerializeToString() @classmethod def deserialize(cls, protobuf_bytes: bytes) -> "NoiseHandshakePayload": - msg = noise_pb.NoiseHandshakePayload.FromString(protobuf_bytes) + """ + Deserialize protobuf bytes to handshake payload. + + Args: + protobuf_bytes: The serialized protobuf message + + Returns: + NoiseHandshakePayload: The deserialized handshake payload + + Raises: + ValueError: If the protobuf data is invalid + + """ + if not protobuf_bytes: + raise ValueError("Empty protobuf data") + + try: + msg = noise_pb.NoiseHandshakePayload.FromString(protobuf_bytes) + except Exception as e: + raise ValueError(f"Failed to deserialize protobuf: {e}") + + if not msg.identity_key or not msg.identity_sig: + raise ValueError("Invalid handshake payload: missing required fields") + + extensions = None + early_data = None + + if msg.HasField("extensions"): + extensions = NoiseExtensions.from_protobuf(msg.extensions) + # Early data from extensions takes precedence + if extensions.early_data is not None: + early_data = extensions.early_data + + # Fall back to legacy data field if no early data in extensions + if early_data is None: + early_data = msg.data if msg.data != b"" else None + + try: + id_pubkey = deserialize_public_key(msg.identity_key) + except Exception as e: + raise ValueError(f"Failed to deserialize public key: {e}") + return cls( - id_pubkey=deserialize_public_key(msg.identity_key), + id_pubkey=id_pubkey, id_sig=msg.identity_sig, - early_data=msg.data if msg.data != b"" else None, + early_data=early_data, + extensions=extensions, ) + def has_extensions(self) -> bool: + """ + Check if extensions are present. 
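# Sketch of the precedence rule implemented above: after a round-trip,
# early data carried in extensions wins over the legacy `data` field.
# The signature is a dummy, so the payload is structurally valid but
# not verifiable; imports assume this patch is applied.
from libp2p.crypto.secp256k1 import create_new_key_pair
from libp2p.security.noise.messages import (
    NoiseExtensions,
    NoiseHandshakePayload,
)

payload = NoiseHandshakePayload(
    id_pubkey=create_new_key_pair().public_key,
    id_sig=b"dummy-signature",
    early_data=b"legacy",  # ignored once extensions carry early data
    extensions=NoiseExtensions(early_data=b"from-extensions"),
)
decoded = NoiseHandshakePayload.deserialize(payload.serialize())
assert decoded.get_early_data() == b"from-extensions"
assert decoded.has_extensions()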
+ + Returns: + bool: True if extensions are present + + """ + return self.extensions is not None and not self.extensions.is_empty() + + def has_early_data(self) -> bool: + """ + Check if early data is present (either in extensions or legacy field). + + Returns: + bool: True if early data is present + + """ + if self.extensions is not None and self.extensions.has_early_data(): + return True + return self.early_data is not None + + def get_early_data(self) -> bytes | None: + """ + Get early data, preferring extensions over legacy field. + + Returns: + bytes | None: The early data if present + + """ + if self.extensions is not None and self.extensions.has_early_data(): + return self.extensions.early_data + return self.early_data + def make_data_to_be_signed(noise_static_pubkey: PublicKey) -> bytes: prefix_bytes = SIGNED_DATA_PREFIX.encode("utf-8") diff --git a/libp2p/security/noise/patterns.py b/libp2p/security/noise/patterns.py index 00f51d063..b2c364462 100644 --- a/libp2p/security/noise/patterns.py +++ b/libp2p/security/noise/patterns.py @@ -41,6 +41,7 @@ NoiseTransportReadWriter, ) from .messages import ( + NoiseExtensions, NoiseHandshakePayload, make_handshake_payload_sig, verify_handshake_payload_sig, @@ -73,11 +74,53 @@ def create_noise_state(self) -> NoiseState: raise NoiseStateError("noise_protocol is not initialized") return noise_state - def make_handshake_payload(self) -> NoiseHandshakePayload: + def make_handshake_payload( + self, extensions: NoiseExtensions | None = None + ) -> NoiseHandshakePayload: signature = make_handshake_payload_sig( self.libp2p_privkey, self.noise_static_key.get_public_key() ) - return NoiseHandshakePayload(self.libp2p_privkey.get_public_key(), signature) + + # Handle early data through extensions (prioritize extensions early data) + if extensions is not None: + # Extensions provided - use extensions early data if available + if extensions.early_data is not None: + # Extensions have early data - use it + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=None, # Early data is in extensions + extensions=extensions, + ) + elif self.early_data is not None: + # No extensions early data, but pattern has early data + # - embed in extensions + extensions_with_early_data = NoiseExtensions( + webtransport_certhashes=extensions.webtransport_certhashes, + early_data=self.early_data, + ) + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=None, # Early data is now in extensions + extensions=extensions_with_early_data, + ) + else: + # No early data anywhere - just extensions + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=None, + extensions=extensions, + ) + else: + # No extensions, use legacy early data + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=self.early_data, + extensions=None, + ) class PatternXX(BasePattern): diff --git a/libp2p/security/noise/patterns_ik.py b/libp2p/security/noise/patterns_ik.py new file mode 100644 index 000000000..df5d57f24 --- /dev/null +++ b/libp2p/security/noise/patterns_ik.py @@ -0,0 +1,280 @@ +"""IK handshake pattern implementation for Noise protocol.""" + +from typing import Optional + +from noise.connection import ( + Keypair as NoiseKeypairEnum, + NoiseConnection as NoiseState, +) + +from libp2p.abc import ( + IRawConnection, + ISecureConn, +) +from libp2p.crypto.secp256k1 import PrivateKey, PublicKey +from libp2p.peer.id import ID +from 
libp2p.security.noise.exceptions import ( + HandshakeHasNotFinished, + InvalidSignature, + NoiseStateError, + PeerIDMismatchesPubkey, +) +from libp2p.security.noise.messages import ( + NoiseExtensions, + NoiseHandshakePayload, + make_handshake_payload_sig, + verify_handshake_payload_sig, +) +from libp2p.security.noise.patterns import BasePattern +from libp2p.security.secure_session import SecureSession + + +class PatternIK(BasePattern): + """ + Noise IK handshake pattern implementation. + + The IK pattern is a one-way authenticated key exchange where the initiator + knows the responder's static public key beforehand. This pattern provides: + - Mutual authentication + - Forward secrecy + - Reduced handshake latency (2 messages instead of 3) + """ + + def __init__( + self, + local_peer: ID, + libp2p_privkey: PrivateKey, + noise_static_key: PrivateKey, + early_data: Optional[bytes] = None, + remote_peer: Optional[ID] = None, + remote_static_key: Optional[PublicKey] = None, + ): + """ + Initialize the IK pattern. + + Args: + local_peer: Local peer ID + libp2p_privkey: libp2p private key + noise_static_key: Noise static private key + early_data: Optional early data + remote_peer: Remote peer ID (required for IK pattern) + remote_static_key: Remote static public key (required for IK pattern) + + """ + # Initialize base pattern attributes + self.local_peer = local_peer + self.libp2p_privkey = libp2p_privkey + self.noise_static_key = noise_static_key + self.early_data = early_data + + if remote_peer is None: + raise ValueError("IK pattern requires remote_peer to be specified") + if remote_static_key is None: + raise ValueError("IK pattern requires remote_static_key to be specified") + + self.remote_peer = remote_peer + self.remote_static_key = remote_static_key + self.protocol_name = b"Noise_IK_25519_ChaChaPoly_SHA256" + + def create_noise_state(self) -> NoiseState: + """ + Create and configure Noise state for IK pattern. + + Returns: + NoiseState: Configured Noise state + + """ + noise_state = NoiseState.from_name(self.protocol_name) + noise_state.set_keypair_from_private_bytes( + NoiseKeypairEnum.STATIC, self.noise_static_key.to_bytes() + ) + + # Set the remote static key for IK pattern + noise_state.set_keypair_from_public_bytes( + NoiseKeypairEnum.REMOTE_STATIC, self.remote_static_key.to_bytes() + ) + + if noise_state.noise_protocol is None: + raise NoiseStateError("noise_protocol is not initialized") + + return noise_state + + async def handshake_outbound( + self, conn: IRawConnection, remote_peer: ID + ) -> ISecureConn: + """ + Perform outbound IK handshake (initiator). 
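# Sketch constructing the PatternIK defined above. Unlike XX, both
# remote_peer and remote_static_key must be known up front, which is
# what lets IK finish in two messages; omitting either raises
# ValueError. All keys are freshly generated dummies.
from libp2p.crypto.secp256k1 import create_new_key_pair
from libp2p.peer.id import ID
from libp2p.security.noise.patterns_ik import PatternIK

local, local_noise = create_new_key_pair(), create_new_key_pair()
remote, remote_noise = create_new_key_pair(), create_new_key_pair()

pattern = PatternIK(
    local_peer=ID.from_pubkey(local.public_key),
    libp2p_privkey=local.private_key,
    noise_static_key=local_noise.private_key,
    early_data=None,
    remote_peer=ID.from_pubkey(remote.public_key),
    remote_static_key=remote_noise.public_key,
)
assert pattern.protocol_name == b"Noise_IK_25519_ChaChaPoly_SHA256"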
+ + Args: + conn: Raw connection to perform handshake on + remote_peer: Remote peer ID + + Returns: + ISecureConn: Secure connection after handshake + + Raises: + PeerIDMismatchesPubkey: If remote peer ID doesn't match expected + InvalidSignature: If signature verification fails + HandshakeHasNotFinished: If handshake doesn't complete properly + + """ + if remote_peer != self.remote_peer: + raise ValueError( + f"Remote peer mismatch: expected {self.remote_peer}, got {remote_peer}" + ) + + noise_state = self.create_noise_state() + noise_state.set_as_initiator() + noise_state.start_handshake() + + from libp2p.security.noise.io import NoiseHandshakeReadWriter + read_writer = NoiseHandshakeReadWriter(conn, noise_state) + + # IK pattern: Send encrypted payload immediately (message 1) + our_payload = self.make_handshake_payload() + msg_1 = our_payload.serialize() + await read_writer.write_msg(msg_1) + + # Receive response (message 2) + msg_2 = await read_writer.read_msg() + peer_handshake_payload = NoiseHandshakePayload.deserialize(msg_2) + + # Verify the response + if not verify_handshake_payload_sig( + peer_handshake_payload, self.remote_static_key + ): + raise InvalidSignature("Invalid signature in IK handshake response") + + # Verify peer ID matches + remote_peer_id_from_pubkey = ID.from_pubkey(peer_handshake_payload.id_pubkey) + if remote_peer_id_from_pubkey != remote_peer: + raise PeerIDMismatchesPubkey( + f"peer id mismatch: expected={remote_peer}, " + f"got={remote_peer_id_from_pubkey}" + ) + + # Verify handshake is complete + if not noise_state.handshake_finished: + raise HandshakeHasNotFinished("IK handshake not finished") + + # Create secure session + from libp2p.security.noise.io import NoiseTransportReadWriter + transport_read_writer = NoiseTransportReadWriter(conn, noise_state) + + return SecureSession( + local_peer=self.local_peer, + local_private_key=self.libp2p_privkey, + remote_peer=remote_peer_id_from_pubkey, + remote_permanent_pubkey=self.remote_static_key, + is_initiator=True, + conn=transport_read_writer, + ) + + async def handshake_inbound(self, conn: IRawConnection) -> ISecureConn: + """ + Perform inbound IK handshake (responder). 
+ + Args: + conn: Raw connection to perform handshake on + + Returns: + ISecureConn: Secure connection after handshake + + Raises: + InvalidSignature: If signature verification fails + HandshakeHasNotFinished: If handshake doesn't complete properly + + """ + noise_state = self.create_noise_state() + noise_state.set_as_responder() + noise_state.start_handshake() + + from libp2p.security.noise.io import NoiseHandshakeReadWriter + read_writer = NoiseHandshakeReadWriter(conn, noise_state) + + # Receive encrypted payload (message 1) + msg_1 = await read_writer.read_msg() + peer_handshake_payload = NoiseHandshakePayload.deserialize(msg_1) + + # Verify the payload + if not verify_handshake_payload_sig( + peer_handshake_payload, self.remote_static_key + ): + raise InvalidSignature("Invalid signature in IK handshake request") + + # Verify peer ID matches expected + remote_peer_id_from_pubkey = ID.from_pubkey(peer_handshake_payload.id_pubkey) + if remote_peer_id_from_pubkey != self.remote_peer: + raise PeerIDMismatchesPubkey( + f"peer id mismatch: expected={self.remote_peer}, " + f"got={remote_peer_id_from_pubkey}" + ) + + # Send response (message 2) + our_payload = self.make_handshake_payload() + msg_2 = our_payload.serialize() + await read_writer.write_msg(msg_2) + + # Verify handshake is complete + if not noise_state.handshake_finished: + raise HandshakeHasNotFinished("IK handshake not finished") + + # Create secure session + from libp2p.security.noise.io import NoiseTransportReadWriter + transport_read_writer = NoiseTransportReadWriter(conn, noise_state) + + return SecureSession( + local_peer=self.local_peer, + local_private_key=self.libp2p_privkey, + remote_peer=remote_peer_id_from_pubkey, + remote_permanent_pubkey=self.remote_static_key, + is_initiator=False, + conn=transport_read_writer, + ) + + def make_handshake_payload( + self, extensions: Optional[NoiseExtensions] = None + ) -> NoiseHandshakePayload: + """ + Create handshake payload for IK pattern. 
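# Sketch of the payload authentication both handshake directions above
# rely on: a peer signs its own noise static public key with its libp2p
# identity key, and the other side verifies that signature against the
# static key it holds for that peer. Keys here are dummies.
from libp2p.crypto.secp256k1 import create_new_key_pair
from libp2p.security.noise.messages import (
    NoiseHandshakePayload,
    make_handshake_payload_sig,
    verify_handshake_payload_sig,
)

identity, noise_static = create_new_key_pair(), create_new_key_pair()

sig = make_handshake_payload_sig(identity.private_key, noise_static.public_key)
payload = NoiseHandshakePayload(identity.public_key, sig)
decoded = NoiseHandshakePayload.deserialize(payload.serialize())
assert verify_handshake_payload_sig(decoded, noise_static.public_key)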
+ + Args: + extensions: Optional noise extensions + + Returns: + NoiseHandshakePayload: Handshake payload + + """ + signature = make_handshake_payload_sig( + self.libp2p_privkey, self.noise_static_key.get_public_key() + ) + + # Handle early data through extensions + if extensions is not None and self.early_data is not None: + # Create extensions with early data + extensions_with_early_data = NoiseExtensions( + webtransport_certhashes=extensions.webtransport_certhashes, + early_data=self.early_data + ) + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=None, # Early data is now in extensions + extensions=extensions_with_early_data, + ) + elif extensions is not None: + # Extensions without early data + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=self.early_data, # Keep legacy early data + extensions=extensions, + ) + else: + # No extensions, use legacy early data + return NoiseHandshakePayload( + self.libp2p_privkey.get_public_key(), + signature, + early_data=self.early_data, + extensions=None, + ) diff --git a/libp2p/security/noise/pb/noise.proto b/libp2p/security/noise/pb/noise.proto index 05a78c6f3..ea4ab8aba 100644 --- a/libp2p/security/noise/pb/noise.proto +++ b/libp2p/security/noise/pb/noise.proto @@ -1,8 +1,22 @@ syntax = "proto3"; package pb; +// NoiseExtensions contains optional extensions for the Noise handshake +message NoiseExtensions { + // WebTransport certificate hashes for WebTransport support + repeated bytes webtransport_certhashes = 1; + // Early data payload for 0-RTT support + bytes early_data = 2; +} + +// NoiseHandshakePayload is the payload sent during Noise handshake message NoiseHandshakePayload { + // The libp2p public key of the peer bytes identity_key = 1; + // Signature of the noise static key by the libp2p private key bytes identity_sig = 2; + // Legacy early data field (deprecated, use extensions.early_data) bytes data = 3; + // Optional extensions for advanced features + NoiseExtensions extensions = 4; } diff --git a/libp2p/security/noise/pb/noise_pb2.py b/libp2p/security/noise/pb/noise_pb2.py index dd078b0f4..7c4c382e7 100644 --- a/libp2p/security/noise/pb/noise_pb2.py +++ b/libp2p/security/noise/pb/noise_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
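# Sketch of the wire message defined in noise.proto above, built from
# the regenerated bindings; field values are dummies.
from libp2p.security.noise.pb import noise_pb2

msg = noise_pb2.NoiseHandshakePayload(
    identity_key=b"\x01" * 33,
    identity_sig=b"\x02" * 64,
    extensions=noise_pb2.NoiseExtensions(
        webtransport_certhashes=[b"\x03" * 32],
        early_data=b"0-rtt payload",
    ),
)
assert msg.HasField("extensions")  # submessage presence is tracked
wire = msg.SerializeToString()
assert noise_pb2.NoiseHandshakePayload.FromString(wire) == msg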
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/security/noise/pb/noise.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/security/noise/pb/noise.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -13,13 +24,15 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$libp2p/security/noise/pb/noise.proto\x12\x02pb\"Q\n\x15NoiseHandshakePayload\x12\x14\n\x0cidentity_key\x18\x01 \x01(\x0c\x12\x14\n\x0cidentity_sig\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x62\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.noise.pb.noise_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$libp2p/security/noise/pb/noise.proto\x12\x02pb\"F\n\x0fNoiseExtensions\x12\x1f\n\x17webtransport_certhashes\x18\x01 \x03(\x0c\x12\x12\n\nearly_data\x18\x02 \x01(\x0c\"z\n\x15NoiseHandshakePayload\x12\x14\n\x0cidentity_key\x18\x01 \x01(\x0c\x12\x14\n\x0cidentity_sig\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\'\n\nextensions\x18\x04 \x01(\x0b\x32\x13.pb.NoiseExtensionsb\x06proto3') - DESCRIPTOR._options = None - _NOISEHANDSHAKEPAYLOAD._serialized_start=44 - _NOISEHANDSHAKEPAYLOAD._serialized_end=125 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.noise.pb.noise_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_NOISEEXTENSIONS']._serialized_start=44 + _globals['_NOISEEXTENSIONS']._serialized_end=114 + _globals['_NOISEHANDSHAKEPAYLOAD']._serialized_start=116 + _globals['_NOISEHANDSHAKEPAYLOAD']._serialized_end=238 # @@protoc_insertion_point(module_scope) diff --git a/libp2p/security/noise/pb/noise_pb2.pyi b/libp2p/security/noise/pb/noise_pb2.pyi index f0f3201ef..61025096c 100644 --- a/libp2p/security/noise/pb/noise_pb2.pyi +++ b/libp2p/security/noise/pb/noise_pb2.pyi @@ -4,29 +4,67 @@ isort:skip_file """ import builtins +import collections.abc import google.protobuf.descriptor +import google.protobuf.internal.containers import google.protobuf.message import typing DESCRIPTOR: google.protobuf.descriptor.FileDescriptor +@typing.final +class NoiseExtensions(google.protobuf.message.Message): + """NoiseExtensions contains optional extensions for the Noise handshake""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + WEBTRANSPORT_CERTHASHES_FIELD_NUMBER: builtins.int + EARLY_DATA_FIELD_NUMBER: builtins.int + early_data: builtins.bytes + """Early data payload for 0-RTT support""" + @property + def webtransport_certhashes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: + """WebTransport certificate hashes for WebTransport support""" + + def __init__( + self, + *, + webtransport_certhashes: collections.abc.Iterable[builtins.bytes] | None = ..., + early_data: builtins.bytes 
= ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["early_data", b"early_data", "webtransport_certhashes", b"webtransport_certhashes"]) -> None: ... + +global___NoiseExtensions = NoiseExtensions + @typing.final class NoiseHandshakePayload(google.protobuf.message.Message): + """NoiseHandshakePayload is the payload sent during Noise handshake""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor IDENTITY_KEY_FIELD_NUMBER: builtins.int IDENTITY_SIG_FIELD_NUMBER: builtins.int DATA_FIELD_NUMBER: builtins.int + EXTENSIONS_FIELD_NUMBER: builtins.int identity_key: builtins.bytes + """The libp2p public key of the peer""" identity_sig: builtins.bytes + """Signature of the noise static key by the libp2p private key""" data: builtins.bytes + """Legacy early data field (deprecated, use extensions.early_data)""" + @property + def extensions(self) -> global___NoiseExtensions: + """Optional extensions for advanced features""" + def __init__( self, *, identity_key: builtins.bytes = ..., identity_sig: builtins.bytes = ..., data: builtins.bytes = ..., + extensions: global___NoiseExtensions | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "identity_key", b"identity_key", "identity_sig", b"identity_sig"]) -> None: ... + def HasField(self, field_name: typing.Literal["extensions", b"extensions"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["data", b"data", "extensions", b"extensions", "identity_key", b"identity_key", "identity_sig", b"identity_sig"]) -> None: ... global___NoiseHandshakePayload = NoiseHandshakePayload diff --git a/libp2p/security/noise/rekey.py b/libp2p/security/noise/rekey.py new file mode 100644 index 000000000..b7e6cc1ff --- /dev/null +++ b/libp2p/security/noise/rekey.py @@ -0,0 +1,258 @@ +"""Rekey support for Noise protocol sessions.""" + +from abc import ABC, abstractmethod +import time +from typing import Optional, Protocol, runtime_checkable + + +@runtime_checkable +class RekeyPolicy(Protocol): + """Protocol for rekey policies.""" + + def should_rekey(self, bytes_processed: int, time_elapsed: float) -> bool: + """ + Determine if a rekey should be performed. + + Args: + bytes_processed: Number of bytes processed since last rekey + time_elapsed: Time elapsed since last rekey in seconds + + Returns: + bool: True if rekey should be performed + + """ + ... + + +class TimeBasedRekeyPolicy(RekeyPolicy): + """Rekey policy based on time elapsed.""" + + def __init__(self, max_time_seconds: int = 3600): # 1 hour default + """ + Initialize with maximum time between rekeys. + + Args: + max_time_seconds: Maximum time between rekeys in seconds + + """ + self.max_time_seconds = max_time_seconds + + def should_rekey(self, bytes_processed: int, time_elapsed: float) -> bool: + """ + Check if rekey should be performed based on time. + + Args: + bytes_processed: Number of bytes processed (ignored for time-based policy) + time_elapsed: Time elapsed since last rekey in seconds + + Returns: + bool: True if maximum time has been exceeded + + """ + return time_elapsed >= self.max_time_seconds + + +class ByteCountRekeyPolicy(RekeyPolicy): + """Rekey policy based on bytes processed.""" + + def __init__(self, max_bytes: int = 1024 * 1024 * 1024): # 1GB default + """ + Initialize with maximum bytes between rekeys. 
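# Sketch of the policy interface above: should_rekey() is a pure
# predicate over (bytes_processed, time_elapsed), so policies can be
# unit-tested without a live session. Thresholds here are arbitrary.
from libp2p.security.noise.rekey import (
    ByteCountRekeyPolicy,
    TimeBasedRekeyPolicy,
)

time_policy = TimeBasedRekeyPolicy(max_time_seconds=60)
byte_policy = ByteCountRekeyPolicy(max_bytes=1_000_000)

assert time_policy.should_rekey(0, 61.0)           # time limit exceeded
assert not time_policy.should_rekey(10**9, 1.0)    # bytes are ignored
assert byte_policy.should_rekey(1_000_000, 0.0)    # byte limit reached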
+ + Args: + max_bytes: Maximum bytes processed between rekeys + + """ + self.max_bytes = max_bytes + + def should_rekey(self, bytes_processed: int, time_elapsed: float) -> bool: + """ + Check if rekey should be performed based on bytes processed. + + Args: + bytes_processed: Number of bytes processed since last rekey + time_elapsed: Time elapsed (ignored for byte-based policy) + + Returns: + bool: True if maximum bytes have been processed + + """ + return bytes_processed >= self.max_bytes + + +class CompositeRekeyPolicy(RekeyPolicy): + """Rekey policy that combines multiple policies.""" + + def __init__(self, policies: list[RekeyPolicy]): + """ + Initialize with a list of rekey policies. + + Args: + policies: List of rekey policies to combine + + """ + self.policies = policies + + def should_rekey(self, bytes_processed: int, time_elapsed: float) -> bool: + """ + Check if rekey should be performed based on any policy. + + Args: + bytes_processed: Number of bytes processed since last rekey + time_elapsed: Time elapsed since last rekey in seconds + + Returns: + bool: True if any policy indicates rekey should be performed + + """ + return any( + policy.should_rekey(bytes_processed, time_elapsed) + for policy in self.policies + ) + + +class RekeyManager: + """Manager for rekey operations in Noise sessions.""" + + def __init__(self, policy: Optional[RekeyPolicy] = None): + """ + Initialize with an optional rekey policy. + + Args: + policy: Rekey policy to use + + """ + self.policy = policy or CompositeRekeyPolicy([ + TimeBasedRekeyPolicy(max_time_seconds=3600), # 1 hour + ByteCountRekeyPolicy(max_bytes=1024 * 1024 * 1024), # 1GB + ]) + + self._last_rekey_time = time.time() + self._bytes_since_rekey = 0 + self._rekey_count = 0 + + def update_bytes_processed(self, bytes_count: int) -> None: + """ + Update the count of bytes processed since last rekey. + + Args: + bytes_count: Number of bytes processed + + """ + self._bytes_since_rekey += bytes_count + + def should_rekey(self) -> bool: + """ + Check if a rekey should be performed. + + Returns: + bool: True if rekey should be performed + + """ + time_elapsed = time.time() - self._last_rekey_time + return self.policy.should_rekey(self._bytes_since_rekey, time_elapsed) + + def perform_rekey(self) -> None: + """Mark that a rekey has been performed.""" + self._last_rekey_time = time.time() + self._bytes_since_rekey = 0 + self._rekey_count += 1 + + def get_stats(self) -> dict: + """ + Get rekey statistics. + + Returns: + dict: Statistics about rekey operations + + """ + time_elapsed = time.time() - self._last_rekey_time + return { + "rekey_count": self._rekey_count, + "bytes_since_rekey": self._bytes_since_rekey, + "time_since_rekey": time_elapsed, + "last_rekey_time": self._last_rekey_time, + } + + def reset_stats(self) -> None: + """Reset rekey statistics.""" + self._last_rekey_time = time.time() + self._bytes_since_rekey = 0 + self._rekey_count = 0 + + +class RekeyableSession(ABC): + """Abstract base class for sessions that support rekeying.""" + + @abstractmethod + async def rekey(self) -> None: + """ + Perform a rekey operation. + + Raises: + Exception: If rekey operation fails + + """ + pass + + +class RekeyHandler: + """Handler for managing rekey operations in Noise sessions.""" + + def __init__( + self, session: RekeyableSession, rekey_manager: Optional[RekeyManager] = None + ): + """ + Initialize with a rekeyable session and optional rekey manager. 
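# Sketch of driving the RekeyManager above from an I/O loop: account
# for traffic, ask whether the active policy fires, then record the
# rekey. The 1 KiB threshold is arbitrary.
from libp2p.security.noise.rekey import ByteCountRekeyPolicy, RekeyManager

manager = RekeyManager(ByteCountRekeyPolicy(max_bytes=1024))
manager.update_bytes_processed(2048)
if manager.should_rekey():
    # ... perform the actual cipher-state rekey here ...
    manager.perform_rekey()

stats = manager.get_stats()
assert stats["rekey_count"] == 1
assert stats["bytes_since_rekey"] == 0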
+ + Args: + session: Session that supports rekeying + rekey_manager: Rekey manager to use + + """ + self.session = session + self.rekey_manager = rekey_manager or RekeyManager() + + async def check_and_rekey(self, bytes_processed: int = 0) -> bool: + """ + Check if rekey is needed and perform it if necessary. + + Args: + bytes_processed: Number of bytes processed in this operation + + Returns: + bool: True if rekey was performed + + Raises: + Exception: If rekey operation fails + + """ + if bytes_processed > 0: + self.rekey_manager.update_bytes_processed(bytes_processed) + + if self.rekey_manager.should_rekey(): + await self.session.rekey() + self.rekey_manager.perform_rekey() + return True + + return False + + def get_rekey_stats(self) -> dict: + """ + Get rekey statistics. + + Returns: + dict: Statistics about rekey operations + + """ + return self.rekey_manager.get_stats() + + def set_rekey_policy(self, policy: RekeyPolicy) -> None: + """ + Set a new rekey policy. + + Args: + policy: Rekey policy to use + + """ + self.rekey_manager.policy = policy diff --git a/libp2p/security/noise/transport.py b/libp2p/security/noise/transport.py index b26e06447..01dfc8e08 100644 --- a/libp2p/security/noise/transport.py +++ b/libp2p/security/noise/transport.py @@ -6,6 +6,7 @@ from libp2p.crypto.keys import ( KeyPair, PrivateKey, + PublicKey, ) from libp2p.custom_types import ( TProtocol, @@ -14,20 +15,30 @@ ID, ) +from .early_data import EarlyDataHandler, EarlyDataManager from .patterns import ( IPattern, PatternXX, ) +from .patterns_ik import PatternIK +from .rekey import RekeyManager, RekeyPolicy +from .webtransport import WebTransportSupport PROTOCOL_ID = TProtocol("/noise") class Transport(ISecureTransport): + """Enhanced Noise transport with advanced features support.""" + libp2p_privkey: PrivateKey noise_privkey: PrivateKey local_peer: ID early_data: bytes | None with_noise_pipes: bool + webtransport_support: WebTransportSupport + early_data_manager: EarlyDataManager + rekey_manager: RekeyManager + _static_key_cache: dict[ID, PublicKey] # Cache for IK pattern def __init__( self, @@ -35,31 +46,140 @@ def __init__( noise_privkey: PrivateKey, early_data: bytes | None = None, with_noise_pipes: bool = False, + early_data_handler: EarlyDataHandler | None = None, + rekey_policy: RekeyPolicy | None = None, ) -> None: + """ + Initialize enhanced Noise transport. + + Args: + libp2p_keypair: libp2p key pair + noise_privkey: Noise private key + early_data: Optional early data + with_noise_pipes: Enable noise pipes support + early_data_handler: Optional early data handler + rekey_policy: Optional rekey policy + + """ self.libp2p_privkey = libp2p_keypair.private_key self.noise_privkey = noise_privkey self.local_peer = ID.from_pubkey(libp2p_keypair.public_key) self.early_data = early_data self.with_noise_pipes = with_noise_pipes - if self.with_noise_pipes: - raise NotImplementedError + # Initialize advanced features + self.webtransport_support = WebTransportSupport() + self.early_data_manager = EarlyDataManager(early_data_handler) + self.rekey_manager = RekeyManager(rekey_policy) + self._static_key_cache = {} - def get_pattern(self) -> IPattern: if self.with_noise_pipes: - raise NotImplementedError - else: - return PatternXX( - self.local_peer, - self.libp2p_privkey, - self.noise_privkey, - self.early_data, - ) + # Noise pipes are now supported with IK pattern + pass + + def get_pattern(self, remote_peer: ID | None = None) -> IPattern: + """ + Get the appropriate handshake pattern for the connection. 
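# Sketch wiring RekeyHandler to a session. RekeyableSession only
# requires an async rekey(); RecordingSession below is a hypothetical
# stand-in, not a real Noise session.
import trio

from libp2p.security.noise.rekey import (
    ByteCountRekeyPolicy,
    RekeyableSession,
    RekeyHandler,
    RekeyManager,
)

class RecordingSession(RekeyableSession):
    def __init__(self) -> None:
        self.rekeys = 0

    async def rekey(self) -> None:
        self.rekeys += 1

async def main() -> None:
    session = RecordingSession()
    handler = RekeyHandler(session, RekeyManager(ByteCountRekeyPolicy(1024)))
    # 4096 bytes trips the 1024-byte policy, so a rekey is performed.
    assert await handler.check_and_rekey(bytes_processed=4096)
    assert session.rekeys == 1

trio.run(main)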
+ + Args: + remote_peer: Remote peer ID (used for IK pattern selection) + + Returns: + IPattern: The handshake pattern to use + + """ + if self.with_noise_pipes and remote_peer is not None: + # Check if we have a cached static key for IK pattern + if remote_peer in self._static_key_cache: + remote_static_key = self._static_key_cache[remote_peer] + return PatternIK( + self.local_peer, + self.libp2p_privkey, + self.noise_privkey, + self.early_data, + remote_peer, + remote_static_key, + ) + + # Default to XX pattern + return PatternXX( + self.local_peer, + self.libp2p_privkey, + self.noise_privkey, + self.early_data, + ) + + def cache_static_key(self, peer_id: ID, static_key: PublicKey) -> None: + """ + Cache a static key for IK pattern optimization. + + Args: + peer_id: Peer ID + static_key: Static public key + + """ + self._static_key_cache[peer_id] = static_key + + def get_cached_static_key(self, peer_id: ID) -> PublicKey | None: + """ + Get a cached static key for a peer. + + Args: + peer_id: Peer ID + + Returns: + Optional[PublicKey]: Cached static key if available + + """ + return self._static_key_cache.get(peer_id) + + def clear_static_key_cache(self) -> None: + """Clear the static key cache.""" + self._static_key_cache.clear() async def secure_inbound(self, conn: IRawConnection) -> ISecureConn: + """ + Perform inbound secure connection. + + Args: + conn: Raw connection + + Returns: + ISecureConn: Secure connection + + """ pattern = self.get_pattern() - return await pattern.handshake_inbound(conn) + secure_conn = await pattern.handshake_inbound(conn) + + # Handle early data if present + if hasattr(pattern, "early_data") and pattern.early_data is not None: + await self.early_data_manager.handle_early_data(pattern.early_data) + + return secure_conn async def secure_outbound(self, conn: IRawConnection, peer_id: ID) -> ISecureConn: - pattern = self.get_pattern() - return await pattern.handshake_outbound(conn, peer_id) + """ + Perform outbound secure connection. + + Args: + conn: Raw connection + peer_id: Remote peer ID + + Returns: + ISecureConn: Secure connection + + """ + pattern = self.get_pattern(peer_id) + secure_conn = await pattern.handshake_outbound(conn, peer_id) + + # Handle early data if present + if hasattr(pattern, "early_data") and pattern.early_data is not None: + await self.early_data_manager.handle_early_data(pattern.early_data) + + # Cache static key if we learned it during handshake + if isinstance(pattern, PatternXX) and hasattr( + secure_conn, "remote_permanent_pubkey" + ): + self.cache_static_key(peer_id, secure_conn.remote_permanent_pubkey) + + return secure_conn diff --git a/libp2p/security/noise/webtransport.py b/libp2p/security/noise/webtransport.py new file mode 100644 index 000000000..178cdd693 --- /dev/null +++ b/libp2p/security/noise/webtransport.py @@ -0,0 +1,151 @@ +"""WebTransport integration for Noise protocol extensions.""" + +import hashlib +from typing import Protocol, runtime_checkable + + +@runtime_checkable +class WebTransportCertificate(Protocol): + """Protocol for WebTransport certificate handling.""" + + def get_certificate_hash(self) -> bytes: + """ + Get the SHA-256 hash of the certificate. + + Returns: + bytes: The SHA-256 hash of the certificate + + """ + ... + + +class WebTransportCertManager: + """Manager for WebTransport certificate hashes in Noise extensions.""" + + def __init__(self): + self._cert_hashes: list[bytes] = [] + + def add_certificate(self, cert_data: bytes) -> bytes: + """ + Add a certificate and return its hash. 
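# Sketch of the pattern selection implemented above: with noise pipes
# enabled, the transport upgrades a peer to IK only after a static key
# for it has been cached (e.g. learned from a prior XX handshake);
# otherwise it stays on XX. All keys are dummies.
from libp2p.crypto.secp256k1 import create_new_key_pair
from libp2p.peer.id import ID
from libp2p.security.noise.patterns import PatternXX
from libp2p.security.noise.patterns_ik import PatternIK
from libp2p.security.noise.transport import Transport

transport = Transport(
    create_new_key_pair(),
    create_new_key_pair().private_key,
    with_noise_pipes=True,
)

remote = ID.from_pubkey(create_new_key_pair().public_key)
assert isinstance(transport.get_pattern(remote), PatternXX)   # no cached key

transport.cache_static_key(remote, create_new_key_pair().public_key)
assert isinstance(transport.get_pattern(remote), PatternIK)   # upgraded to IK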
+ + Args: + cert_data: The certificate data + + Returns: + bytes: The SHA-256 hash of the certificate + + """ + cert_hash = hashlib.sha256(cert_data).digest() + if cert_hash not in self._cert_hashes: + self._cert_hashes.append(cert_hash) + return cert_hash + + def add_certificate_hash(self, cert_hash: bytes) -> None: + """ + Add a certificate hash directly. + + Args: + cert_hash: The SHA-256 hash of the certificate + + """ + if cert_hash not in self._cert_hashes: + self._cert_hashes.append(cert_hash) + + def get_certificate_hashes(self) -> list[bytes]: + """ + Get all certificate hashes. + + Returns: + list[bytes]: List of certificate hashes + + """ + return self._cert_hashes.copy() + + def has_certificate_hash(self, cert_hash: bytes) -> bool: + """ + Check if a certificate hash is present. + + Args: + cert_hash: The certificate hash to check + + Returns: + bool: True if the certificate hash is present + + """ + return cert_hash in self._cert_hashes + + def clear_certificates(self) -> None: + """Clear all certificate hashes.""" + self._cert_hashes.clear() + + def __len__(self) -> int: + """Get the number of certificate hashes.""" + return len(self._cert_hashes) + + def __bool__(self) -> bool: + """Check if there are any certificate hashes.""" + return bool(self._cert_hashes) + + +class WebTransportSupport: + """Support for WebTransport integration in Noise protocol.""" + + def __init__(self): + self._cert_manager = WebTransportCertManager() + + @property + def cert_manager(self) -> WebTransportCertManager: + """Get the certificate manager.""" + return self._cert_manager + + def add_certificate(self, cert_data: bytes) -> bytes: + """ + Add a WebTransport certificate. + + Args: + cert_data: The certificate data + + Returns: + bytes: The SHA-256 hash of the certificate + + """ + return self._cert_manager.add_certificate(cert_data) + + def get_certificate_hashes(self) -> list[bytes]: + """ + Get all certificate hashes for extensions. + + Returns: + list[bytes]: List of certificate hashes + + """ + return self._cert_manager.get_certificate_hashes() + + def has_certificates(self) -> bool: + """ + Check if any certificates are configured. + + Returns: + bool: True if certificates are configured + + """ + return bool(self._cert_manager) + + def validate_certificate_hash(self, cert_hash: bytes) -> bool: + """ + Validate a certificate hash. + + Args: + cert_hash: The certificate hash to validate + + Returns: + bool: True if the certificate hash is valid + + """ + # Basic validation: check if it's a valid SHA-256 hash (32 bytes) + if len(cert_hash) != 32: + return False + + # Check if it's a known certificate hash + return self._cert_manager.has_certificate_hash(cert_hash) diff --git a/libp2p/security/secio/pb/spipe_pb2.py b/libp2p/security/secio/pb/spipe_pb2.py index 01de258b1..a7ccf305c 100644 --- a/libp2p/security/secio/pb/spipe_pb2.py +++ b/libp2p/security/secio/pb/spipe_pb2.py @@ -1,11 +1,22 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
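# Sketch of the certificate-hash bookkeeping above, e.g. for populating
# NoiseExtensions.webtransport_certhashes; the certificate bytes are a
# dummy placeholder, not real DER data.
from libp2p.security.noise.webtransport import WebTransportSupport

wt = WebTransportSupport()
cert_hash = wt.add_certificate(b"dummy-certificate-bytes")

assert len(cert_hash) == 32                        # SHA-256 digest
assert wt.has_certificates()
assert wt.validate_certificate_hash(cert_hash)     # known and 32 bytes
assert not wt.validate_certificate_hash(b"short")  # wrong length
assert cert_hash in wt.get_certificate_hashes()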
+# NO CHECKED-IN PROTOBUF GENCODE # source: libp2p/security/secio/pb/spipe.proto +# Protobuf Python Version: 5.29.3 """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'libp2p/security/secio/pb/spipe.proto' +) # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,13 +26,13 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$libp2p/security/secio/pb/spipe.proto\x12\x08spipe.pb\"_\n\x07Propose\x12\x0c\n\x04rand\x18\x01 \x01(\x0c\x12\x12\n\npublic_key\x18\x02 \x01(\x0c\x12\x11\n\texchanges\x18\x03 \x01(\t\x12\x0f\n\x07\x63iphers\x18\x04 \x01(\t\x12\x0e\n\x06hashes\x18\x05 \x01(\t\";\n\x08\x45xchange\x12\x1c\n\x14\x65phemeral_public_key\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c') -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.secio.pb.spipe_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _PROPOSE._serialized_start=50 - _PROPOSE._serialized_end=145 - _EXCHANGE._serialized_start=147 - _EXCHANGE._serialized_end=206 +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'libp2p.security.secio.pb.spipe_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_PROPOSE']._serialized_start=50 + _globals['_PROPOSE']._serialized_end=145 + _globals['_EXCHANGE']._serialized_start=147 + _globals['_EXCHANGE']._serialized_end=206 # @@protoc_insertion_point(module_scope) diff --git a/pyproject.toml b/pyproject.toml index ab4824abf..a8ab3aa50 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "multiaddr @ git+https://github.com/multiformats/py-multiaddr.git@3ea7f866fda9268ee92506edf9d8e975274bf941", "mypy-protobuf>=3.0.0", "noiseprotocol>=0.3.0", - "protobuf>=4.25.0,<5.0.0", + "protobuf>=4.25.0,<7.0.0", "pycryptodome>=3.9.2", "pymultihash>=0.8.2", "pynacl>=1.3.0", diff --git a/tests/core/security/noise/test_early_data_extensions.py b/tests/core/security/noise/test_early_data_extensions.py new file mode 100644 index 000000000..a770d8b92 --- /dev/null +++ b/tests/core/security/noise/test_early_data_extensions.py @@ -0,0 +1,336 @@ +"""Tests for early data support through noise extensions (Phase 2).""" + +import pytest + +from libp2p.crypto.secp256k1 import create_new_key_pair +from libp2p.peer.id import ID +from libp2p.security.noise.messages import NoiseExtensions, NoiseHandshakePayload +from libp2p.security.noise.patterns import PatternXX + + +class TestNoiseExtensionsEarlyData: + """Test NoiseExtensions with early data support.""" + + def test_extensions_with_early_data(self): + """Test creating extensions with early data.""" + early_data = b"test_early_data" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions( + webtransport_certhashes=certhashes, + early_data=early_data + ) + + assert ext.early_data == early_data + assert ext.webtransport_certhashes == certhashes + + def 
test_extensions_without_early_data(self): + """Test creating extensions without early data.""" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + assert ext.early_data is None + assert ext.webtransport_certhashes == certhashes + + def test_extensions_protobuf_conversion_with_early_data(self): + """Test protobuf conversion with early data.""" + early_data = b"test_early_data" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions( + webtransport_certhashes=certhashes, + early_data=early_data + ) + + # Convert to protobuf and back + pb_ext = ext.to_protobuf() + ext_roundtrip = NoiseExtensions.from_protobuf(pb_ext) + + assert ext_roundtrip.early_data == early_data + assert ext_roundtrip.webtransport_certhashes == certhashes + + def test_extensions_protobuf_conversion_without_early_data(self): + """Test protobuf conversion without early data.""" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + # Convert to protobuf and back + pb_ext = ext.to_protobuf() + ext_roundtrip = NoiseExtensions.from_protobuf(pb_ext) + + assert ext_roundtrip.early_data is None + assert ext_roundtrip.webtransport_certhashes == certhashes + + +class TestNoiseHandshakePayloadEarlyData: + """Test NoiseHandshakePayload with early data through extensions.""" + + @pytest.fixture + def key_pair(self): + """Create a test key pair.""" + return create_new_key_pair() + + def test_handshake_payload_with_early_data_in_extensions(self, key_pair): + """Test handshake payload with early data in extensions.""" + early_data = b"test_early_data" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions( + webtransport_certhashes=certhashes, + early_data=early_data + ) + + payload = NoiseHandshakePayload( + id_pubkey=key_pair.public_key, + id_sig=b"test_sig", + early_data=None, # No legacy early data + extensions=ext + ) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Early data should come from extensions + assert deserialized.extensions is not None + assert deserialized.extensions.early_data == early_data + assert deserialized.extensions.webtransport_certhashes == certhashes + # Legacy early data should also be set for backward compatibility + assert deserialized.early_data == early_data + + def test_handshake_payload_with_legacy_early_data(self, key_pair): + """Test handshake payload with legacy early data (backward compatibility).""" + early_data = b"legacy_early_data" + + payload = NoiseHandshakePayload( + id_pubkey=key_pair.public_key, + id_sig=b"test_sig", + early_data=early_data, + extensions=None + ) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Should preserve legacy early data + assert deserialized.early_data == early_data + assert deserialized.extensions is None + + def test_handshake_payload_with_extensions_and_legacy_early_data(self, key_pair): + """Test handshake payload with both extensions and legacy early data.""" + legacy_early_data = b"legacy_early_data" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + payload = NoiseHandshakePayload( + id_pubkey=key_pair.public_key, + id_sig=b"test_sig", + early_data=legacy_early_data, + extensions=ext + ) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Should 
preserve both + assert deserialized.early_data == legacy_early_data + assert deserialized.extensions is not None + assert deserialized.extensions.webtransport_certhashes == certhashes + assert deserialized.extensions.early_data is None + + def test_handshake_payload_early_data_priority(self, key_pair): + """Test that early data in extensions takes priority over legacy early data.""" + legacy_early_data = b"legacy_early_data" + extension_early_data = b"extension_early_data" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions( + webtransport_certhashes=certhashes, + early_data=extension_early_data + ) + + payload = NoiseHandshakePayload( + id_pubkey=key_pair.public_key, + id_sig=b"test_sig", + early_data=legacy_early_data, # This should be ignored + extensions=ext + ) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Extension early data should take priority + assert deserialized.extensions is not None + assert deserialized.extensions.early_data == extension_early_data + assert deserialized.early_data == extension_early_data + + +class TestPatternEarlyDataIntegration: + """Test pattern integration with early data through extensions.""" + + @pytest.fixture + def pattern_setup(self): + """Set up pattern for testing.""" + libp2p_keypair = create_new_key_pair() + noise_keypair = create_new_key_pair() + local_peer = ID.from_pubkey(libp2p_keypair.public_key) + + pattern = PatternXX( + local_peer, + libp2p_keypair.private_key, + noise_keypair.private_key, + early_data=b"pattern_early_data" + ) + + return pattern, libp2p_keypair, noise_keypair + + def test_pattern_with_extensions_and_early_data(self, pattern_setup): + """Test pattern with extensions and early data.""" + pattern, libp2p_keypair, noise_keypair = pattern_setup + + certhashes = [b"cert1", b"cert2"] + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + payload = pattern.make_handshake_payload(extensions=ext) + + # Early data should be in extensions + assert payload.extensions is not None + assert payload.extensions.early_data == b"pattern_early_data" + assert payload.extensions.webtransport_certhashes == certhashes + # Legacy early data should be None + assert payload.early_data is None + + def test_pattern_with_extensions_without_early_data(self, pattern_setup): + """Test pattern with extensions but no early data.""" + pattern, libp2p_keypair, noise_keypair = pattern_setup + + # Create pattern without early data + libp2p_keypair2 = create_new_key_pair() + noise_keypair2 = create_new_key_pair() + local_peer2 = ID.from_pubkey(libp2p_keypair2.public_key) + + pattern_no_early = PatternXX( + local_peer2, + libp2p_keypair2.private_key, + noise_keypair2.private_key, + early_data=None + ) + + certhashes = [b"cert1", b"cert2"] + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + payload = pattern_no_early.make_handshake_payload(extensions=ext) + + # Should have extensions but no early data + assert payload.extensions is not None + assert payload.extensions.early_data is None + assert payload.extensions.webtransport_certhashes == certhashes + assert payload.early_data is None + + def test_pattern_without_extensions_legacy_early_data(self, pattern_setup): + """Test pattern without extensions (legacy behavior).""" + pattern, libp2p_keypair, noise_keypair = pattern_setup + + payload = pattern.make_handshake_payload() + + # Should use legacy early data + assert payload.early_data == b"pattern_early_data" + assert payload.extensions is 
None + + def test_pattern_early_data_roundtrip(self, pattern_setup): + """Test pattern early data roundtrip through serialization.""" + pattern, libp2p_keypair, noise_keypair = pattern_setup + + certhashes = [b"cert1", b"cert2"] + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + payload = pattern.make_handshake_payload(extensions=ext) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Early data should be preserved + assert deserialized.extensions is not None + assert deserialized.extensions.early_data == b"pattern_early_data" + assert deserialized.extensions.webtransport_certhashes == certhashes + assert deserialized.early_data == b"pattern_early_data" + + +class TestBackwardCompatibility: + """Test backward compatibility with existing implementations.""" + + @pytest.fixture + def key_pair(self): + """Create a test key pair.""" + return create_new_key_pair() + + def test_legacy_handshake_payload_compatibility(self, key_pair): + """Test that legacy handshake payloads still work.""" + early_data = b"legacy_early_data" + + # Create payload the old way + payload = NoiseHandshakePayload( + id_pubkey=key_pair.public_key, + id_sig=b"test_sig", + early_data=early_data + ) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Should work exactly as before + assert deserialized.early_data == early_data + assert deserialized.extensions is None + + def test_legacy_pattern_compatibility(self, key_pair): + """Test that legacy pattern usage still works.""" + libp2p_keypair = create_new_key_pair() + noise_keypair = create_new_key_pair() + local_peer = ID.from_pubkey(libp2p_keypair.public_key) + + pattern = PatternXX( + local_peer, + libp2p_keypair.private_key, + noise_keypair.private_key, + early_data=b"legacy_early_data" + ) + + # Create payload without extensions (legacy way) + payload = pattern.make_handshake_payload() + + # Should work exactly as before + assert payload.early_data == b"legacy_early_data" + assert payload.extensions is None + + def test_mixed_usage_compatibility(self, key_pair): + """Test mixed usage of legacy and new features.""" + # Test that we can mix legacy early data with new extensions + early_data = b"legacy_early_data" + certhashes = [b"cert1", b"cert2"] + + ext = NoiseExtensions(webtransport_certhashes=certhashes) + + payload = NoiseHandshakePayload( + id_pubkey=key_pair.public_key, + id_sig=b"test_sig", + early_data=early_data, + extensions=ext + ) + + # Serialize and deserialize + serialized = payload.serialize() + deserialized = NoiseHandshakePayload.deserialize(serialized) + + # Both should be preserved + assert deserialized.early_data == early_data + assert deserialized.extensions is not None + assert deserialized.extensions.webtransport_certhashes == certhashes + assert deserialized.extensions.early_data is None diff --git a/tests/core/security/noise/test_phase_advanced_features.p b/tests/core/security/noise/test_phase_advanced_features.p new file mode 100644 index 000000000..ac1797dec --- /dev/null +++ b/tests/core/security/noise/test_phase_advanced_features.p @@ -0,0 +1,443 @@ +"""Tests for Phase 4: Advanced Features Integration.""" + + +import pytest + +from libp2p.crypto.secp256k1 import create_new_key_pair +from libp2p.peer.id import ID +from libp2p.security.noise.early_data import ( + BufferingEarlyDataHandler, + CallbackEarlyDataHandler, + CompositeEarlyDataHandler, + EarlyDataManager, 
+ LoggingEarlyDataHandler, +) +from libp2p.security.noise.rekey import ( + ByteCountRekeyPolicy, + CompositeRekeyPolicy, + RekeyManager, + TimeBasedRekeyPolicy, +) +from libp2p.security.noise.transport import Transport +from libp2p.security.noise.webtransport import ( + WebTransportCertManager, + WebTransportSupport, +) + + +class TestWebTransportSupport: + """Test WebTransport integration features.""" + + def test_webtransport_support_creation(self): + """Test WebTransport support creation.""" + wt_support = WebTransportSupport() + assert wt_support is not None + assert wt_support.cert_manager is not None + + def test_certificate_management(self): + """Test certificate management functionality.""" + wt_support = WebTransportSupport() + + # Test adding certificates + cert1_hash = wt_support.add_certificate(b"test_certificate_1") + cert2_hash = wt_support.add_certificate(b"test_certificate_2") + + assert len(cert1_hash) == 32 # SHA-256 hash length + assert len(cert2_hash) == 32 + assert cert1_hash != cert2_hash + + # Test getting certificate hashes + hashes = wt_support.get_certificate_hashes() + assert len(hashes) == 2 + assert cert1_hash in hashes + assert cert2_hash in hashes + + # Test has_certificates + assert wt_support.has_certificates() is True + + def test_certificate_validation(self): + """Test certificate validation functionality.""" + wt_support = WebTransportSupport() + + # Add a certificate + cert_hash = wt_support.add_certificate(b"test_certificate") + + # Test validation + assert wt_support.validate_certificate_hash(cert_hash) is True + + # Test invalid hash + invalid_hash = b"invalid_hash" + b"0" * 20 # 32 bytes but not a valid hash + assert wt_support.validate_certificate_hash(invalid_hash) is False + + # Test wrong length hash + short_hash = b"short" + assert wt_support.validate_certificate_hash(short_hash) is False + + def test_cert_manager_functionality(self): + """Test WebTransportCertManager functionality.""" + cert_manager = WebTransportCertManager() + + # Test empty state + assert len(cert_manager) == 0 + assert bool(cert_manager) is False + + # Test adding certificates + cert1_hash = cert_manager.add_certificate(b"cert1") + cert2_hash = cert_manager.add_certificate(b"cert2") + + assert len(cert_manager) == 2 + assert bool(cert_manager) is True + + # Test getting hashes + hashes = cert_manager.get_certificate_hashes() + assert len(hashes) == 2 + + # Test checking specific hash + assert cert_manager.has_certificate_hash(cert1_hash) is True + assert cert_manager.has_certificate_hash(cert2_hash) is True + + # Test adding duplicate (should not add again) + cert1_hash_dup = cert_manager.add_certificate(b"cert1") + assert cert1_hash == cert1_hash_dup + assert len(cert_manager) == 2 + + # Test clearing + cert_manager.clear_certificates() + assert len(cert_manager) == 0 + assert bool(cert_manager) is False + + +class TestEarlyDataHandlers: + """Test early data handler functionality.""" + + def test_logging_early_data_handler(self): + """Test LoggingEarlyDataHandler.""" + handler = LoggingEarlyDataHandler("test_logger") + assert handler.logger_name == "test_logger" + + @pytest.mark.trio + async def test_buffering_early_data_handler(self): + """Test BufferingEarlyDataHandler.""" + handler = BufferingEarlyDataHandler(max_buffer_size=1024) + + # Test empty state + assert len(handler) == 0 + assert handler.size == 0 + + # Test adding data + await handler.handle_early_data(b"test_data_1") + await handler.handle_early_data(b"test_data_2") + + assert len(handler) == 2 + assert 
handler.size == len(b"test_data_1") + len(b"test_data_2") + + # Test getting buffered data + buffered_data = handler.get_buffered_data() + assert buffered_data == b"test_data_1test_data_2" + + # Test clearing buffer + handler.clear_buffer() + assert len(handler) == 0 + assert handler.size == 0 + + @pytest.mark.trio + async def test_callback_early_data_handler(self): + """Test CallbackEarlyDataHandler.""" + callback_data = [] + + def sync_callback(data): + callback_data.append(data) + + handler = CallbackEarlyDataHandler(sync_callback) + + # Test sync callback + await handler.handle_early_data(b"sync_data") + assert callback_data == [b"sync_data"] + + # Test async callback + async def async_callback(data): + callback_data.append(data) + + handler = CallbackEarlyDataHandler(async_callback) + await handler.handle_early_data(b"async_data") + assert callback_data == [b"sync_data", b"async_data"] + + @pytest.mark.trio + async def test_composite_early_data_handler(self): + """Test CompositeEarlyDataHandler.""" + handler1 = BufferingEarlyDataHandler() + handler2 = BufferingEarlyDataHandler() + + composite = CompositeEarlyDataHandler([handler1, handler2]) + + # Test handling data + await composite.handle_early_data(b"composite_data") + + assert handler1.get_buffered_data() == b"composite_data" + assert handler2.get_buffered_data() == b"composite_data" + + # Test adding handler + handler3 = BufferingEarlyDataHandler() + composite.add_handler(handler3) + assert len(composite.handlers) == 3 + + # Test removing handler + composite.remove_handler(handler3) + assert len(composite.handlers) == 2 + + @pytest.mark.trio + async def test_early_data_manager(self): + """Test EarlyDataManager.""" + handler = BufferingEarlyDataHandler() + manager = EarlyDataManager(handler) + + # Test initial state + assert manager.has_early_data() is False + assert manager.get_early_data() is None + + # Test handling early data + await manager.handle_early_data(b"manager_data") + + assert manager.has_early_data() is True + assert manager.get_early_data() == b"manager_data" + + # Test clearing early data + manager.clear_early_data() + assert manager.has_early_data() is False + assert manager.get_early_data() is None + + # Test setting new handler + new_handler = BufferingEarlyDataHandler() + manager.set_handler(new_handler) + assert manager.handler == new_handler + + +class TestRekeySupport: + """Test rekey functionality.""" + + def test_time_based_rekey_policy(self): + """Test TimeBasedRekeyPolicy.""" + policy = TimeBasedRekeyPolicy(max_time_seconds=1) + + # Test immediately after creation + assert policy.should_rekey(0, 0) is False + + # Test after time limit + assert policy.should_rekey(0, 1.1) is True + + # Test with bytes (should be ignored) + assert policy.should_rekey(1000000, 0.5) is False + + def test_byte_count_rekey_policy(self): + """Test ByteCountRekeyPolicy.""" + policy = ByteCountRekeyPolicy(max_bytes=1000) + + # Test immediately after creation + assert policy.should_rekey(0, 0) is False + + # Test after byte limit + assert policy.should_rekey(1001, 0) is True + + # Test with time (should be ignored) + assert policy.should_rekey(500, 3600) is False + + def test_composite_rekey_policy(self): + """Test CompositeRekeyPolicy.""" + time_policy = TimeBasedRekeyPolicy(max_time_seconds=1) + byte_policy = ByteCountRekeyPolicy(max_bytes=1000) + + composite = CompositeRekeyPolicy([time_policy, byte_policy]) + + # Test neither condition met + assert composite.should_rekey(500, 0.5) is False + + # Test time condition met + 
assert composite.should_rekey(500, 1.1) is True + + # Test byte condition met + assert composite.should_rekey(1001, 0.5) is True + + def test_rekey_manager(self): + """Test RekeyManager functionality.""" + policy = TimeBasedRekeyPolicy(max_time_seconds=1) + manager = RekeyManager(policy) + + # Test initial state + assert manager.should_rekey() is False + stats = manager.get_stats() + assert stats["rekey_count"] == 0 + assert stats["bytes_since_rekey"] == 0 + + # Test updating bytes + manager.update_bytes_processed(500) + stats = manager.get_stats() + assert stats["bytes_since_rekey"] == 500 + + # Test performing rekey + manager.perform_rekey() + stats = manager.get_stats() + assert stats["rekey_count"] == 1 + assert stats["bytes_since_rekey"] == 0 + + # Test reset stats + manager.reset_stats() + stats = manager.get_stats() + assert stats["rekey_count"] == 0 + + +class TestEnhancedTransport: + """Test enhanced transport with advanced features.""" + + @pytest.fixture + def key_pairs(self): + """Create test key pairs.""" + libp2p_keypair = create_new_key_pair() + noise_keypair = create_new_key_pair() + return libp2p_keypair, noise_keypair + + def test_enhanced_transport_creation(self, key_pairs): + """Test enhanced transport creation with all features.""" + libp2p_keypair, noise_keypair = key_pairs + + transport = Transport( + libp2p_keypair=libp2p_keypair, + noise_privkey=noise_keypair.private_key, + early_data=b"test_early_data", + with_noise_pipes=True, + early_data_handler=BufferingEarlyDataHandler(), + rekey_policy=TimeBasedRekeyPolicy(max_time_seconds=3600), + ) + + # Test all features are available + assert transport.webtransport_support is not None + assert transport.early_data_manager is not None + assert transport.rekey_manager is not None + assert hasattr(transport, "_static_key_cache") + assert transport.with_noise_pipes is True + assert transport.early_data == b"test_early_data" + + def test_pattern_selection(self, key_pairs): + """Test pattern selection logic.""" + libp2p_keypair, noise_keypair = key_pairs + + transport = Transport( + libp2p_keypair=libp2p_keypair, + noise_privkey=noise_keypair.private_key, + with_noise_pipes=True, + ) + + # Test default pattern (XX) + pattern = transport.get_pattern() + assert pattern.__class__.__name__ == "PatternXX" + + # Test pattern with remote peer (should still be XX if no cached key) + remote_peer = ID.from_pubkey(libp2p_keypair.public_key) + pattern = transport.get_pattern(remote_peer) + assert pattern.__class__.__name__ == "PatternXX" + + def test_static_key_caching(self, key_pairs): + """Test static key caching for IK pattern optimization.""" + libp2p_keypair, noise_keypair = key_pairs + + transport = Transport( + libp2p_keypair=libp2p_keypair, + noise_privkey=noise_keypair.private_key, + with_noise_pipes=True, + ) + + # Test caching static key + remote_peer = ID.from_pubkey(libp2p_keypair.public_key) + remote_static_key = noise_keypair.public_key + + transport.cache_static_key(remote_peer, remote_static_key) + + # Test retrieving cached key + cached_key = transport.get_cached_static_key(remote_peer) + assert cached_key == remote_static_key + + # Test clearing cache + transport.clear_static_key_cache() + cached_key = transport.get_cached_static_key(remote_peer) + assert cached_key is None + + def test_webtransport_integration(self, key_pairs): + """Test WebTransport integration in transport.""" + libp2p_keypair, noise_keypair = key_pairs + + transport = Transport( + libp2p_keypair=libp2p_keypair, + 
noise_privkey=noise_keypair.private_key, + ) + + # Test WebTransport functionality + wt_support = transport.webtransport_support + cert_hash = wt_support.add_certificate(b"webtransport_cert") + + assert wt_support.has_certificates() is True + assert wt_support.validate_certificate_hash(cert_hash) is True + + @pytest.mark.trio + async def test_early_data_integration(self, key_pairs): + """Test early data integration in transport.""" + libp2p_keypair, noise_keypair = key_pairs + + handler = BufferingEarlyDataHandler() + transport = Transport( + libp2p_keypair=libp2p_keypair, + noise_privkey=noise_keypair.private_key, + early_data_handler=handler, + ) + + # Test early data manager + ed_manager = transport.early_data_manager + assert ed_manager.handler == handler + + # Test handling early data + await ed_manager.handle_early_data(b"transport_early_data") + assert ed_manager.has_early_data() is True + assert ed_manager.get_early_data() == b"transport_early_data" + + def test_rekey_integration(self, key_pairs): + """Test rekey integration in transport.""" + libp2p_keypair, noise_keypair = key_pairs + + policy = TimeBasedRekeyPolicy(max_time_seconds=3600) + transport = Transport( + libp2p_keypair=libp2p_keypair, + noise_privkey=noise_keypair.private_key, + rekey_policy=policy, + ) + + # Test rekey manager + rekey_manager = transport.rekey_manager + assert rekey_manager.policy == policy + + # Test updating bytes processed + rekey_manager.update_bytes_processed(1024) + stats = rekey_manager.get_stats() + assert stats["bytes_since_rekey"] == 1024 + + # Test rekey check + assert rekey_manager.should_rekey() is False # Time not exceeded + + def test_backward_compatibility(self, key_pairs): + """Test backward compatibility with existing transport usage.""" + libp2p_keypair, noise_keypair = key_pairs + + # Test minimal transport creation (backward compatible) + transport = Transport( + libp2p_keypair=libp2p_keypair, + noise_privkey=noise_keypair.private_key, + ) + + # Test that all required attributes exist + assert transport.libp2p_privkey is not None + assert transport.noise_privkey is not None + assert transport.local_peer is not None + assert transport.early_data is None + assert transport.with_noise_pipes is False + + # Test that advanced features are still available + assert transport.webtransport_support is not None + assert transport.early_data_manager is not None + assert transport.rekey_manager is not None
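
The features exercised above are designed to compose on a single Transport. The following is a minimal usage sketch, assuming only the classes introduced in this patch; the certificate bytes and the 1 GiB byte limit are illustrative placeholders, not values taken from the patch:

    # Sketch: combine early data buffering, composite rekeying, and
    # WebTransport certificates on one Noise transport, mirroring the
    # constructor arguments exercised in the tests above.
    from libp2p.crypto.secp256k1 import create_new_key_pair
    from libp2p.security.noise.early_data import BufferingEarlyDataHandler
    from libp2p.security.noise.rekey import (
        ByteCountRekeyPolicy,
        CompositeRekeyPolicy,
        TimeBasedRekeyPolicy,
    )
    from libp2p.security.noise.transport import Transport

    libp2p_keypair = create_new_key_pair()
    noise_keypair = create_new_key_pair()

    # Rekey when either one hour elapses or 1 GiB has been processed,
    # whichever condition is met first.
    rekey_policy = CompositeRekeyPolicy(
        [
            TimeBasedRekeyPolicy(max_time_seconds=3600),
            ByteCountRekeyPolicy(max_bytes=1024 * 1024 * 1024),
        ]
    )

    transport = Transport(
        libp2p_keypair=libp2p_keypair,
        noise_privkey=noise_keypair.private_key,
        with_noise_pipes=True,  # enables static key caching for IK selection
        early_data_handler=BufferingEarlyDataHandler(),
        rekey_policy=rekey_policy,
    )

    # Advertise a WebTransport certificate hash via the Noise extensions;
    # the certificate bytes here are a placeholder, not a real DER cert.
    cert_hash = transport.webtransport_support.add_certificate(b"placeholder-cert")
    assert transport.webtransport_support.validate_certificate_hash(cert_hash)
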