diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 68d032b57..3c9eca5d4 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -82,6 +82,7 @@ InvokeMethodResponse, QueryResponse, QueryResponseItem, + MetadataMCPServer, RegisteredComponents, StateResponse, TopicEventResponse, @@ -1723,6 +1724,9 @@ async def get_metadata(self) -> GetMetadataResponse: for i in response.registered_components ] extended_metadata = dict(response.extended_metadata.items()) + mcp_servers = [ + MetadataMCPServer(name=s.name) for s in response.mcp_servers + ] return GetMetadataResponse( application_id=response.id, @@ -1730,6 +1734,7 @@ async def get_metadata(self) -> GetMetadataResponse: registered_components=registered_components, extended_metadata=extended_metadata, headers=await call.initial_metadata(), + mcp_servers=mcp_servers, ) async def schedule_job_alpha1(self, job: Job, overwrite: bool = False) -> DaprResponse: diff --git a/dapr/clients/grpc/_response.py b/dapr/clients/grpc/_response.py index 2f966d3e8..bbe8efb78 100644 --- a/dapr/clients/grpc/_response.py +++ b/dapr/clients/grpc/_response.py @@ -956,6 +956,7 @@ def __init__( registered_components: Sequence[RegisteredComponents], extended_metadata: Dict[str, str], headers: MetadataTuple = (), + mcp_servers: Optional[Sequence['MetadataMCPServer']] = None, ): """Initializes GetMetadataResponse. @@ -968,12 +969,15 @@ def __init__( extended_metadata (Dict[str, str]): mapping of custom (extended) attributes to their respective values. headers (Tuple, optional): the headers from Dapr gRPC response. + mcp_servers (Sequence[MetadataMCPServer], optional): list of + loaded MCPServer resources. 
""" super().__init__(headers) self._application_id = application_id self._active_actors_count = active_actors_count self._registered_components = registered_components self._extended_metadata = extended_metadata + self._mcp_servers = mcp_servers or [] @property def application_id(self) -> str: @@ -995,6 +999,11 @@ def extended_metadata(self) -> Dict[str, str]: """Mapping of custom (extended) attributes to their respective values.""" return self._extended_metadata + @property + def mcp_servers(self) -> Sequence['MetadataMCPServer']: + """List of loaded MCPServer resources.""" + return self._mcp_servers + class RegisteredComponents(NamedTuple): """Describes a loaded Dapr component.""" @@ -1012,6 +1021,13 @@ class RegisteredComponents(NamedTuple): """Supported capabilities for this component type and version.""" +class MetadataMCPServer(NamedTuple): + """Describes a loaded Dapr MCPServer resource.""" + + name: str + """Name of the MCPServer resource.""" + + class CryptoResponse(DaprResponse, Generic[TCryptoResponse]): """An iterable of cryptography API responses.""" diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 5ac02f609..266f67065 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -73,6 +73,7 @@ InvokeMethodResponse, QueryResponse, QueryResponseItem, + MetadataMCPServer, RegisteredComponents, StateResponse, TopicEventResponse, @@ -1828,6 +1829,9 @@ def get_metadata(self) -> GetMetadataResponse: for i in response.registered_components ] extended_metadata = dict(response.extended_metadata.items()) + mcp_servers = [ + MetadataMCPServer(name=s.name) for s in response.mcp_servers + ] return GetMetadataResponse( application_id=response.id, @@ -1835,6 +1839,7 @@ def get_metadata(self) -> GetMetadataResponse: registered_components=registered_components, extended_metadata=extended_metadata, headers=call.initial_metadata(), + mcp_servers=mcp_servers, ) def set_metadata(self, attributeName: str, attributeValue: str) -> 
DaprResponse: diff --git a/dapr/proto/runtime/v1/metadata_pb2.py b/dapr/proto/runtime/v1/metadata_pb2.py index f19bea65d..d3eb414e6 100644 --- a/dapr/proto/runtime/v1/metadata_pb2.py +++ b/dapr/proto/runtime/v1/metadata_pb2.py @@ -24,54 +24,56 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$dapr/proto/runtime/v1/metadata.proto\x12\x15\x64\x61pr.proto.runtime.v1\"\x14\n\x12GetMetadataRequest\"\xd1\x07\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n \x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x12K\n\tscheduler\x18\x0b \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataSchedulerH\x00R\tscheduler\x88\x01\x01\x12K\n\tworkflows\x18\x0c \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataWorkflowsH\x01R\tworkflows\x88\x01\x01\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_schedulerB\x0c\n\n_workflows\"@\n\x11MetadataWorkflows\x12+\n\x11\x63onnected_workers\x18\x01 \x01(\x05R\x10\x63onnectedWorkers\"0\n\x11MetadataScheduler\x12\x1b\n\x13\x63onnected_addresses\x18\x01 
\x03(\t\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 \x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 
\x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x42q\n\nio.dapr.v1B\x12\x44\x61prMetadataProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$dapr/proto/runtime/v1/metadata.proto\x12\x15\x64\x61pr.proto.runtime.v1\"\x14\n\x12GetMetadataRequest\"\x9c\x08\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n 
\x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x12K\n\tscheduler\x18\x0b \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataSchedulerH\x00R\tscheduler\x88\x01\x01\x12K\n\tworkflows\x18\x0c \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataWorkflowsH\x01R\tworkflows\x88\x01\x01\x12I\n\x0bmcp_servers\x18\r \x03(\x0b\x32(.dapr.proto.runtime.v1.MetadataMCPServerR\nmcpServers\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_schedulerB\x0c\n\n_workflows\"@\n\x11MetadataWorkflows\x12+\n\x11\x63onnected_workers\x18\x01 \x01(\x05R\x10\x63onnectedWorkers\"0\n\x11MetadataScheduler\x12\x1b\n\x13\x63onnected_addresses\x18\x01 \x03(\t\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\'\n\x11MetadataMCPServer\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 
\x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 \x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x42i\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dapr.proto.runtime.v1.metadata_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = 
b'\n\nio.dapr.v1B\022DaprMetadataProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002\033Dapr.Client.Autogen.Grpc.v1' + _globals['DESCRIPTOR']._serialized_options = b'\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002\033Dapr.Client.Autogen.Grpc.v1' _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._loaded_options = None _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_options = b'8\001' _globals['_GETMETADATARESPONSE'].fields_by_name['active_actors_count']._loaded_options = None _globals['_GETMETADATARESPONSE'].fields_by_name['active_actors_count']._serialized_options = b'\030\001' _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._loaded_options = None _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_options = b'8\001' - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=2707 - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=2794 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=2823 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=2910 _globals['_GETMETADATAREQUEST']._serialized_start=63 _globals['_GETMETADATAREQUEST']._serialized_end=83 _globals['_GETMETADATARESPONSE']._serialized_start=86 - _globals['_GETMETADATARESPONSE']._serialized_end=1063 - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_start=980 - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_end=1035 - _globals['_METADATAWORKFLOWS']._serialized_start=1065 - _globals['_METADATAWORKFLOWS']._serialized_end=1129 - _globals['_METADATASCHEDULER']._serialized_start=1131 - _globals['_METADATASCHEDULER']._serialized_end=1179 - _globals['_ACTORRUNTIME']._serialized_start=1182 - _globals['_ACTORRUNTIME']._serialized_end=1498 - _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_start=1433 - _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_end=1498 - _globals['_ACTIVEACTORSCOUNT']._serialized_start=1500 - _globals['_ACTIVEACTORSCOUNT']._serialized_end=1548 - 
_globals['_REGISTEREDCOMPONENTS']._serialized_start=1550 - _globals['_REGISTEREDCOMPONENTS']._serialized_end=1639 - _globals['_METADATAHTTPENDPOINT']._serialized_start=1641 - _globals['_METADATAHTTPENDPOINT']._serialized_end=1683 - _globals['_APPCONNECTIONPROPERTIES']._serialized_start=1686 - _globals['_APPCONNECTIONPROPERTIES']._serialized_end=1895 - _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_start=1898 - _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_end=2118 - _globals['_PUBSUBSUBSCRIPTION']._serialized_start=2121 - _globals['_PUBSUBSUBSCRIPTION']._serialized_end=2511 - _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_start=2464 - _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_end=2511 - _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_start=2513 - _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_end=2600 - _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_start=2602 - _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_end=2655 - _globals['_SETMETADATAREQUEST']._serialized_start=2657 - _globals['_SETMETADATAREQUEST']._serialized_end=2705 + _globals['_GETMETADATARESPONSE']._serialized_end=1138 + _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_start=1055 + _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_end=1110 + _globals['_METADATAWORKFLOWS']._serialized_start=1140 + _globals['_METADATAWORKFLOWS']._serialized_end=1204 + _globals['_METADATASCHEDULER']._serialized_start=1206 + _globals['_METADATASCHEDULER']._serialized_end=1254 + _globals['_ACTORRUNTIME']._serialized_start=1257 + _globals['_ACTORRUNTIME']._serialized_end=1573 + _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_start=1508 + _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_end=1573 + _globals['_ACTIVEACTORSCOUNT']._serialized_start=1575 + _globals['_ACTIVEACTORSCOUNT']._serialized_end=1623 + _globals['_REGISTEREDCOMPONENTS']._serialized_start=1625 + _globals['_REGISTEREDCOMPONENTS']._serialized_end=1714 + 
_globals['_METADATAHTTPENDPOINT']._serialized_start=1716 + _globals['_METADATAHTTPENDPOINT']._serialized_end=1758 + _globals['_METADATAMCPSERVER']._serialized_start=1760 + _globals['_METADATAMCPSERVER']._serialized_end=1799 + _globals['_APPCONNECTIONPROPERTIES']._serialized_start=1802 + _globals['_APPCONNECTIONPROPERTIES']._serialized_end=2011 + _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_start=2014 + _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_end=2234 + _globals['_PUBSUBSUBSCRIPTION']._serialized_start=2237 + _globals['_PUBSUBSUBSCRIPTION']._serialized_end=2627 + _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_start=2580 + _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_end=2627 + _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_start=2629 + _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_end=2716 + _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_start=2718 + _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_end=2771 + _globals['_SETMETADATAREQUEST']._serialized_start=2773 + _globals['_SETMETADATAREQUEST']._serialized_end=2821 # @@protoc_insertion_point(module_scope) diff --git a/dapr/proto/runtime/v1/metadata_pb2.pyi b/dapr/proto/runtime/v1/metadata_pb2.pyi index 82ab0137f..f79863821 100644 --- a/dapr/proto/runtime/v1/metadata_pb2.pyi +++ b/dapr/proto/runtime/v1/metadata_pb2.pyi @@ -112,6 +112,7 @@ class GetMetadataResponse(_message.Message): ACTOR_RUNTIME_FIELD_NUMBER: _builtins.int SCHEDULER_FIELD_NUMBER: _builtins.int WORKFLOWS_FIELD_NUMBER: _builtins.int + MCP_SERVERS_FIELD_NUMBER: _builtins.int id: _builtins.str runtime_version: _builtins.str @_builtins.property @@ -137,6 +138,8 @@ class GetMetadataResponse(_message.Message): def scheduler(self) -> Global___MetadataScheduler: ... @_builtins.property def workflows(self) -> Global___MetadataWorkflows: ... + @_builtins.property + def mcp_servers(self) -> _containers.RepeatedCompositeFieldContainer[Global___MetadataMCPServer]: ... 
def __init__( self, *, @@ -152,10 +155,11 @@ class GetMetadataResponse(_message.Message): actor_runtime: Global___ActorRuntime | None = ..., scheduler: Global___MetadataScheduler | None = ..., workflows: Global___MetadataWorkflows | None = ..., + mcp_servers: _abc.Iterable[Global___MetadataMCPServer] | None = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler", "_workflows", b"_workflows", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "scheduler", b"scheduler", "workflows", b"workflows"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler", "_workflows", b"_workflows", "active_actors_count", b"active_actors_count", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "enabled_features", b"enabled_features", "extended_metadata", b"extended_metadata", "http_endpoints", b"http_endpoints", "id", b"id", "registered_components", b"registered_components", "runtime_version", b"runtime_version", "scheduler", b"scheduler", "subscriptions", b"subscriptions", "workflows", b"workflows"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler", "_workflows", b"_workflows", "active_actors_count", b"active_actors_count", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "enabled_features", b"enabled_features", "extended_metadata", b"extended_metadata", "http_endpoints", b"http_endpoints", "id", b"id", "mcp_servers", b"mcp_servers", "registered_components", b"registered_components", "runtime_version", b"runtime_version", "scheduler", b"scheduler", "subscriptions", b"subscriptions", "workflows", b"workflows"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
_WhichOneofReturnType__scheduler: _TypeAlias = _typing.Literal["scheduler"] # noqa: Y015 _WhichOneofArgType__scheduler: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler"] # noqa: Y015 @@ -324,6 +328,22 @@ class MetadataHTTPEndpoint(_message.Message): Global___MetadataHTTPEndpoint: _TypeAlias = MetadataHTTPEndpoint # noqa: Y015 +@_typing.final +class MetadataMCPServer(_message.Message): + DESCRIPTOR: _descriptor.Descriptor + + NAME_FIELD_NUMBER: _builtins.int + name: _builtins.str + def __init__( + self, + *, + name: _builtins.str = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["name", b"name"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___MetadataMCPServer: _TypeAlias = MetadataMCPServer # noqa: Y015 + @_typing.final class AppConnectionProperties(_message.Message): DESCRIPTOR: _descriptor.Descriptor diff --git a/examples/mcp/README.md b/examples/mcp/README.md new file mode 100644 index 000000000..11205a74c --- /dev/null +++ b/examples/mcp/README.md @@ -0,0 +1,70 @@ +# MCP Examples + +Examples demonstrating how to use the `DaprMCPClient` from the Dapr Python SDK +to discover and invoke MCP tools via Dapr's built-in workflow orchestrations. + +## Prerequisites + +- **Dapr CLI** installed with `dapr init` completed (provides Redis on `localhost:6379`) +- **Python 3.11+** +- Install deps: `pip install -r requirements.txt` + +## Files + +| File | Purpose | +|------|---------| +| `mcp_tool_discovery.py` | The example: discovers tools and runs one in a workflow. | +| `weather_mcp_server.py` | Self-contained MCP server with `get_weather` / `get_forecast` tools (streamable-HTTP on `:8081/mcp`). | +| `resources/weather.yaml` | Dapr `MCPServer` resource pointing the sidecar at the weather server. | +| `resources/statestore.yaml` | Redis state store with `actorStateStore: true` (required by workflows). 
| + +## Run + +In one terminal, start the bundled MCP server: + +```bash +python weather_mcp_server.py +``` + +In another terminal, run the example with Dapr: + +```bash +dapr run \ + --app-id mcp-demo \ + --resources-path ./resources \ + -- python mcp_tool_discovery.py +``` + +The example will: + +1. Connect to the `weather` MCPServer resource via the sidecar. +2. Print each discovered tool's name, description, and workflow name. +3. Schedule a `CallTool` child workflow for the first tool with `{"location": "Seattle"}`. +4. Print the result. + +## Using a different MCP server + +Edit `resources/weather.yaml` to point at any MCP-compatible endpoint. Supported +transports: + +```yaml +spec: + endpoint: + streamableHTTP: + url: http://host:port/mcp +``` + +```yaml +spec: + endpoint: + sse: + url: http://host:port/sse +``` + +```yaml +spec: + endpoint: + stdio: + command: python + args: ["path/to/server.py"] +``` diff --git a/examples/mcp/mcp_tool_discovery.py b/examples/mcp/mcp_tool_discovery.py new file mode 100644 index 000000000..5e1754a7b --- /dev/null +++ b/examples/mcp/mcp_tool_discovery.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +MCP Tool Discovery Example +=========================== + +Demonstrates using DaprMCPClient to discover MCP tools from Dapr +MCPServer resources — without any agent framework dependency. 
+ +This is the SDK-level client that any agent framework can build on top of. + +Prerequisites +------------- +1. A Dapr MCPServer resource named "weather" loaded in the sidecar:: + + apiVersion: dapr.io/v1alpha1 + kind: MCPServer + metadata: + name: weather + spec: + endpoint: + streamableHTTP: + url: http://localhost:8081/mcp + +2. An MCP server running at the configured URL. + +Run +--- +:: + + dapr run --app-id mcp-demo --resources-path ./resources -- python mcp_tool_discovery.py +""" + +from dapr.ext.workflow import ( + DaprMCPClient, + DaprWorkflowClient, + DaprWorkflowContext, + WorkflowActivityContext, + WorkflowRuntime, + create_pydantic_model_from_schema, +) + + +def main(): + # ------------------------------------------------------------------ + # 1. Discover MCP tools from a Dapr MCPServer resource. + # ------------------------------------------------------------------ + print("Connecting to MCPServer 'weather'...") + + client = DaprMCPClient(timeout_in_seconds=30) + client.connect("weather") + + tools = client.get_all_tools() + print(f"\nDiscovered {len(tools)} tool(s):\n") + for tool in tools: + print(f" Name: {tool.name}") + print(f" Description: {tool.description}") + print(f" Server: {tool.server_name}") + print(f" Workflow: {tool.call_tool_workflow}") + if tool.input_schema.get("properties"): + props = list(tool.input_schema["properties"].keys()) + print(f" Parameters: {', '.join(props)}") + print() + + # ------------------------------------------------------------------ + # 2. Use the tool in a Dapr workflow. + # This shows how any framework can use MCPToolDef to schedule + # durable tool calls via child workflows. + # ------------------------------------------------------------------ + if not tools: + print("No tools discovered — exiting.") + return + + tool = tools[0] + print(f"Using tool '{tool.name}' in a workflow...\n") + + # Build a Pydantic model from the tool's JSON Schema for validation. 
+ if tool.input_schema: + ArgsModel = create_pydantic_model_from_schema( + tool.input_schema, f"{tool.name}Args" + ) + print(f" Args model: {ArgsModel.__name__}") + print(f" Fields: {list(ArgsModel.model_fields.keys())}\n") + + # Define a simple workflow that calls the MCP tool. + def call_mcp_tool_workflow(ctx: DaprWorkflowContext, input: dict): + """Workflow that calls an MCP tool as a child workflow.""" + result = yield ctx.call_child_workflow( + workflow=tool.call_tool_workflow, + input={ + "toolName": tool.name, + "arguments": input.get("arguments", {}), + }, + ) + return result + + def print_result(ctx: WorkflowActivityContext, input): + """Activity that prints the tool result.""" + print(f" Tool result: {input}") + + # Register and run the workflow. + wfr = WorkflowRuntime() + wfr.register_workflow(call_mcp_tool_workflow) + wfr.register_activity(print_result) + wfr.start() + + wf_client = DaprWorkflowClient() + instance_id = wf_client.schedule_new_workflow( + workflow=call_mcp_tool_workflow, + input={"arguments": {"location": "Seattle"}}, + ) + print(f" Scheduled workflow: {instance_id}") + + state = wf_client.wait_for_workflow_completion( + instance_id=instance_id, + timeout_in_seconds=30, + fetch_payloads=True, + ) + + if state: + print(f" Status: {state.runtime_status.name}") + print(f" Output: {state.serialized_output}") + else: + print(" Workflow timed out.") + + wfr.shutdown() + print("\nDone.") + + +if __name__ == "__main__": + main() diff --git a/examples/mcp/requirements.txt b/examples/mcp/requirements.txt new file mode 100644 index 000000000..087549b94 --- /dev/null +++ b/examples/mcp/requirements.txt @@ -0,0 +1,2 @@ +dapr-ext-workflow +mcp diff --git a/examples/mcp/resources/statestore.yaml b/examples/mcp/resources/statestore.yaml new file mode 100644 index 000000000..0bbb9d8f9 --- /dev/null +++ b/examples/mcp/resources/statestore.yaml @@ -0,0 +1,14 @@ +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: workflowstatestore +spec: + 
type: state.redis + version: v1 + metadata: + - name: redisHost + value: localhost:6379 + - name: redisPassword + value: "" + - name: actorStateStore + value: "true" diff --git a/examples/mcp/resources/weather.yaml b/examples/mcp/resources/weather.yaml new file mode 100644 index 000000000..2cb0710c1 --- /dev/null +++ b/examples/mcp/resources/weather.yaml @@ -0,0 +1,8 @@ +apiVersion: dapr.io/v1alpha1 +kind: MCPServer +metadata: + name: weather +spec: + endpoint: + streamableHTTP: + url: http://localhost:8081/mcp diff --git a/examples/mcp/weather_mcp_server.py b/examples/mcp/weather_mcp_server.py new file mode 100644 index 000000000..26d0479cb --- /dev/null +++ b/examples/mcp/weather_mcp_server.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Weather MCP Server (streamable-HTTP transport) +============================================== + +Self-contained MCP server used by ``mcp_tool_discovery.py``. Exposes +``get_weather`` and ``get_forecast`` tools over the streamable-HTTP +transport at ``http://:/mcp``. 
+ +Run:: + + python weather_mcp_server.py [--host 0.0.0.0] [--port 8081] +""" + +import argparse +import logging +import random + +from mcp.server.fastmcp import FastMCP + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger("weather-mcp-server") + + +def build_server(host: str, port: int) -> FastMCP: + mcp = FastMCP("WeatherService", host=host, port=port) + + @mcp.tool() + async def get_weather(location: str) -> str: + """Get current weather information for a location. + + Args: + location: City or region name (e.g. 'Seattle', 'London'). + + Returns: + Current temperature and conditions. + """ + temperature = random.randint(32, 105) + conditions = random.choice( + ["sunny", "cloudy", "partly cloudy", "rainy", "windy", "snowy", "foggy"] + ) + humidity = random.randint(20, 95) + return f"{location}: {temperature}F, {conditions}, {humidity}% humidity." + + @mcp.tool() + async def get_forecast(location: str, days: int = 5) -> str: + """Get a multi-day weather forecast for a location. + + Args: + location: City or region name. + days: Number of days to forecast (default 5, max 10). + + Returns: + Multi-line forecast summary. 
+ """ + days = min(max(days, 1), 10) + lines = [f"{location} {days}-day forecast:"] + for i in range(1, days + 1): + high = random.randint(55, 105) + low = high - random.randint(10, 25) + cond = random.choice( + ["sunny", "cloudy", "rainy", "stormy", "clear", "partly cloudy"] + ) + lines.append(f" Day {i}: High {high}F / Low {low}F, {cond}") + return "\n".join(lines) + + return mcp + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Weather MCP server (streamable-HTTP transport)" + ) + parser.add_argument("--host", default="0.0.0.0") + parser.add_argument("--port", type=int, default=8081) + args = parser.parse_args() + + mcp = build_server(args.host, args.port) + logger.info( + "Weather MCP server listening on http://%s:%d/mcp", args.host, args.port + ) + mcp.run(transport="streamable-http") + + +if __name__ == "__main__": + main() diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py index ef8e082e9..aa8f8e7c0 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py @@ -22,6 +22,10 @@ from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name from dapr.ext.workflow.workflow_state import WorkflowState, WorkflowStatus +# MCP +from dapr.ext.workflow.mcp import DaprMCPClient, MCPToolDef, MCP_WORKFLOW_PREFIX +from dapr.ext.workflow.mcp_schema import create_pydantic_model_from_schema + __all__ = [ 'WorkflowRuntime', 'DaprWorkflowClient', @@ -34,4 +38,9 @@ 'alternate_name', 'RetryPolicy', 'TaskFailedError', + # MCP + 'DaprMCPClient', + 'MCPToolDef', + 'MCP_WORKFLOW_PREFIX', + 'create_pydantic_model_from_schema', ] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py index c969a3d2a..8ac4db352 100644 --- 
a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x62\x61\x63kend_service.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"T\n\x0f\x41\x64\x64\x45ventRequest\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x12\n\x10\x41\x64\x64\x45ventResponse\"`\n\x1f\x43ompleteActivityWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12$\n\rresponseEvent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\"\n CompleteActivityWorkItemResponse\"\xa4\x03\n\x1f\x43ompleteWorkflowWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12#\n\x08instance\x18\x02 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12\x32\n\x0c\x63ustomStatus\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\nnewHistory\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12\x1f\n\x08newTasks\x18\x06 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewTimers\x18\x07 \x03(\x0b\x32\r.HistoryEvent\x12\x43\n\x0bnewMessages\x18\x08 \x03(\x0b\x32..durabletask.protos.backend.v1.WorkflowMessage\x12\x37\n\x12numEventsProcessed\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\"\n CompleteWorkflowWorkItemResponse\"T\n\x0fWorkflowMessage\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x9c\x01\n\x14\x42\x61\x63kendWorkflowState\x12\x1c\n\x05inbox\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\x1e\n\x07history\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12\x32\n\x0c\x63ustomStatus\x18\x03 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x12\n\ngeneration\x18\x04 \x01(\x04\"P\n\x1d\x43reateWorkflowInstanceRequest\x12!\n\nstartEvent\x18\x01 \x01(\x0b\x32\r.HistoryEventJ\x04\x08\x02\x10\x03R\x06policy\"\xca\x03\n\x10WorkflowMetadata\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12-\n\tcreatedAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rlastUpdatedAt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\t \x01(\x0b\x32\x13.TaskFailureDetails\x12/\n\x0b\x63ompletedAt\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10parentInstanceId\x18\x0b \x01(\t\"\x99\x01\n\x1c\x42\x61\x63kendWorkflowStateMetadata\x12\x13\n\x0binboxLength\x18\x01 \x01(\x04\x12\x15\n\rhistoryLength\x18\x02 \x01(\x04\x12\x12\n\ngeneration\x18\x03 \x01(\x04\x12\x17\n\x0fsignatureLength\x18\x04 \x01(\x04\x12 \n\x18signingCertificateLength\x18\x05 \x01(\x04\")\n\x12SigningCertificate\x12\x13\n\x0b\x63\x65rtificate\x18\x01 \x01(\x0c\"\xc4\x01\n\x10HistorySignature\x12\x17\n\x0fstartEventIndex\x18\x01 \x01(\x04\x12\x12\n\neventCount\x18\x02 \x01(\x04\x12$\n\x17previousSignatureDigest\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x12\x14\n\x0c\x65ventsDigest\x18\x04 \x01(\x0c\x12\x18\n\x10\x63\x65rtificateIndex\x18\x05 \x01(\x04\x12\x11\n\tsignature\x18\x06 \x01(\x0c\x42\x1a\n\x18_previousSignatureDigest\"E\n\x0c\x44urableTimer\x12!\n\ntimerEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x12\n\ngeneration\x18\x02 \x01(\x04\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x15\x62\x61\x63kend_service.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"T\n\x0f\x41\x64\x64\x45ventRequest\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x12\n\x10\x41\x64\x64\x45ventResponse\"`\n\x1f\x43ompleteActivityWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12$\n\rresponseEvent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\"\n CompleteActivityWorkItemResponse\"\xa4\x03\n\x1f\x43ompleteWorkflowWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12#\n\x08instance\x18\x02 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12\x32\n\x0c\x63ustomStatus\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\nnewHistory\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12\x1f\n\x08newTasks\x18\x06 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewTimers\x18\x07 \x03(\x0b\x32\r.HistoryEvent\x12\x43\n\x0bnewMessages\x18\x08 \x03(\x0b\x32..durabletask.protos.backend.v1.WorkflowMessage\x12\x37\n\x12numEventsProcessed\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\"\n CompleteWorkflowWorkItemResponse\"T\n\x0fWorkflowMessage\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x9c\x01\n\x14\x42\x61\x63kendWorkflowState\x12\x1c\n\x05inbox\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\x1e\n\x07history\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x12\n\ngeneration\x18\x04 \x01(\x04\"\x83\x01\n\x12\x41\x63tivityInvocation\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x32\n\x11propagatedHistory\x18\x02 
\x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistory\"\x9a\x01\n\x1d\x43reateWorkflowInstanceRequest\x12!\n\nstartEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x32\n\x11propagatedHistory\x18\x03 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistoryJ\x04\x08\x02\x10\x03R\x06policy\"\xd2\x04\n\x10WorkflowMetadata\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12-\n\tcreatedAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rlastUpdatedAt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\t \x01(\x0b\x32\x13.TaskFailureDetails\x12/\n\x0b\x63ompletedAt\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10parentInstanceId\x18\x0b \x01(\t\x12\x32\n\x07version\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x00\x88\x01\x01\x12\x36\n\x0bparentAppId\x18\r \x01(\x0b\x32\x1c.google.protobuf.StringValueH\x01\x88\x01\x01\x42\n\n\x08_versionB\x0e\n\x0c_parentAppId\"\x99\x01\n\x1c\x42\x61\x63kendWorkflowStateMetadata\x12\x13\n\x0binboxLength\x18\x01 \x01(\x04\x12\x15\n\rhistoryLength\x18\x02 \x01(\x04\x12\x12\n\ngeneration\x18\x03 \x01(\x04\x12\x17\n\x0fsignatureLength\x18\x04 \x01(\x04\x12 \n\x18signingCertificateLength\x18\x05 \x01(\x04\")\n\x12SigningCertificate\x12\x13\n\x0b\x63\x65rtificate\x18\x01 \x01(\x0c\"\xc4\x01\n\x10HistorySignature\x12\x17\n\x0fstartEventIndex\x18\x01 \x01(\x04\x12\x12\n\neventCount\x18\x02 \x01(\x04\x12$\n\x17previousSignatureDigest\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x12\x14\n\x0c\x65ventsDigest\x18\x04 \x01(\x0c\x12\x18\n\x10\x63\x65rtificateIndex\x18\x05 \x01(\x04\x12\x11\n\tsignature\x18\x06 
\x01(\x0c\x42\x1a\n\x18_previousSignatureDigest\"E\n\x0c\x44urableTimer\x12!\n\ntimerEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x12\n\ngeneration\x18\x02 \x01(\x04\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -52,16 +52,18 @@ _globals['_WORKFLOWMESSAGE']._serialized_end=947 _globals['_BACKENDWORKFLOWSTATE']._serialized_start=950 _globals['_BACKENDWORKFLOWSTATE']._serialized_end=1106 - _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_start=1108 - _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_end=1188 - _globals['_WORKFLOWMETADATA']._serialized_start=1191 - _globals['_WORKFLOWMETADATA']._serialized_end=1649 - _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_start=1652 - _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_end=1805 - _globals['_SIGNINGCERTIFICATE']._serialized_start=1807 - _globals['_SIGNINGCERTIFICATE']._serialized_end=1848 - _globals['_HISTORYSIGNATURE']._serialized_start=1851 - _globals['_HISTORYSIGNATURE']._serialized_end=2047 - _globals['_DURABLETIMER']._serialized_start=2049 - _globals['_DURABLETIMER']._serialized_end=2118 + _globals['_ACTIVITYINVOCATION']._serialized_start=1109 + _globals['_ACTIVITYINVOCATION']._serialized_end=1240 + _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_start=1243 + _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_end=1397 + _globals['_WORKFLOWMETADATA']._serialized_start=1400 + _globals['_WORKFLOWMETADATA']._serialized_end=1994 + _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_start=1997 + _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_end=2150 + _globals['_SIGNINGCERTIFICATE']._serialized_start=2152 + _globals['_SIGNINGCERTIFICATE']._serialized_end=2193 + _globals['_HISTORYSIGNATURE']._serialized_start=2196 + _globals['_HISTORYSIGNATURE']._serialized_end=2392 + 
_globals['_DURABLETIMER']._serialized_start=2394 + _globals['_DURABLETIMER']._serialized_end=2463 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi index 7b1a84ded..698b2b0cb 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi @@ -17,10 +17,10 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -50,6 +50,7 @@ class AddEventRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["event", b"event", "instance", b"instance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___AddEventRequest: _TypeAlias = AddEventRequest # noqa: Y015 @@ -64,6 +65,11 @@ class AddEventResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___AddEventResponse: _TypeAlias = AddEventResponse # noqa: Y015 @@ -93,6 +99,7 @@ class CompleteActivityWorkItemRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["completionToken", b"completionToken", "responseEvent", b"responseEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteActivityWorkItemRequest: _TypeAlias = CompleteActivityWorkItemRequest # noqa: Y015 @@ -107,6 +114,11 @@ class CompleteActivityWorkItemResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteActivityWorkItemResponse: _TypeAlias = CompleteActivityWorkItemResponse # noqa: Y015 @@ -163,6 +175,7 @@ class CompleteWorkflowWorkItemRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["completionToken", b"completionToken", "customStatus", b"customStatus", "instance", b"instance", "newHistory", b"newHistory", "newMessages", b"newMessages", "newTasks", b"newTasks", "newTimers", b"newTimers", "numEventsProcessed", b"numEventsProcessed", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteWorkflowWorkItemRequest: _TypeAlias = CompleteWorkflowWorkItemRequest # noqa: Y015 @@ -177,6 +190,11 @@ class CompleteWorkflowWorkItemResponse(_message.Message): def __init__( self, ) -> None: ... 
+ _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteWorkflowWorkItemResponse: _TypeAlias = CompleteWorkflowWorkItemResponse # noqa: Y015 @@ -206,6 +224,7 @@ class WorkflowMessage(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["event", b"event", "instance", b"instance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___WorkflowMessage: _TypeAlias = WorkflowMessage # noqa: Y015 @@ -236,25 +255,67 @@ class BackendWorkflowState(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["customStatus", b"customStatus", "generation", b"generation", "history", b"history", "inbox", b"inbox"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___BackendWorkflowState: _TypeAlias = BackendWorkflowState # noqa: Y015 +@_typing.final +class ActivityInvocation(_message.Message): + """ActivityInvocation wraps a TaskScheduled HistoryEvent with optional + propagated history for delivery to an activity actor. + """ + + DESCRIPTOR: _descriptor.Descriptor + + HISTORYEVENT_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int + @_builtins.property + def historyEvent(self) -> _history_events_pb2.HistoryEvent: ... 
+ @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from the calling workflow.""" + + def __init__( + self, + *, + historyEvent: _history_events_pb2.HistoryEvent | None = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory"] # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... + +Global___ActivityInvocation: _TypeAlias = ActivityInvocation # noqa: Y015 + @_typing.final class CreateWorkflowInstanceRequest(_message.Message): DESCRIPTOR: _descriptor.Descriptor STARTEVENT_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int @_builtins.property def startEvent(self) -> _history_events_pb2.HistoryEvent: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from the parent workflow.""" + def __init__( self, *, startEvent: _history_events_pb2.HistoryEvent | None = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["startEvent", b"startEvent"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "propagatedHistory", b"propagatedHistory", "startEvent", b"startEvent"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["startEvent", b"startEvent"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "propagatedHistory", b"propagatedHistory", "startEvent", b"startEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... Global___CreateWorkflowInstanceRequest: _TypeAlias = CreateWorkflowInstanceRequest # noqa: Y015 @@ -273,6 +334,8 @@ class WorkflowMetadata(_message.Message): FAILUREDETAILS_FIELD_NUMBER: _builtins.int COMPLETEDAT_FIELD_NUMBER: _builtins.int PARENTINSTANCEID_FIELD_NUMBER: _builtins.int + VERSION_FIELD_NUMBER: _builtins.int + PARENTAPPID_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str runtimeStatus: _orchestration_pb2.OrchestrationStatus.ValueType @@ -291,6 +354,10 @@ class WorkflowMetadata(_message.Message): def failureDetails(self) -> _orchestration_pb2.TaskFailureDetails: ... @_builtins.property def completedAt(self) -> _timestamp_pb2.Timestamp: ... + @_builtins.property + def version(self) -> _wrappers_pb2.StringValue: ... + @_builtins.property + def parentAppId(self) -> _wrappers_pb2.StringValue: ... 
def __init__( self, *, @@ -305,11 +372,21 @@ class WorkflowMetadata(_message.Message): failureDetails: _orchestration_pb2.TaskFailureDetails | None = ..., completedAt: _timestamp_pb2.Timestamp | None = ..., parentInstanceId: _builtins.str = ..., + version: _wrappers_pb2.StringValue | None = ..., + parentAppId: _wrappers_pb2.StringValue | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "lastUpdatedAt", b"lastUpdatedAt", "output", b"output"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_parentAppId", b"_parentAppId", "_version", b"_version", "completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "lastUpdatedAt", b"lastUpdatedAt", "output", b"output", "parentAppId", b"parentAppId", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedAt", b"lastUpdatedAt", "name", b"name", "output", b"output", "parentInstanceId", b"parentInstanceId", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_parentAppId", b"_parentAppId", "_version", b"_version", "completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedAt", b"lastUpdatedAt", "name", b"name", "output", b"output", "parentAppId", b"parentAppId", "parentInstanceId", b"parentInstanceId", "runtimeStatus", b"runtimeStatus", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__parentAppId: _TypeAlias = _typing.Literal["parentAppId"] # noqa: Y015 + _WhichOneofArgType__parentAppId: _TypeAlias = _typing.Literal["_parentAppId", b"_parentAppId"] # noqa: Y015 + _WhichOneofReturnType__version: _TypeAlias = _typing.Literal["version"] # noqa: Y015 + _WhichOneofArgType__version: _TypeAlias = _typing.Literal["_version", b"_version"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__parentAppId) -> _WhichOneofReturnType__parentAppId | None: ... + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__version) -> _WhichOneofReturnType__version | None: ... Global___WorkflowMetadata: _TypeAlias = WorkflowMetadata # noqa: Y015 @@ -338,8 +415,11 @@ class BackendWorkflowStateMetadata(_message.Message): signatureLength: _builtins.int = ..., signingCertificateLength: _builtins.int = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["generation", b"generation", "historyLength", b"historyLength", "inboxLength", b"inboxLength", "signatureLength", b"signatureLength", "signingCertificateLength", b"signingCertificateLength"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___BackendWorkflowStateMetadata: _TypeAlias = BackendWorkflowStateMetadata # noqa: Y015 @@ -365,8 +445,11 @@ class SigningCertificate(_message.Message): *, certificate: _builtins.bytes = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["certificate", b"certificate"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___SigningCertificate: _TypeAlias = SigningCertificate # noqa: Y015 @@ -457,5 +540,6 @@ class DurableTimer(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["generation", b"generation", "timerEvent", b"timerEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___DurableTimer: _TypeAlias = DurableTimer # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py index 4cb95f3af..10a6c6cf9 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py index 7a5dea47b..9d910a0fb 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py @@ -27,7 +27,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14history_events.proto\x1a\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xd6\x03\n\x15\x45xecutionStartedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\x0eparentInstance\x18\x05 \x01(\x0b\x32\x13.ParentInstanceInfo\x12;\n\x17scheduledStartTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12)\n\x12parentTraceContext\x18\x07 \x01(\x0b\x32\r.TraceContext\x12\x34\n\x0eworkflowSpanID\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\t \x03(\x0b\x32 
.ExecutionStartedEvent.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x01\n\x17\x45xecutionCompletedEvent\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x03 \x01(\x0b\x32\x13.TaskFailureDetails\"X\n\x18\x45xecutionTerminatedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x02 \x01(\x08\"\x9e\x02\n\x12TaskScheduledEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x04 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"t\n\x12TaskCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"p\n\x0fTaskFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"\xa8\x02\n!ChildWorkflowInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x05 \x01(\x0b\x32\r.TraceContext\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"l\n#ChildWorkflowInstanceCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\"h\n ChildWorkflowInstanceFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\"\x18\n\x16TimerOriginCreateTimer\"(\n\x18TimerOriginExternalEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xa5\x02\n\x11TimerCreatedEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12>\n\x17rerunParentInstanceInfo\x18\x03 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x02\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x05 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x42\x08\n\x06originB\x07\n\x05_nameB\x1a\n\x18_rerunParentInstanceInfo\"N\n\x0fTimerFiredEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07timerId\x18\x02 \x01(\x05\"J\n\x14WorkflowStartedEvent\x12&\n\x07version\x18\x01 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\x18\n\x16WorkflowCompletedEvent\"_\n\x0e\x45ventSentEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"M\n\x10\x45ventRaisedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05input\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"A\n\x12\x43ontinueAsNewEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x17\x45xecutionSuspendedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"D\n\x15\x45xecutionResumedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"a\n\x15\x45xecutionStalledEvent\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xa8\t\n\x0cHistoryEvent\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\x05\x12-\n\ttimestamp\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x10\x65xecutionStarted\x18\x03 \x01(\x0b\x32\x16.ExecutionStartedEventH\x00\x12\x36\n\x12\x65xecutionCompleted\x18\x04 \x01(\x0b\x32\x18.ExecutionCompletedEventH\x00\x12\x38\n\x13\x65xecutionTerminated\x18\x05 \x01(\x0b\x32\x19.ExecutionTerminatedEventH\x00\x12,\n\rtaskScheduled\x18\x06 \x01(\x0b\x32\x13.TaskScheduledEventH\x00\x12,\n\rtaskCompleted\x18\x07 \x01(\x0b\x32\x13.TaskCompletedEventH\x00\x12&\n\ntaskFailed\x18\x08 \x01(\x0b\x32\x10.TaskFailedEventH\x00\x12J\n\x1c\x63hildWorkflowInstanceCreated\x18\t \x01(\x0b\x32\".ChildWorkflowInstanceCreatedEventH\x00\x12N\n\x1e\x63hildWorkflowInstanceCompleted\x18\n \x01(\x0b\x32$.ChildWorkflowInstanceCompletedEventH\x00\x12H\n\x1b\x63hildWorkflowInstanceFailed\x18\x0b \x01(\x0b\x32!.ChildWorkflowInstanceFailedEventH\x00\x12*\n\x0ctimerCreated\x18\x0c \x01(\x0b\x32\x12.TimerCreatedEventH\x00\x12&\n\ntimerFired\x18\r \x01(\x0b\x32\x10.TimerFiredEventH\x00\x12\x30\n\x0fworkflowStarted\x18\x0e \x01(\x0b\x32\x15.WorkflowStartedEventH\x00\x12\x34\n\x11workflowCompleted\x18\x0f \x01(\x0b\x32\x17.WorkflowCompletedEventH\x00\x12$\n\teventSent\x18\x10 \x01(\x0b\x32\x0f.EventSentEventH\x00\x12(\n\x0b\x65ventRaised\x18\x11 \x01(\x0b\x32\x11.EventRaisedEventH\x00\x12,\n\rcontinueAsNew\x18\x14 \x01(\x0b\x32\x13.ContinueAsNewEventH\x00\x12\x36\n\x12\x65xecutionSuspended\x18\x15 \x01(\x0b\x32\x18.ExecutionSuspendedEventH\x00\x12\x32\n\x10\x65xecutionResumed\x18\x16 \x01(\x0b\x32\x16.ExecutionResumedEventH\x00\x12\x32\n\x10\x65xecutionStalled\x18\x1f \x01(\x0b\x32\x16.ExecutionStalledEventH\x00\x12 \n\x06router\x18\x1e 
\x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x0b\n\teventTypeB\t\n\x07_routerJ\x04\x08\x12\x10\x13J\x04\x08\x13\x10\x14J\x04\x08\x17\x10\x18J\x04\x08\x18\x10\x19J\x04\x08\x19\x10\x1aJ\x04\x08\x1a\x10\x1bJ\x04\x08\x1b\x10\x1cJ\x04\x08\x1c\x10\x1dJ\x04\x08\x1d\x10\x1e\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14history_events.proto\x1a\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"~\n\x16PropagatedHistoryChunk\x12\r\n\x05\x61ppId\x18\x01 \x01(\t\x12\x17\n\x0fstartEventIndex\x18\x02 \x01(\x05\x12\x12\n\neventCount\x18\x03 \x01(\x05\x12\x12\n\ninstanceId\x18\x04 \x01(\t\x12\x14\n\x0cworkflowName\x18\x05 \x01(\t\"\x84\x01\n\x11PropagatedHistory\x12\x1d\n\x06\x65vents\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\'\n\x05scope\x18\x02 \x01(\x0e\x32\x18.HistoryPropagationScope\x12\'\n\x06\x63hunks\x18\x03 \x03(\x0b\x32\x17.PropagatedHistoryChunk\"\xe9\x03\n\x15\x45xecutionStartedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\x0eparentInstance\x18\x05 \x01(\x0b\x32\x13.ParentInstanceInfo\x12;\n\x17scheduledStartTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12)\n\x12parentTraceContext\x18\x07 \x01(\x0b\x32\r.TraceContext\x12\x34\n\x0eworkflowSpanID\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\t \x03(\x0b\x32 .ExecutionStartedEvent.TagsEntry\x12\x11\n\tinProcess\x18\n \x01(\x08\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x01\n\x17\x45xecutionCompletedEvent\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x03 \x01(\x0b\x32\x13.TaskFailureDetails\"X\n\x18\x45xecutionTerminatedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x02 \x01(\x08\"\x8d\x03\n\x12TaskScheduledEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x04 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x12>\n\x17historyPropagationScope\x18\x07 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x12\x11\n\tinProcess\x18\x08 \x01(\x08\x42\x1a\n\x18_rerunParentInstanceInfoB\x1a\n\x18_historyPropagationScope\"t\n\x12TaskCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"p\n\x0fTaskFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"\x84\x03\n!ChildWorkflowInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x05 \x01(\x0b\x32\r.TraceContext\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x12>\n\x17historyPropagationScope\x18\x07 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfoB\x1a\n\x18_historyPropagationScope\"l\n#ChildWorkflowInstanceCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\"h\n ChildWorkflowInstanceFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\"\x18\n\x16TimerOriginCreateTimer\"(\n\x18TimerOriginExternalEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\x18TimerOriginActivityRetry\x12\x17\n\x0ftaskExecutionId\x18\x01 \x01(\t\"3\n\x1dTimerOriginChildWorkflowRetry\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"\x97\x03\n\x11TimerCreatedEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12>\n\x17rerunParentInstanceInfo\x18\x03 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x02\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x05 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x06 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x07 \x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_nameB\x1a\n\x18_rerunParentInstanceInfo\"N\n\x0fTimerFiredEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07timerId\x18\x02 \x01(\x05\"J\n\x14WorkflowStartedEvent\x12&\n\x07version\x18\x01 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\x18\n\x16WorkflowCompletedEvent\"_\n\x0e\x45ventSentEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"M\n\x10\x45ventRaisedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05input\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"A\n\x12\x43ontinueAsNewEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x17\x45xecutionSuspendedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"D\n\x15\x45xecutionResumedEvent\x12+\n\x05input\x18\x01 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\"a\n\x15\x45xecutionStalledEvent\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xa8\t\n\x0cHistoryEvent\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\x05\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x10\x65xecutionStarted\x18\x03 \x01(\x0b\x32\x16.ExecutionStartedEventH\x00\x12\x36\n\x12\x65xecutionCompleted\x18\x04 \x01(\x0b\x32\x18.ExecutionCompletedEventH\x00\x12\x38\n\x13\x65xecutionTerminated\x18\x05 \x01(\x0b\x32\x19.ExecutionTerminatedEventH\x00\x12,\n\rtaskScheduled\x18\x06 \x01(\x0b\x32\x13.TaskScheduledEventH\x00\x12,\n\rtaskCompleted\x18\x07 \x01(\x0b\x32\x13.TaskCompletedEventH\x00\x12&\n\ntaskFailed\x18\x08 \x01(\x0b\x32\x10.TaskFailedEventH\x00\x12J\n\x1c\x63hildWorkflowInstanceCreated\x18\t \x01(\x0b\x32\".ChildWorkflowInstanceCreatedEventH\x00\x12N\n\x1e\x63hildWorkflowInstanceCompleted\x18\n \x01(\x0b\x32$.ChildWorkflowInstanceCompletedEventH\x00\x12H\n\x1b\x63hildWorkflowInstanceFailed\x18\x0b \x01(\x0b\x32!.ChildWorkflowInstanceFailedEventH\x00\x12*\n\x0ctimerCreated\x18\x0c \x01(\x0b\x32\x12.TimerCreatedEventH\x00\x12&\n\ntimerFired\x18\r \x01(\x0b\x32\x10.TimerFiredEventH\x00\x12\x30\n\x0fworkflowStarted\x18\x0e \x01(\x0b\x32\x15.WorkflowStartedEventH\x00\x12\x34\n\x11workflowCompleted\x18\x0f \x01(\x0b\x32\x17.WorkflowCompletedEventH\x00\x12$\n\teventSent\x18\x10 \x01(\x0b\x32\x0f.EventSentEventH\x00\x12(\n\x0b\x65ventRaised\x18\x11 \x01(\x0b\x32\x11.EventRaisedEventH\x00\x12,\n\rcontinueAsNew\x18\x14 \x01(\x0b\x32\x13.ContinueAsNewEventH\x00\x12\x36\n\x12\x65xecutionSuspended\x18\x15 \x01(\x0b\x32\x18.ExecutionSuspendedEventH\x00\x12\x32\n\x10\x65xecutionResumed\x18\x16 \x01(\x0b\x32\x16.ExecutionResumedEventH\x00\x12\x32\n\x10\x65xecutionStalled\x18\x1f \x01(\x0b\x32\x16.ExecutionStalledEventH\x00\x12 \n\x06router\x18\x1e 
\x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x0b\n\teventTypeB\t\n\x07_routerJ\x04\x08\x12\x10\x13J\x04\x08\x13\x10\x14J\x04\x08\x17\x10\x18J\x04\x08\x18\x10\x19J\x04\x08\x19\x10\x1aJ\x04\x08\x1a\x10\x1bJ\x04\x08\x1b\x10\x1cJ\x04\x08\x1c\x10\x1dJ\x04\x08\x1d\x10\x1e\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -37,50 +37,58 @@ _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._loaded_options = None _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_options = b'8\001' - _globals['_EXECUTIONSTARTEDEVENT']._serialized_start=111 - _globals['_EXECUTIONSTARTEDEVENT']._serialized_end=581 - _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_start=538 - _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_end=581 - _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_start=584 - _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_end=746 - _globals['_EXECUTIONTERMINATEDEVENT']._serialized_start=748 - _globals['_EXECUTIONTERMINATEDEVENT']._serialized_end=836 - _globals['_TASKSCHEDULEDEVENT']._serialized_start=839 - _globals['_TASKSCHEDULEDEVENT']._serialized_end=1125 - _globals['_TASKCOMPLETEDEVENT']._serialized_start=1127 - _globals['_TASKCOMPLETEDEVENT']._serialized_end=1243 - _globals['_TASKFAILEDEVENT']._serialized_start=1245 - _globals['_TASKFAILEDEVENT']._serialized_end=1357 - _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_start=1360 - _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_end=1656 - _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_start=1658 - _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_end=1766 - _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_start=1768 - 
_globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_end=1872 - _globals['_TIMERORIGINCREATETIMER']._serialized_start=1874 - _globals['_TIMERORIGINCREATETIMER']._serialized_end=1898 - _globals['_TIMERORIGINEXTERNALEVENT']._serialized_start=1900 - _globals['_TIMERORIGINEXTERNALEVENT']._serialized_end=1940 - _globals['_TIMERCREATEDEVENT']._serialized_start=1943 - _globals['_TIMERCREATEDEVENT']._serialized_end=2236 - _globals['_TIMERFIREDEVENT']._serialized_start=2238 - _globals['_TIMERFIREDEVENT']._serialized_end=2316 - _globals['_WORKFLOWSTARTEDEVENT']._serialized_start=2318 - _globals['_WORKFLOWSTARTEDEVENT']._serialized_end=2392 - _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_start=2394 - _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_end=2418 - _globals['_EVENTSENTEVENT']._serialized_start=2420 - _globals['_EVENTSENTEVENT']._serialized_end=2515 - _globals['_EVENTRAISEDEVENT']._serialized_start=2517 - _globals['_EVENTRAISEDEVENT']._serialized_end=2594 - _globals['_CONTINUEASNEWEVENT']._serialized_start=2596 - _globals['_CONTINUEASNEWEVENT']._serialized_end=2661 - _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_start=2663 - _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_end=2733 - _globals['_EXECUTIONRESUMEDEVENT']._serialized_start=2735 - _globals['_EXECUTIONRESUMEDEVENT']._serialized_end=2803 - _globals['_EXECUTIONSTALLEDEVENT']._serialized_start=2805 - _globals['_EXECUTIONSTALLEDEVENT']._serialized_end=2902 - _globals['_HISTORYEVENT']._serialized_start=2905 - _globals['_HISTORYEVENT']._serialized_end=4097 + _globals['_PROPAGATEDHISTORYCHUNK']._serialized_start=110 + _globals['_PROPAGATEDHISTORYCHUNK']._serialized_end=236 + _globals['_PROPAGATEDHISTORY']._serialized_start=239 + _globals['_PROPAGATEDHISTORY']._serialized_end=371 + _globals['_EXECUTIONSTARTEDEVENT']._serialized_start=374 + _globals['_EXECUTIONSTARTEDEVENT']._serialized_end=863 + _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_start=820 + 
_globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_end=863 + _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_start=866 + _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_end=1028 + _globals['_EXECUTIONTERMINATEDEVENT']._serialized_start=1030 + _globals['_EXECUTIONTERMINATEDEVENT']._serialized_end=1118 + _globals['_TASKSCHEDULEDEVENT']._serialized_start=1121 + _globals['_TASKSCHEDULEDEVENT']._serialized_end=1518 + _globals['_TASKCOMPLETEDEVENT']._serialized_start=1520 + _globals['_TASKCOMPLETEDEVENT']._serialized_end=1636 + _globals['_TASKFAILEDEVENT']._serialized_start=1638 + _globals['_TASKFAILEDEVENT']._serialized_end=1750 + _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_start=1753 + _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_end=2141 + _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_start=2143 + _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_end=2251 + _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_start=2253 + _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_end=2357 + _globals['_TIMERORIGINCREATETIMER']._serialized_start=2359 + _globals['_TIMERORIGINCREATETIMER']._serialized_end=2383 + _globals['_TIMERORIGINEXTERNALEVENT']._serialized_start=2385 + _globals['_TIMERORIGINEXTERNALEVENT']._serialized_end=2425 + _globals['_TIMERORIGINACTIVITYRETRY']._serialized_start=2427 + _globals['_TIMERORIGINACTIVITYRETRY']._serialized_end=2478 + _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_start=2480 + _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_end=2531 + _globals['_TIMERCREATEDEVENT']._serialized_start=2534 + _globals['_TIMERCREATEDEVENT']._serialized_end=2941 + _globals['_TIMERFIREDEVENT']._serialized_start=2943 + _globals['_TIMERFIREDEVENT']._serialized_end=3021 + _globals['_WORKFLOWSTARTEDEVENT']._serialized_start=3023 + _globals['_WORKFLOWSTARTEDEVENT']._serialized_end=3097 + _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_start=3099 + 
_globals['_WORKFLOWCOMPLETEDEVENT']._serialized_end=3123 + _globals['_EVENTSENTEVENT']._serialized_start=3125 + _globals['_EVENTSENTEVENT']._serialized_end=3220 + _globals['_EVENTRAISEDEVENT']._serialized_start=3222 + _globals['_EVENTRAISEDEVENT']._serialized_end=3299 + _globals['_CONTINUEASNEWEVENT']._serialized_start=3301 + _globals['_CONTINUEASNEWEVENT']._serialized_end=3366 + _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_start=3368 + _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_end=3438 + _globals['_EXECUTIONRESUMEDEVENT']._serialized_start=3440 + _globals['_EXECUTIONRESUMEDEVENT']._serialized_end=3508 + _globals['_EXECUTIONSTALLEDEVENT']._serialized_start=3510 + _globals['_EXECUTIONSTALLEDEVENT']._serialized_end=3607 + _globals['_HISTORYEVENT']._serialized_start=3610 + _globals['_HISTORYEVENT']._serialized_end=4802 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi index 26ff5cfd6..de48c9377 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi @@ -16,13 +16,91 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor +@_typing.final +class PropagatedHistoryChunk(_message.Message): + """A contiguous range of events produced by a single app. 
+ Used to track which app produced which events when history from + multiple workflows is flattened into a single PropagatedHistory. + """ + + DESCRIPTOR: _descriptor.Descriptor + + APPID_FIELD_NUMBER: _builtins.int + STARTEVENTINDEX_FIELD_NUMBER: _builtins.int + EVENTCOUNT_FIELD_NUMBER: _builtins.int + INSTANCEID_FIELD_NUMBER: _builtins.int + WORKFLOWNAME_FIELD_NUMBER: _builtins.int + appId: _builtins.str + startEventIndex: _builtins.int + """Index of the first event in this chunk (inclusive), + into PropagatedHistory.events. + """ + eventCount: _builtins.int + """Number of events in this chunk.""" + instanceId: _builtins.str + """The workflow instance ID/name that produced the events in this chunk.""" + workflowName: _builtins.str + def __init__( + self, + *, + appId: _builtins.str = ..., + startEventIndex: _builtins.int = ..., + eventCount: _builtins.int = ..., + instanceId: _builtins.str = ..., + workflowName: _builtins.str = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["appId", b"appId", "eventCount", b"eventCount", "instanceId", b"instanceId", "startEventIndex", b"startEventIndex", "workflowName", b"workflowName"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
+ +Global___PropagatedHistoryChunk: _TypeAlias = PropagatedHistoryChunk # noqa: Y015 + +@_typing.final +class PropagatedHistory(_message.Message): + DESCRIPTOR: _descriptor.Descriptor + + EVENTS_FIELD_NUMBER: _builtins.int + SCOPE_FIELD_NUMBER: _builtins.int + CHUNKS_FIELD_NUMBER: _builtins.int + scope: _orchestration_pb2.HistoryPropagationScope.ValueType + """The propagation scope that was used to produce this history.""" + @_builtins.property + def events(self) -> _containers.RepeatedCompositeFieldContainer[Global___HistoryEvent]: + """The history events being propagated, copied from the parent's + history at propagation time. + """ + + @_builtins.property + def chunks(self) -> _containers.RepeatedCompositeFieldContainer[Global___PropagatedHistoryChunk]: + """Chunk boundaries identifying which app produced which events. + Chunks are ordered & non-overlapping. Together they cover + all events in the events field. + """ + + def __init__( + self, + *, + events: _abc.Iterable[Global___HistoryEvent] | None = ..., + scope: _orchestration_pb2.HistoryPropagationScope.ValueType = ..., + chunks: _abc.Iterable[Global___PropagatedHistoryChunk] | None = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["chunks", b"chunks", "events", b"events", "scope", b"scope"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... + +Global___PropagatedHistory: _TypeAlias = PropagatedHistory # noqa: Y015 + @_typing.final class ExecutionStartedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor @@ -41,8 +119,11 @@ class ExecutionStartedEvent(_message.Message): key: _builtins.str = ..., value: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... NAME_FIELD_NUMBER: _builtins.int VERSION_FIELD_NUMBER: _builtins.int @@ -53,7 +134,12 @@ class ExecutionStartedEvent(_message.Message): PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int WORKFLOWSPANID_FIELD_NUMBER: _builtins.int TAGS_FIELD_NUMBER: _builtins.int + INPROCESS_FIELD_NUMBER: _builtins.int name: _builtins.str + inProcess: _builtins.bool + """inProcess is set by the sidecar at creation time when the workflow runs in-process. + This means work items for this instance are routed within-process instead of the default gRPC work-item stream. + """ @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -82,11 +168,13 @@ class ExecutionStartedEvent(_message.Message): parentTraceContext: _orchestration_pb2.TraceContext | None = ..., workflowSpanID: _wrappers_pb2.StringValue | None = ..., tags: _abc.Mapping[_builtins.str, _builtins.str] | None = ..., + inProcess: _builtins.bool = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version", "workflowInstance", b"workflowInstance", "workflowSpanID", b"workflowSpanID"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version", "workflowInstance", b"workflowInstance", "workflowSpanID", b"workflowSpanID"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["inProcess", b"inProcess", "input", b"input", "name", b"name", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version", "workflowInstance", b"workflowInstance", "workflowSpanID", b"workflowSpanID"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionStartedEvent: _TypeAlias = ExecutionStartedEvent # noqa: Y015 @@ -113,6 +201,7 @@ class ExecutionCompletedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "result", b"result", "workflowStatus", b"workflowStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionCompletedEvent: _TypeAlias = ExecutionCompletedEvent # noqa: Y015 @@ -135,6 +224,7 @@ class ExecutionTerminatedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "recurse", b"recurse"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___ExecutionTerminatedEvent: _TypeAlias = ExecutionTerminatedEvent # noqa: Y015 @@ -148,8 +238,19 @@ class TaskScheduledEvent(_message.Message): PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int RERUNPARENTINSTANCEINFO_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int + INPROCESS_FIELD_NUMBER: _builtins.int name: _builtins.str taskExecutionId: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope used when this task was originally scheduled. + Persisted on the event so rerun can re-issue the task with the same + scope after the action has been discarded. + """ + inProcess: _builtins.bool + """inProcess is propagated from the parent workflow's ExecutionStartedEvent.inProcess. + This means the activity worker can route within-process instead of the default gRPC work-item stream. + """ @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -171,13 +272,20 @@ class TaskScheduledEvent(_message.Message): parentTraceContext: _orchestration_pb2.TraceContext | None = ..., taskExecutionId: _builtins.str = ..., rerunParentInstanceInfo: _orchestration_pb2.RerunParentInstanceInfo | None = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., + inProcess: _builtins.bool = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "inProcess", b"inProcess", "input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["rerunParentInstanceInfo"] # noqa: Y015 _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__rerunParentInstanceInfo) -> _WhichOneofReturnType__rerunParentInstanceInfo | None: ... Global___TaskScheduledEvent: _TypeAlias = TaskScheduledEvent # noqa: Y015 @@ -204,6 +312,7 @@ class TaskCompletedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["result", b"result", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TaskCompletedEvent: _TypeAlias = TaskCompletedEvent # noqa: Y015 @@ -229,6 +338,7 @@ class TaskFailedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___TaskFailedEvent: _TypeAlias = TaskFailedEvent # noqa: Y015 @@ -242,8 +352,14 @@ class ChildWorkflowInstanceCreatedEvent(_message.Message): INPUT_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int RERUNPARENTINSTANCEINFO_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope used when this child workflow was originally + scheduled. Persisted on the event so rerun can re-issue the child with + the same scope after the action has been discarded. + """ @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -265,13 +381,19 @@ class ChildWorkflowInstanceCreatedEvent(_message.Message): input: _wrappers_pb2.StringValue | None = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., rerunParentInstanceInfo: _orchestration_pb2.RerunParentInstanceInfo | None = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "rerunParentInstanceInfo", b"rerunParentInstanceInfo", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["rerunParentInstanceInfo"] # noqa: Y015 _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__rerunParentInstanceInfo) -> _WhichOneofReturnType__rerunParentInstanceInfo | None: ... Global___ChildWorkflowInstanceCreatedEvent: _TypeAlias = ChildWorkflowInstanceCreatedEvent # noqa: Y015 @@ -295,6 +417,7 @@ class ChildWorkflowInstanceCompletedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["result", b"result", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ChildWorkflowInstanceCompletedEvent: _TypeAlias = ChildWorkflowInstanceCompletedEvent # noqa: Y015 @@ -317,6 +440,7 @@ class ChildWorkflowInstanceFailedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ChildWorkflowInstanceFailedEvent: _TypeAlias = ChildWorkflowInstanceFailedEvent # noqa: Y015 @@ -329,6 +453,11 @@ class TimerOriginCreateTimer(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TimerOriginCreateTimer: _TypeAlias = TimerOriginCreateTimer # noqa: Y015 @@ -346,11 +475,58 @@ class TimerOriginExternalEvent(_message.Message): *, name: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___TimerOriginExternalEvent: _TypeAlias = TimerOriginExternalEvent # noqa: Y015 +@_typing.final +class TimerOriginActivityRetry(_message.Message): + """Indicates the timer was created as a retry delay for an activity execution.""" + + DESCRIPTOR: _descriptor.Descriptor + + TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + taskExecutionId: _builtins.str + """The task execution ID of the activity being retried.""" + def __init__( + self, + *, + taskExecutionId: _builtins.str = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["taskExecutionId", b"taskExecutionId"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... + +Global___TimerOriginActivityRetry: _TypeAlias = TimerOriginActivityRetry # noqa: Y015 + +@_typing.final +class TimerOriginChildWorkflowRetry(_message.Message): + """Indicates the timer was created as a retry delay for a child workflow execution.""" + + DESCRIPTOR: _descriptor.Descriptor + + INSTANCEID_FIELD_NUMBER: _builtins.int + instanceId: _builtins.str + """The instance ID of the workflow being retried.""" + def __init__( + self, + *, + instanceId: _builtins.str = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
+ +Global___TimerOriginChildWorkflowRetry: _TypeAlias = TimerOriginChildWorkflowRetry # noqa: Y015 + @_typing.final class TimerCreatedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor @@ -360,6 +536,8 @@ class TimerCreatedEvent(_message.Message): RERUNPARENTINSTANCEINFO_FIELD_NUMBER: _builtins.int CREATETIMER_FIELD_NUMBER: _builtins.int EXTERNALEVENT_FIELD_NUMBER: _builtins.int + ACTIVITYRETRY_FIELD_NUMBER: _builtins.int + CHILDWORKFLOWRETRY_FIELD_NUMBER: _builtins.int name: _builtins.str @_builtins.property def fireAt(self) -> _timestamp_pb2.Timestamp: ... @@ -373,6 +551,10 @@ class TimerCreatedEvent(_message.Message): def createTimer(self) -> Global___TimerOriginCreateTimer: ... @_builtins.property def externalEvent(self) -> Global___TimerOriginExternalEvent: ... + @_builtins.property + def activityRetry(self) -> Global___TimerOriginActivityRetry: ... + @_builtins.property + def childWorkflowRetry(self) -> Global___TimerOriginChildWorkflowRetry: ... def __init__( self, *, @@ -381,16 +563,18 @@ class TimerCreatedEvent(_message.Message): rerunParentInstanceInfo: _orchestration_pb2.RerunParentInstanceInfo | None = ..., createTimer: Global___TimerOriginCreateTimer | None = ..., externalEvent: Global___TimerOriginExternalEvent | None = ..., + activityRetry: Global___TimerOriginActivityRetry | None = ..., + childWorkflowRetry: Global___TimerOriginChildWorkflowRetry | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
_WhichOneofReturnType__name: _TypeAlias = _typing.Literal["name"] # noqa: Y015 _WhichOneofArgType__name: _TypeAlias = _typing.Literal["_name", b"_name"] # noqa: Y015 _WhichOneofReturnType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["rerunParentInstanceInfo"] # noqa: Y015 _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 - _WhichOneofReturnType_origin: _TypeAlias = _typing.Literal["createTimer", "externalEvent"] # noqa: Y015 + _WhichOneofReturnType_origin: _TypeAlias = _typing.Literal["createTimer", "externalEvent", "activityRetry", "childWorkflowRetry"] # noqa: Y015 _WhichOneofArgType_origin: _TypeAlias = _typing.Literal["origin", b"origin"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__name) -> _WhichOneofReturnType__name | None: ... @@ -420,6 +604,7 @@ class TimerFiredEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["fireAt", b"fireAt", "timerId", b"timerId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TimerFiredEvent: _TypeAlias = TimerFiredEvent # noqa: Y015 @@ -454,6 +639,11 @@ class WorkflowCompletedEvent(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___WorkflowCompletedEvent: _TypeAlias = WorkflowCompletedEvent # noqa: Y015 @@ -479,6 +669,7 @@ class EventSentEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "instanceId", b"instanceId", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___EventSentEvent: _TypeAlias = EventSentEvent # noqa: Y015 @@ -501,6 +692,7 @@ class EventRaisedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___EventRaisedEvent: _TypeAlias = EventRaisedEvent # noqa: Y015 @@ -520,6 +712,7 @@ class ContinueAsNewEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ContinueAsNewEvent: _TypeAlias = ContinueAsNewEvent # noqa: Y015 @@ -539,6 +732,7 @@ class ExecutionSuspendedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionSuspendedEvent: _TypeAlias = ExecutionSuspendedEvent # noqa: Y015 @@ -558,6 +752,7 @@ class ExecutionResumedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___ExecutionResumedEvent: _TypeAlias = ExecutionResumedEvent # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py index 8a738f5a2..201aad188 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py index f9727296e..30c92da8c 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py @@ -26,7 +26,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"K\n\nTaskRouter\x12\x13\n\x0bsourceAppID\x18\x01 \x01(\t\x12\x18\n\x0btargetAppID\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_targetAppID\">\n\x0fWorkflowVersion\x12\x0f\n\x07patches\x18\x01 \x03(\t\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"Y\n\x10WorkflowInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb2\x01\n\x12TaskFailureDetails\x12\x11\n\terrorType\x18\x01 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x30\n\nstackTrace\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x0cinnerFailure\x18\x04 
\x01(\x0b\x32\x13.TaskFailureDetails\x12\x16\n\x0eisNonRetriable\x18\x05 \x01(\x08\"\xd3\x01\n\x12ParentInstanceInfo\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12*\n\x04name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x12\n\x05\x61ppID\x18\x05 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_appID\"-\n\x17RerunParentInstanceInfo\x12\x12\n\ninstanceID\x18\x01 \x01(\t\"i\n\x0cTraceContext\x12\x13\n\x0btraceParent\x18\x01 \x01(\t\x12\x12\n\x06spanID\x18\x02 \x01(\tB\x02\x18\x01\x12\x30\n\ntraceState\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xef\x05\n\rWorkflowState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x0eworkflowStatus\x18\x04 \x01(\x0e\x32\x14.OrchestrationStatus\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x63reatedTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14lastUpdatedTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\n \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x0b \x01(\x0b\x32\x13.TaskFailureDetails\x12\x31\n\x0b\x65xecutionId\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x12\x63ompletedTimestamp\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10parentInstanceId\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x04tags\x18\x0f \x03(\x0b\x32\x18.WorkflowState.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01*>\n\rStalledReason\x12\x12\n\x0ePATCH_MISMATCH\x10\x00\x12\x19\n\x15VERSION_NOT_AVAILABLE\x10\x01*\xd7\x02\n\x13OrchestrationStatus\x12 \n\x1cORCHESTRATION_STATUS_RUNNING\x10\x00\x12\"\n\x1eORCHESTRATION_STATUS_COMPLETED\x10\x01\x12)\n%ORCHESTRATION_STATUS_CONTINUED_AS_NEW\x10\x02\x12\x1f\n\x1bORCHESTRATION_STATUS_FAILED\x10\x03\x12!\n\x1dORCHESTRATION_STATUS_CANCELED\x10\x04\x12#\n\x1fORCHESTRATION_STATUS_TERMINATED\x10\x05\x12 \n\x1cORCHESTRATION_STATUS_PENDING\x10\x06\x12\"\n\x1eORCHESTRATION_STATUS_SUSPENDED\x10\x07\x12 \n\x1cORCHESTRATION_STATUS_STALLED\x10\x08\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"K\n\nTaskRouter\x12\x13\n\x0bsourceAppID\x18\x01 \x01(\t\x12\x18\n\x0btargetAppID\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_targetAppID\">\n\x0fWorkflowVersion\x12\x0f\n\x07patches\x18\x01 \x03(\t\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"Y\n\x10WorkflowInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb2\x01\n\x12TaskFailureDetails\x12\x11\n\terrorType\x18\x01 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x30\n\nstackTrace\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x0cinnerFailure\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x16\n\x0eisNonRetriable\x18\x05 \x01(\x08\"\xd3\x01\n\x12ParentInstanceInfo\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12*\n\x04name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x12\n\x05\x61ppID\x18\x05 
\x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_appID\"-\n\x17RerunParentInstanceInfo\x12\x12\n\ninstanceID\x18\x01 \x01(\t\"i\n\x0cTraceContext\x12\x13\n\x0btraceParent\x18\x01 \x01(\t\x12\x12\n\x06spanID\x18\x02 \x01(\tB\x02\x18\x01\x12\x30\n\ntraceState\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xef\x05\n\rWorkflowState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x0eworkflowStatus\x18\x04 \x01(\x0e\x32\x14.OrchestrationStatus\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x63reatedTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14lastUpdatedTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\n \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x0b \x01(\x0b\x32\x13.TaskFailureDetails\x12\x31\n\x0b\x65xecutionId\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x12\x63ompletedTimestamp\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10parentInstanceId\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x04tags\x18\x0f \x03(\x0b\x32\x18.WorkflowState.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01*>\n\rStalledReason\x12\x12\n\x0ePATCH_MISMATCH\x10\x00\x12\x19\n\x15VERSION_NOT_AVAILABLE\x10\x01*\xd7\x02\n\x13OrchestrationStatus\x12 \n\x1cORCHESTRATION_STATUS_RUNNING\x10\x00\x12\"\n\x1eORCHESTRATION_STATUS_COMPLETED\x10\x01\x12)\n%ORCHESTRATION_STATUS_CONTINUED_AS_NEW\x10\x02\x12\x1f\n\x1bORCHESTRATION_STATUS_FAILED\x10\x03\x12!\n\x1dORCHESTRATION_STATUS_CANCELED\x10\x04\x12#\n\x1fORCHESTRATION_STATUS_TERMINATED\x10\x05\x12 
\n\x1cORCHESTRATION_STATUS_PENDING\x10\x06\x12\"\n\x1eORCHESTRATION_STATUS_SUSPENDED\x10\x07\x12 \n\x1cORCHESTRATION_STATUS_STALLED\x10\x08*\x8f\x01\n\x17HistoryPropagationScope\x12\"\n\x1eHISTORY_PROPAGATION_SCOPE_NONE\x10\x00\x12)\n%HISTORY_PROPAGATION_SCOPE_OWN_HISTORY\x10\x01\x12%\n!HISTORY_PROPAGATION_SCOPE_LINEAGE\x10\x02\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -42,6 +42,8 @@ _globals['_STALLEDREASON']._serialized_end=1685 _globals['_ORCHESTRATIONSTATUS']._serialized_start=1688 _globals['_ORCHESTRATIONSTATUS']._serialized_end=2031 + _globals['_HISTORYPROPAGATIONSCOPE']._serialized_start=2034 + _globals['_HISTORYPROPAGATIONSCOPE']._serialized_end=2177 _globals['_TASKROUTER']._serialized_start=88 _globals['_TASKROUTER']._serialized_end=163 _globals['_WORKFLOWVERSION']._serialized_start=165 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi index ce5f416c1..0096f805a 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi @@ -16,10 +16,10 @@ import builtins as _builtins import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never if sys.version_info >= (3, 13): from warnings import deprecated as _deprecated @@ -72,6 +72,46 @@ ORCHESTRATION_STATUS_SUSPENDED: OrchestrationStatus.ValueType # 7 ORCHESTRATION_STATUS_STALLED: OrchestrationStatus.ValueType # 8 
Global___OrchestrationStatus: _TypeAlias = OrchestrationStatus # noqa: Y015 +class _HistoryPropagationScope: + ValueType = _typing.NewType("ValueType", _builtins.int) + V: _TypeAlias = ValueType # noqa: Y015 + +class _HistoryPropagationScopeEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_HistoryPropagationScope.ValueType], _builtins.type): + DESCRIPTOR: _descriptor.EnumDescriptor + HISTORY_PROPAGATION_SCOPE_NONE: _HistoryPropagationScope.ValueType # 0 + """No propagation. This is the default for an unset/missing field; the + child receives no history from the caller. + """ + HISTORY_PROPAGATION_SCOPE_OWN_HISTORY: _HistoryPropagationScope.ValueType # 1 + """Propagate the caller's own history events only. The child does + not see any ancestral history (trust boundary). + """ + HISTORY_PROPAGATION_SCOPE_LINEAGE: _HistoryPropagationScope.ValueType # 2 + """Propagate the caller's own history events AND the full ancestral + chain. Any propagated history this workflow received from its + parent is forwarded to the child. + """ + +class HistoryPropagationScope(_HistoryPropagationScope, metaclass=_HistoryPropagationScopeEnumTypeWrapper): + """HistoryPropagationScope controls how history is propagated to a child + workflow or activity + """ + +HISTORY_PROPAGATION_SCOPE_NONE: HistoryPropagationScope.ValueType # 0 +"""No propagation. This is the default for an unset/missing field; the +child receives no history from the caller. +""" +HISTORY_PROPAGATION_SCOPE_OWN_HISTORY: HistoryPropagationScope.ValueType # 1 +"""Propagate the caller's own history events only. The child does +not see any ancestral history (trust boundary). +""" +HISTORY_PROPAGATION_SCOPE_LINEAGE: HistoryPropagationScope.ValueType # 2 +"""Propagate the caller's own history events AND the full ancestral +chain. Any propagated history this workflow received from its +parent is forwarded to the child. 
+""" +Global___HistoryPropagationScope: _TypeAlias = HistoryPropagationScope # noqa: Y015 + @_typing.final class TaskRouter(_message.Message): DESCRIPTOR: _descriptor.Descriptor @@ -141,6 +181,7 @@ class WorkflowInstance(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___WorkflowInstance: _TypeAlias = WorkflowInstance # noqa: Y015 @@ -173,6 +214,7 @@ class TaskFailureDetails(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["errorMessage", b"errorMessage", "errorType", b"errorType", "innerFailure", b"innerFailure", "isNonRetriable", b"isNonRetriable", "stackTrace", b"stackTrace"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TaskFailureDetails: _TypeAlias = TaskFailureDetails # noqa: Y015 @@ -231,8 +273,11 @@ class RerunParentInstanceInfo(_message.Message): *, instanceID: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceID", b"instanceID"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RerunParentInstanceInfo: _TypeAlias = RerunParentInstanceInfo # noqa: Y015 @@ -263,6 +308,7 @@ class TraceContext(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["spanID", b"spanID", "traceParent", b"traceParent", "traceState", b"traceState"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TraceContext: _TypeAlias = TraceContext # noqa: Y015 @@ -284,8 +330,11 @@ class WorkflowState(_message.Message): key: _builtins.str = ..., value: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... INSTANCEID_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int @@ -352,5 +401,6 @@ class WorkflowState(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["completedTimestamp", b"completedTimestamp", "createdTimestamp", b"createdTimestamp", "customStatus", b"customStatus", "executionId", b"executionId", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedTimestamp", b"lastUpdatedTimestamp", "name", b"name", "output", b"output", "parentInstanceId", b"parentInstanceId", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version", "workflowStatus", b"workflowStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___WorkflowState: _TypeAlias = WorkflowState # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py index dca81c14f..8afda749e 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py index d63efa775..712f401ca 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_actions.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xc4\x01\n\x12ScheduleTaskAction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x04 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\tB\t\n\x07_router\"\xc6\x01\n\x19\x43reateChildWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 
\n\x06router\x18\x05 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_router\"\xc9\x01\n\x11\x43reateTimerAction\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x03 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x04 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x42\x08\n\x06originB\x07\n\x05_name\"p\n\x0fSendEventAction\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0c\n\x04name\x18\x02 \x01(\t\x12*\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xaa\x02\n\x16\x43ompleteWorkflowAction\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\nnewVersion\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x0f\x63\x61rryoverEvents\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12+\n\x0e\x66\x61ilureDetails\x18\x06 \x01(\x0b\x32\x13.TaskFailureDetails\"l\n\x17TerminateWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x03 \x01(\x08\"#\n!WorkflowVersionNotAvailableAction\"\xd6\x03\n\x0eWorkflowAction\x12\n\n\x02id\x18\x01 \x01(\x05\x12+\n\x0cscheduleTask\x18\x02 \x01(\x0b\x32\x13.ScheduleTaskActionH\x00\x12\x39\n\x13\x63reateChildWorkflow\x18\x03 \x01(\x0b\x32\x1a.CreateChildWorkflowActionH\x00\x12)\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x12.CreateTimerActionH\x00\x12%\n\tsendEvent\x18\x05 \x01(\x0b\x32\x10.SendEventActionH\x00\x12\x33\n\x10\x63ompleteWorkflow\x18\x06 \x01(\x0b\x32\x17.CompleteWorkflowActionH\x00\x12\x35\n\x11terminateWorkflow\x18\x07 \x01(\x0b\x32\x18.TerminateWorkflowActionH\x00\x12I\n\x1bworkflowVersionNotAvailable\x18\n \x01(\x0b\x32\".WorkflowVersionNotAvailableActionH\x00\x12 \n\x06router\x18\t 
\x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x14\n\x12workflowActionTypeB\t\n\x07_routerJ\x04\x08\x08\x10\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_actions.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xb3\x02\n\x12ScheduleTaskAction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x04 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17historyPropagationScope\x18\x06 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x12\x11\n\tinProcess\x18\x07 \x01(\x08\x42\t\n\x07_routerB\x1a\n\x18_historyPropagationScope\"\xb5\x02\n\x19\x43reateChildWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x05 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12>\n\x17historyPropagationScope\x18\x06 \x01(\x0e\x32\x18.HistoryPropagationScopeH\x01\x88\x01\x01\x12\x11\n\tinProcess\x18\x07 \x01(\x08\x42\t\n\x07_routerB\x1a\n\x18_historyPropagationScope\"\xbb\x02\n\x11\x43reateTimerAction\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x03 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x04 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x05 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x06 
\x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_name\"p\n\x0fSendEventAction\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0c\n\x04name\x18\x02 \x01(\t\x12*\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xaa\x02\n\x16\x43ompleteWorkflowAction\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\nnewVersion\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x0f\x63\x61rryoverEvents\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12+\n\x0e\x66\x61ilureDetails\x18\x06 \x01(\x0b\x32\x13.TaskFailureDetails\"l\n\x17TerminateWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x03 \x01(\x08\"#\n!WorkflowVersionNotAvailableAction\"\xd6\x03\n\x0eWorkflowAction\x12\n\n\x02id\x18\x01 \x01(\x05\x12+\n\x0cscheduleTask\x18\x02 \x01(\x0b\x32\x13.ScheduleTaskActionH\x00\x12\x39\n\x13\x63reateChildWorkflow\x18\x03 \x01(\x0b\x32\x1a.CreateChildWorkflowActionH\x00\x12)\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x12.CreateTimerActionH\x00\x12%\n\tsendEvent\x18\x05 \x01(\x0b\x32\x10.SendEventActionH\x00\x12\x33\n\x10\x63ompleteWorkflow\x18\x06 \x01(\x0b\x32\x17.CompleteWorkflowActionH\x00\x12\x35\n\x11terminateWorkflow\x18\x07 \x01(\x0b\x32\x18.TerminateWorkflowActionH\x00\x12I\n\x1bworkflowVersionNotAvailable\x18\n \x01(\x0b\x32\".WorkflowVersionNotAvailableActionH\x00\x12 \n\x06router\x18\t \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x14\n\x12workflowActionTypeB\t\n\x07_routerJ\x04\x08\x08\x10\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -37,19 +37,19 @@ 
_globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_SCHEDULETASKACTION']._serialized_start=139 - _globals['_SCHEDULETASKACTION']._serialized_end=335 - _globals['_CREATECHILDWORKFLOWACTION']._serialized_start=338 - _globals['_CREATECHILDWORKFLOWACTION']._serialized_end=536 - _globals['_CREATETIMERACTION']._serialized_start=539 - _globals['_CREATETIMERACTION']._serialized_end=740 - _globals['_SENDEVENTACTION']._serialized_start=742 - _globals['_SENDEVENTACTION']._serialized_end=854 - _globals['_COMPLETEWORKFLOWACTION']._serialized_start=857 - _globals['_COMPLETEWORKFLOWACTION']._serialized_end=1155 - _globals['_TERMINATEWORKFLOWACTION']._serialized_start=1157 - _globals['_TERMINATEWORKFLOWACTION']._serialized_end=1265 - _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_start=1267 - _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_end=1302 - _globals['_WORKFLOWACTION']._serialized_start=1305 - _globals['_WORKFLOWACTION']._serialized_end=1775 + _globals['_SCHEDULETASKACTION']._serialized_end=446 + _globals['_CREATECHILDWORKFLOWACTION']._serialized_start=449 + _globals['_CREATECHILDWORKFLOWACTION']._serialized_end=758 + _globals['_CREATETIMERACTION']._serialized_start=761 + _globals['_CREATETIMERACTION']._serialized_end=1076 + _globals['_SENDEVENTACTION']._serialized_start=1078 + _globals['_SENDEVENTACTION']._serialized_end=1190 + _globals['_COMPLETEWORKFLOWACTION']._serialized_start=1193 + _globals['_COMPLETEWORKFLOWACTION']._serialized_end=1491 + _globals['_TERMINATEWORKFLOWACTION']._serialized_start=1493 + _globals['_TERMINATEWORKFLOWACTION']._serialized_end=1601 + _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_start=1603 + _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_end=1638 + _globals['_WORKFLOWACTION']._serialized_start=1641 + 
_globals['_WORKFLOWACTION']._serialized_end=2111 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi index 1b754f784..02601442b 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi @@ -17,10 +17,10 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -33,8 +33,14 @@ class ScheduleTaskAction(_message.Message): INPUT_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int + INPROCESS_FIELD_NUMBER: _builtins.int name: _builtins.str taskExecutionId: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope. Absent/SCOPE_NONE = no propagation.""" + inProcess: _builtins.bool + """inProcess is set by the sidecar when the activity should run in-process.""" @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... 
@_builtins.property @@ -49,13 +55,20 @@ class ScheduleTaskAction(_message.Message): input: _wrappers_pb2.StringValue | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., taskExecutionId: _builtins.str = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., + inProcess: _builtins.bool = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "name", b"name", "router", b"router", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "inProcess", b"inProcess", "input", b"input", "name", b"name", "router", b"router", "taskExecutionId", b"taskExecutionId", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
+ _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... Global___ScheduleTaskAction: _TypeAlias = ScheduleTaskAction # noqa: Y015 @@ -69,8 +82,14 @@ class CreateChildWorkflowAction(_message.Message): VERSION_FIELD_NUMBER: _builtins.int INPUT_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int + HISTORYPROPAGATIONSCOPE_FIELD_NUMBER: _builtins.int + INPROCESS_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType + """History propagation scope. Absent/SCOPE_NONE = no propagation.""" + inProcess: _builtins.bool + """inProcess is set by the sidecar at creation time when the workflow runs in-process.""" @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -85,13 +104,20 @@ class CreateChildWorkflowAction(_message.Message): version: _wrappers_pb2.StringValue | None = ..., input: _wrappers_pb2.StringValue | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., + historyPropagationScope: _orchestration_pb2.HistoryPropagationScope.ValueType | None = ..., + inProcess: _builtins.bool = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "input", b"input", "router", b"router", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "input", b"input", "instanceId", b"instanceId", "name", b"name", "router", b"router", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope", "_router", b"_router", "historyPropagationScope", b"historyPropagationScope", "inProcess", b"inProcess", "input", b"input", "instanceId", b"instanceId", "name", b"name", "router", b"router", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__historyPropagationScope: _TypeAlias = _typing.Literal["historyPropagationScope"] # noqa: Y015 + _WhichOneofArgType__historyPropagationScope: _TypeAlias = _typing.Literal["_historyPropagationScope", b"_historyPropagationScope"] # noqa: Y015 _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__historyPropagationScope) -> _WhichOneofReturnType__historyPropagationScope | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... 
Global___CreateChildWorkflowAction: _TypeAlias = CreateChildWorkflowAction # noqa: Y015 @@ -104,6 +130,8 @@ class CreateTimerAction(_message.Message): NAME_FIELD_NUMBER: _builtins.int CREATETIMER_FIELD_NUMBER: _builtins.int EXTERNALEVENT_FIELD_NUMBER: _builtins.int + ACTIVITYRETRY_FIELD_NUMBER: _builtins.int + CHILDWORKFLOWRETRY_FIELD_NUMBER: _builtins.int name: _builtins.str @_builtins.property def fireAt(self) -> _timestamp_pb2.Timestamp: ... @@ -111,6 +139,10 @@ class CreateTimerAction(_message.Message): def createTimer(self) -> _history_events_pb2.TimerOriginCreateTimer: ... @_builtins.property def externalEvent(self) -> _history_events_pb2.TimerOriginExternalEvent: ... + @_builtins.property + def activityRetry(self) -> _history_events_pb2.TimerOriginActivityRetry: ... + @_builtins.property + def childWorkflowRetry(self) -> _history_events_pb2.TimerOriginChildWorkflowRetry: ... def __init__( self, *, @@ -118,14 +150,16 @@ class CreateTimerAction(_message.Message): name: _builtins.str | None = ..., createTimer: _history_events_pb2.TimerOriginCreateTimer | None = ..., externalEvent: _history_events_pb2.TimerOriginExternalEvent | None = ..., + activityRetry: _history_events_pb2.TimerOriginActivityRetry | None = ..., + childWorkflowRetry: _history_events_pb2.TimerOriginChildWorkflowRetry | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... _WhichOneofReturnType__name: _TypeAlias = _typing.Literal["name"] # noqa: Y015 _WhichOneofArgType__name: _TypeAlias = _typing.Literal["_name", b"_name"] # noqa: Y015 - _WhichOneofReturnType_origin: _TypeAlias = _typing.Literal["createTimer", "externalEvent"] # noqa: Y015 + _WhichOneofReturnType_origin: _TypeAlias = _typing.Literal["createTimer", "externalEvent", "activityRetry", "childWorkflowRetry"] # noqa: Y015 _WhichOneofArgType_origin: _TypeAlias = _typing.Literal["origin", b"origin"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__name) -> _WhichOneofReturnType__name | None: ... @@ -157,6 +191,7 @@ class SendEventAction(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["data", b"data", "instance", b"instance", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___SendEventAction: _TypeAlias = SendEventAction # noqa: Y015 @@ -195,6 +230,7 @@ class CompleteWorkflowAction(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["carryoverEvents", b"carryoverEvents", "details", b"details", "failureDetails", b"failureDetails", "newVersion", b"newVersion", "result", b"result", "workflowStatus", b"workflowStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteWorkflowAction: _TypeAlias = CompleteWorkflowAction # noqa: Y015 @@ -220,6 +256,7 @@ class TerminateWorkflowAction(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "reason", b"reason", "recurse", b"recurse"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TerminateWorkflowAction: _TypeAlias = TerminateWorkflowAction # noqa: Y015 @@ -230,6 +267,11 @@ class WorkflowVersionNotAvailableAction(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___WorkflowVersionNotAvailableAction: _TypeAlias = WorkflowVersionNotAvailableAction # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py index 2445897f9..042994a2a 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py index a3255f819..e9c39e64d 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py @@ -30,7 +30,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_service.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1aorchestrator_actions.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xfc\x01\n\x0f\x41\x63tivityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0e\n\x06taskId\x18\x05 \x01(\x05\x12)\n\x12parentTraceContext\x18\x06 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x07 
\x01(\t\"\xaa\x01\n\x10\x41\x63tivityResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0e\n\x06taskId\x18\x02 \x01(\x05\x12,\n\x06result\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0f\x63ompletionToken\x18\x05 \x01(\t\"\xf2\x01\n\x0fWorkflowRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\npastEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewEvents\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12 \n\x18requiresHistoryStreaming\x18\x06 \x01(\x08\x12 \n\x06router\x18\x07 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_routerJ\x04\x08\x05\x10\x06\"\x82\x02\n\x10WorkflowResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x0f.WorkflowAction\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0f\x63ompletionToken\x18\x04 \x01(\t\x12\x37\n\x12numEventsProcessed\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12&\n\x07version\x18\x06 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\xaf\x03\n\x15\x43reateInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0b\x65xecutionId\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\x08 \x03(\x0b\x32 .CreateInstanceRequest.TagsEntry\x12)\n\x12parentTraceContext\x18\t \x01(\x0b\x32\r.TraceContext\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x06\x10\x07R\x1aorchestrationIdReusePolicy\",\n\x16\x43reateInstanceResponse\x12\x12\n\ninstanceId\x18\x01 
\x01(\t\"E\n\x12GetInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1b\n\x13getInputsAndOutputs\x18\x02 \x01(\x08\"L\n\x13GetInstanceResponse\x12\x0e\n\x06\x65xists\x18\x01 \x01(\x08\x12%\n\rworkflowState\x18\x02 \x01(\x0b\x32\x0e.WorkflowState\"b\n\x11RaiseEventRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x14\n\x12RaiseEventResponse\"g\n\x10TerminateRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06output\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x11\n\trecursive\x18\x03 \x01(\x08\"\x13\n\x11TerminateResponse\"R\n\x0eSuspendRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x11\n\x0fSuspendResponse\"Q\n\rResumeRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x10\n\x0eResumeResponse\"\x9e\x01\n\x15PurgeInstancesRequest\x12\x14\n\ninstanceId\x18\x01 \x01(\tH\x00\x12\x33\n\x13purgeInstanceFilter\x18\x02 \x01(\x0b\x32\x14.PurgeInstanceFilterH\x00\x12\x11\n\trecursive\x18\x03 \x01(\x08\x12\x12\n\x05\x66orce\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\t\n\x07requestB\x08\n\x06_force\"\xaa\x01\n\x13PurgeInstanceFilter\x12\x33\n\x0f\x63reatedTimeFrom\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcreatedTimeTo\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\rruntimeStatus\x18\x03 \x03(\x0e\x32\x14.OrchestrationStatus\"f\n\x16PurgeInstancesResponse\x12\x1c\n\x14\x64\x65letedInstanceCount\x18\x01 \x01(\x05\x12.\n\nisComplete\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\"-\n\x13GetWorkItemsRequestJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\n\x10\x0b\"\x9a\x01\n\x08WorkItem\x12+\n\x0fworkflowRequest\x18\x01 \x01(\x0b\x32\x10.WorkflowRequestH\x00\x12+\n\x0f\x61\x63tivityRequest\x18\x02 
\x01(\x0b\x32\x10.ActivityRequestH\x00\x12\x17\n\x0f\x63ompletionToken\x18\n \x01(\tB\t\n\x07requestJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"\x16\n\x14\x43ompleteTaskResponse\"\x85\x02\n\x1dRerunWorkflowFromEventRequest\x12\x18\n\x10sourceInstanceID\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventID\x18\x02 \x01(\r\x12\x1a\n\rnewInstanceID\x18\x03 \x01(\tH\x00\x88\x01\x01\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x16\n\x0eoverwriteInput\x18\x05 \x01(\x08\x12\'\n\x1anewChildWorkflowInstanceID\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_newInstanceIDB\x1d\n\x1b_newChildWorkflowInstanceID\"7\n\x1eRerunWorkflowFromEventResponse\x12\x15\n\rnewInstanceID\x18\x01 \x01(\t\"r\n\x16ListInstanceIDsRequest\x12\x1e\n\x11\x63ontinuationToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08pageSize\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x14\n\x12_continuationTokenB\x0b\n\t_pageSize\"d\n\x17ListInstanceIDsResponse\x12\x13\n\x0binstanceIds\x18\x01 \x03(\t\x12\x1e\n\x11\x63ontinuationToken\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x14\n\x12_continuationToken\"/\n\x19GetInstanceHistoryRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\";\n\x1aGetInstanceHistoryResponse\x12\x1d\n\x06\x65vents\x18\x01 
\x03(\x0b\x32\r.HistoryEvent*^\n\x10WorkerCapability\x12!\n\x1dWORKER_CAPABILITY_UNSPECIFIED\x10\x00\x12\'\n#WORKER_CAPABILITY_HISTORY_STREAMING\x10\x01\x32\xe8\x08\n\x15TaskHubSidecarService\x12\x37\n\x05Hello\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12@\n\rStartInstance\x12\x16.CreateInstanceRequest\x1a\x17.CreateInstanceResponse\x12\x38\n\x0bGetInstance\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x41\n\x14WaitForInstanceStart\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x46\n\x19WaitForInstanceCompletion\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x35\n\nRaiseEvent\x12\x12.RaiseEventRequest\x1a\x13.RaiseEventResponse\x12:\n\x11TerminateInstance\x12\x11.TerminateRequest\x1a\x12.TerminateResponse\x12\x34\n\x0fSuspendInstance\x12\x0f.SuspendRequest\x1a\x10.SuspendResponse\x12\x31\n\x0eResumeInstance\x12\x0e.ResumeRequest\x1a\x0f.ResumeResponse\x12\x41\n\x0ePurgeInstances\x12\x16.PurgeInstancesRequest\x1a\x17.PurgeInstancesResponse\x12\x31\n\x0cGetWorkItems\x12\x14.GetWorkItemsRequest\x1a\t.WorkItem0\x01\x12@\n\x14\x43ompleteActivityTask\x12\x11.ActivityResponse\x1a\x15.CompleteTaskResponse\x12I\n\x18\x43ompleteOrchestratorTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\"\x03\x88\x02\x01\x12@\n\x14\x43ompleteWorkflowTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\x12Y\n\x16RerunWorkflowFromEvent\x12\x1e.RerunWorkflowFromEventRequest\x1a\x1f.RerunWorkflowFromEventResponse\x12\x44\n\x0fListInstanceIDs\x12\x17.ListInstanceIDsRequest\x1a\x18.ListInstanceIDsResponse\x12M\n\x12GetInstanceHistory\x12\x1a.GetInstanceHistoryRequest\x1a\x1b.GetInstanceHistoryResponseBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_service.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1aorchestrator_actions.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xc6\x02\n\x0f\x41\x63tivityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0e\n\x06taskId\x18\x05 \x01(\x05\x12)\n\x12parentTraceContext\x18\x06 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x07 \x01(\t\x12\x32\n\x11propagatedHistory\x18\x08 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistory\"\xaa\x01\n\x10\x41\x63tivityResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0e\n\x06taskId\x18\x02 \x01(\x05\x12,\n\x06result\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0f\x63ompletionToken\x18\x05 \x01(\t\"\xbc\x02\n\x0fWorkflowRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\npastEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewEvents\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12 \n\x18requiresHistoryStreaming\x18\x06 \x01(\x08\x12 \n\x06router\x18\x07 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x32\n\x11propagatedHistory\x18\x08 \x01(\x0b\x32\x12.PropagatedHistoryH\x01\x88\x01\x01\x42\t\n\x07_routerB\x14\n\x12_propagatedHistoryJ\x04\x08\x05\x10\x06\"\x82\x02\n\x10WorkflowResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x0f.WorkflowAction\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0f\x63ompletionToken\x18\x04 \x01(\t\x12\x37\n\x12numEventsProcessed\x18\x05 
\x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12&\n\x07version\x18\x06 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\xaf\x03\n\x15\x43reateInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0b\x65xecutionId\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\x08 \x03(\x0b\x32 .CreateInstanceRequest.TagsEntry\x12)\n\x12parentTraceContext\x18\t \x01(\x0b\x32\r.TraceContext\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x06\x10\x07R\x1aorchestrationIdReusePolicy\",\n\x16\x43reateInstanceResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"E\n\x12GetInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1b\n\x13getInputsAndOutputs\x18\x02 \x01(\x08\"L\n\x13GetInstanceResponse\x12\x0e\n\x06\x65xists\x18\x01 \x01(\x08\x12%\n\rworkflowState\x18\x02 \x01(\x0b\x32\x0e.WorkflowState\"b\n\x11RaiseEventRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x14\n\x12RaiseEventResponse\"g\n\x10TerminateRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06output\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x11\n\trecursive\x18\x03 \x01(\x08\"\x13\n\x11TerminateResponse\"R\n\x0eSuspendRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x11\n\x0fSuspendResponse\"Q\n\rResumeRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x10\n\x0eResumeResponse\"\x9e\x01\n\x15PurgeInstancesRequest\x12\x14\n\ninstanceId\x18\x01 
\x01(\tH\x00\x12\x33\n\x13purgeInstanceFilter\x18\x02 \x01(\x0b\x32\x14.PurgeInstanceFilterH\x00\x12\x11\n\trecursive\x18\x03 \x01(\x08\x12\x12\n\x05\x66orce\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\t\n\x07requestB\x08\n\x06_force\"\xaa\x01\n\x13PurgeInstanceFilter\x12\x33\n\x0f\x63reatedTimeFrom\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcreatedTimeTo\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\rruntimeStatus\x18\x03 \x03(\x0e\x32\x14.OrchestrationStatus\"f\n\x16PurgeInstancesResponse\x12\x1c\n\x14\x64\x65letedInstanceCount\x18\x01 \x01(\x05\x12.\n\nisComplete\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\"-\n\x13GetWorkItemsRequestJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\n\x10\x0b\"\x9a\x01\n\x08WorkItem\x12+\n\x0fworkflowRequest\x18\x01 \x01(\x0b\x32\x10.WorkflowRequestH\x00\x12+\n\x0f\x61\x63tivityRequest\x18\x02 \x01(\x0b\x32\x10.ActivityRequestH\x00\x12\x17\n\x0f\x63ompletionToken\x18\n \x01(\tB\t\n\x07requestJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"\x16\n\x14\x43ompleteTaskResponse\"\x85\x02\n\x1dRerunWorkflowFromEventRequest\x12\x18\n\x10sourceInstanceID\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventID\x18\x02 \x01(\r\x12\x1a\n\rnewInstanceID\x18\x03 \x01(\tH\x00\x88\x01\x01\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x16\n\x0eoverwriteInput\x18\x05 \x01(\x08\x12\'\n\x1anewChildWorkflowInstanceID\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_newInstanceIDB\x1d\n\x1b_newChildWorkflowInstanceID\"7\n\x1eRerunWorkflowFromEventResponse\x12\x15\n\rnewInstanceID\x18\x01 \x01(\t\"r\n\x16ListInstanceIDsRequest\x12\x1e\n\x11\x63ontinuationToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08pageSize\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x14\n\x12_continuationTokenB\x0b\n\t_pageSize\"d\n\x17ListInstanceIDsResponse\x12\x13\n\x0binstanceIds\x18\x01 \x03(\t\x12\x1e\n\x11\x63ontinuationToken\x18\x02 
\x01(\tH\x00\x88\x01\x01\x42\x14\n\x12_continuationToken\"/\n\x19GetInstanceHistoryRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\";\n\x1aGetInstanceHistoryResponse\x12\x1d\n\x06\x65vents\x18\x01 \x03(\x0b\x32\r.HistoryEvent*^\n\x10WorkerCapability\x12!\n\x1dWORKER_CAPABILITY_UNSPECIFIED\x10\x00\x12\'\n#WORKER_CAPABILITY_HISTORY_STREAMING\x10\x01\x32\xe8\x08\n\x15TaskHubSidecarService\x12\x37\n\x05Hello\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12@\n\rStartInstance\x12\x16.CreateInstanceRequest\x1a\x17.CreateInstanceResponse\x12\x38\n\x0bGetInstance\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x41\n\x14WaitForInstanceStart\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x46\n\x19WaitForInstanceCompletion\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x35\n\nRaiseEvent\x12\x12.RaiseEventRequest\x1a\x13.RaiseEventResponse\x12:\n\x11TerminateInstance\x12\x11.TerminateRequest\x1a\x12.TerminateResponse\x12\x34\n\x0fSuspendInstance\x12\x0f.SuspendRequest\x1a\x10.SuspendResponse\x12\x31\n\x0eResumeInstance\x12\x0e.ResumeRequest\x1a\x0f.ResumeResponse\x12\x41\n\x0ePurgeInstances\x12\x16.PurgeInstancesRequest\x1a\x17.PurgeInstancesResponse\x12\x31\n\x0cGetWorkItems\x12\x14.GetWorkItemsRequest\x1a\t.WorkItem0\x01\x12@\n\x14\x43ompleteActivityTask\x12\x11.ActivityResponse\x1a\x15.CompleteTaskResponse\x12I\n\x18\x43ompleteOrchestratorTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\"\x03\x88\x02\x01\x12@\n\x14\x43ompleteWorkflowTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\x12Y\n\x16RerunWorkflowFromEvent\x12\x1e.RerunWorkflowFromEventRequest\x1a\x1f.RerunWorkflowFromEventResponse\x12\x44\n\x0fListInstanceIDs\x12\x17.ListInstanceIDsRequest\x1a\x18.ListInstanceIDsResponse\x12M\n\x12GetInstanceHistory\x12\x1a.GetInstanceHistoryRequest\x1a\x1b.GetInstanceHistoryResponseBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = 
globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -42,66 +42,66 @@ _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_options = b'8\001' _globals['_TASKHUBSIDECARSERVICE'].methods_by_name['CompleteOrchestratorTask']._loaded_options = None _globals['_TASKHUBSIDECARSERVICE'].methods_by_name['CompleteOrchestratorTask']._serialized_options = b'\210\002\001' - _globals['_WORKERCAPABILITY']._serialized_start=3525 - _globals['_WORKERCAPABILITY']._serialized_end=3619 + _globals['_WORKERCAPABILITY']._serialized_start=3673 + _globals['_WORKERCAPABILITY']._serialized_end=3767 _globals['_ACTIVITYREQUEST']._serialized_start=196 - _globals['_ACTIVITYREQUEST']._serialized_end=448 - _globals['_ACTIVITYRESPONSE']._serialized_start=451 - _globals['_ACTIVITYRESPONSE']._serialized_end=621 - _globals['_WORKFLOWREQUEST']._serialized_start=624 - _globals['_WORKFLOWREQUEST']._serialized_end=866 - _globals['_WORKFLOWRESPONSE']._serialized_start=869 - _globals['_WORKFLOWRESPONSE']._serialized_end=1127 - _globals['_CREATEINSTANCEREQUEST']._serialized_start=1130 - _globals['_CREATEINSTANCEREQUEST']._serialized_end=1561 - _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_start=1484 - _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_end=1527 - _globals['_CREATEINSTANCERESPONSE']._serialized_start=1563 - _globals['_CREATEINSTANCERESPONSE']._serialized_end=1607 - _globals['_GETINSTANCEREQUEST']._serialized_start=1609 - _globals['_GETINSTANCEREQUEST']._serialized_end=1678 - _globals['_GETINSTANCERESPONSE']._serialized_start=1680 - _globals['_GETINSTANCERESPONSE']._serialized_end=1756 - _globals['_RAISEEVENTREQUEST']._serialized_start=1758 - _globals['_RAISEEVENTREQUEST']._serialized_end=1856 - _globals['_RAISEEVENTRESPONSE']._serialized_start=1858 - _globals['_RAISEEVENTRESPONSE']._serialized_end=1878 - _globals['_TERMINATEREQUEST']._serialized_start=1880 - _globals['_TERMINATEREQUEST']._serialized_end=1983 - 
_globals['_TERMINATERESPONSE']._serialized_start=1985 - _globals['_TERMINATERESPONSE']._serialized_end=2004 - _globals['_SUSPENDREQUEST']._serialized_start=2006 - _globals['_SUSPENDREQUEST']._serialized_end=2088 - _globals['_SUSPENDRESPONSE']._serialized_start=2090 - _globals['_SUSPENDRESPONSE']._serialized_end=2107 - _globals['_RESUMEREQUEST']._serialized_start=2109 - _globals['_RESUMEREQUEST']._serialized_end=2190 - _globals['_RESUMERESPONSE']._serialized_start=2192 - _globals['_RESUMERESPONSE']._serialized_end=2208 - _globals['_PURGEINSTANCESREQUEST']._serialized_start=2211 - _globals['_PURGEINSTANCESREQUEST']._serialized_end=2369 - _globals['_PURGEINSTANCEFILTER']._serialized_start=2372 - _globals['_PURGEINSTANCEFILTER']._serialized_end=2542 - _globals['_PURGEINSTANCESRESPONSE']._serialized_start=2544 - _globals['_PURGEINSTANCESRESPONSE']._serialized_end=2646 - _globals['_GETWORKITEMSREQUEST']._serialized_start=2648 - _globals['_GETWORKITEMSREQUEST']._serialized_end=2693 - _globals['_WORKITEM']._serialized_start=2696 - _globals['_WORKITEM']._serialized_end=2850 - _globals['_COMPLETETASKRESPONSE']._serialized_start=2852 - _globals['_COMPLETETASKRESPONSE']._serialized_end=2874 - _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_start=2877 - _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_end=3138 - _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_start=3140 - _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_end=3195 - _globals['_LISTINSTANCEIDSREQUEST']._serialized_start=3197 - _globals['_LISTINSTANCEIDSREQUEST']._serialized_end=3311 - _globals['_LISTINSTANCEIDSRESPONSE']._serialized_start=3313 - _globals['_LISTINSTANCEIDSRESPONSE']._serialized_end=3413 - _globals['_GETINSTANCEHISTORYREQUEST']._serialized_start=3415 - _globals['_GETINSTANCEHISTORYREQUEST']._serialized_end=3462 - _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_start=3464 - _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_end=3523 - 
_globals['_TASKHUBSIDECARSERVICE']._serialized_start=3622 - _globals['_TASKHUBSIDECARSERVICE']._serialized_end=4750 + _globals['_ACTIVITYREQUEST']._serialized_end=522 + _globals['_ACTIVITYRESPONSE']._serialized_start=525 + _globals['_ACTIVITYRESPONSE']._serialized_end=695 + _globals['_WORKFLOWREQUEST']._serialized_start=698 + _globals['_WORKFLOWREQUEST']._serialized_end=1014 + _globals['_WORKFLOWRESPONSE']._serialized_start=1017 + _globals['_WORKFLOWRESPONSE']._serialized_end=1275 + _globals['_CREATEINSTANCEREQUEST']._serialized_start=1278 + _globals['_CREATEINSTANCEREQUEST']._serialized_end=1709 + _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_start=1632 + _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_end=1675 + _globals['_CREATEINSTANCERESPONSE']._serialized_start=1711 + _globals['_CREATEINSTANCERESPONSE']._serialized_end=1755 + _globals['_GETINSTANCEREQUEST']._serialized_start=1757 + _globals['_GETINSTANCEREQUEST']._serialized_end=1826 + _globals['_GETINSTANCERESPONSE']._serialized_start=1828 + _globals['_GETINSTANCERESPONSE']._serialized_end=1904 + _globals['_RAISEEVENTREQUEST']._serialized_start=1906 + _globals['_RAISEEVENTREQUEST']._serialized_end=2004 + _globals['_RAISEEVENTRESPONSE']._serialized_start=2006 + _globals['_RAISEEVENTRESPONSE']._serialized_end=2026 + _globals['_TERMINATEREQUEST']._serialized_start=2028 + _globals['_TERMINATEREQUEST']._serialized_end=2131 + _globals['_TERMINATERESPONSE']._serialized_start=2133 + _globals['_TERMINATERESPONSE']._serialized_end=2152 + _globals['_SUSPENDREQUEST']._serialized_start=2154 + _globals['_SUSPENDREQUEST']._serialized_end=2236 + _globals['_SUSPENDRESPONSE']._serialized_start=2238 + _globals['_SUSPENDRESPONSE']._serialized_end=2255 + _globals['_RESUMEREQUEST']._serialized_start=2257 + _globals['_RESUMEREQUEST']._serialized_end=2338 + _globals['_RESUMERESPONSE']._serialized_start=2340 + _globals['_RESUMERESPONSE']._serialized_end=2356 + 
_globals['_PURGEINSTANCESREQUEST']._serialized_start=2359 + _globals['_PURGEINSTANCESREQUEST']._serialized_end=2517 + _globals['_PURGEINSTANCEFILTER']._serialized_start=2520 + _globals['_PURGEINSTANCEFILTER']._serialized_end=2690 + _globals['_PURGEINSTANCESRESPONSE']._serialized_start=2692 + _globals['_PURGEINSTANCESRESPONSE']._serialized_end=2794 + _globals['_GETWORKITEMSREQUEST']._serialized_start=2796 + _globals['_GETWORKITEMSREQUEST']._serialized_end=2841 + _globals['_WORKITEM']._serialized_start=2844 + _globals['_WORKITEM']._serialized_end=2998 + _globals['_COMPLETETASKRESPONSE']._serialized_start=3000 + _globals['_COMPLETETASKRESPONSE']._serialized_end=3022 + _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_start=3025 + _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_end=3286 + _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_start=3288 + _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_end=3343 + _globals['_LISTINSTANCEIDSREQUEST']._serialized_start=3345 + _globals['_LISTINSTANCEIDSREQUEST']._serialized_end=3459 + _globals['_LISTINSTANCEIDSRESPONSE']._serialized_start=3461 + _globals['_LISTINSTANCEIDSRESPONSE']._serialized_end=3561 + _globals['_GETINSTANCEHISTORYREQUEST']._serialized_start=3563 + _globals['_GETINSTANCEHISTORYREQUEST']._serialized_end=3610 + _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_start=3612 + _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_end=3671 + _globals['_TASKHUBSIDECARSERVICE']._serialized_start=3770 + _globals['_TASKHUBSIDECARSERVICE']._serialized_end=4898 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi index 8786348eb..a75bb3d89 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi @@ -19,10 +19,10 @@ from dapr.ext.workflow._durabletask.internal import orchestrator_actions_pb2 as import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -62,6 +62,7 @@ class ActivityRequest(_message.Message): TASKID_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int name: _builtins.str taskId: _builtins.int taskExecutionId: _builtins.str @@ -73,6 +74,13 @@ class ActivityRequest(_message.Message): def workflowInstance(self) -> _orchestration_pb2.WorkflowInstance: ... @_builtins.property def parentTraceContext(self) -> _orchestration_pb2.TraceContext: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from the calling workflow. + Delivered via the work item stream to the SDK, so that the + activity function can access it via ctx. + """ + def __init__( self, *, @@ -83,11 +91,15 @@ class ActivityRequest(_message.Message): taskId: _builtins.int = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., taskExecutionId: _builtins.str = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "parentTraceContext", b"parentTraceContext", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "input", b"input", "parentTraceContext", b"parentTraceContext", "propagatedHistory", b"propagatedHistory", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "taskExecutionId", b"taskExecutionId", "taskId", b"taskId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "propagatedHistory", b"propagatedHistory", "taskExecutionId", b"taskExecutionId", "taskId", b"taskId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... Global___ActivityRequest: _TypeAlias = ActivityRequest # noqa: Y015 @@ -120,6 +132,7 @@ class ActivityResponse(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["completionToken", b"completionToken", "failureDetails", b"failureDetails", "instanceId", b"instanceId", "result", b"result", "taskId", b"taskId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ActivityResponse: _TypeAlias = ActivityResponse # noqa: Y015 @@ -133,6 +146,7 @@ class WorkflowRequest(_message.Message): NEWEVENTS_FIELD_NUMBER: _builtins.int REQUIRESHISTORYSTREAMING_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int instanceId: _builtins.str requiresHistoryStreaming: _builtins.bool @_builtins.property @@ -143,6 +157,13 @@ class WorkflowRequest(_message.Message): def newEvents(self) -> _containers.RepeatedCompositeFieldContainer[_history_events_pb2.HistoryEvent]: ... @_builtins.property def router(self) -> _orchestration_pb2.TaskRouter: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history from a parent workflow. + Delivered via the work item stream to the SDK, so that the + workflow function can access it via ctx. + """ + def __init__( self, *, @@ -152,13 +173,19 @@ class WorkflowRequest(_message.Message): newEvents: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., requiresHistoryStreaming: _builtins.bool = ..., router: _orchestration_pb2.TaskRouter | None = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "executionId", b"executionId", "router", b"router"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "_router", b"_router", "executionId", b"executionId", "propagatedHistory", b"propagatedHistory", "router", b"router"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "executionId", b"executionId", "instanceId", b"instanceId", "newEvents", b"newEvents", "pastEvents", b"pastEvents", "requiresHistoryStreaming", b"requiresHistoryStreaming", "router", b"router"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "_router", b"_router", "executionId", b"executionId", "instanceId", b"instanceId", "newEvents", b"newEvents", "pastEvents", b"pastEvents", "propagatedHistory", b"propagatedHistory", "requiresHistoryStreaming", b"requiresHistoryStreaming", "router", b"router"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 + @_typing.overload + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... + @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... 
Global___WorkflowRequest: _TypeAlias = WorkflowRequest # noqa: Y015 @@ -225,8 +252,11 @@ class CreateInstanceRequest(_message.Message): key: _builtins.str = ..., value: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... INSTANCEID_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int @@ -266,6 +296,7 @@ class CreateInstanceRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CreateInstanceRequest: _TypeAlias = CreateInstanceRequest # noqa: Y015 @@ -280,8 +311,11 @@ class CreateInstanceResponse(_message.Message): *, instanceId: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CreateInstanceResponse: _TypeAlias = CreateInstanceResponse # noqa: Y015 @@ -299,8 +333,11 @@ class GetInstanceRequest(_message.Message): instanceId: _builtins.str = ..., getInputsAndOutputs: _builtins.bool = ..., ) -> None: ... 
+ _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["getInputsAndOutputs", b"getInputsAndOutputs", "instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetInstanceRequest: _TypeAlias = GetInstanceRequest # noqa: Y015 @@ -323,6 +360,7 @@ class GetInstanceResponse(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["exists", b"exists", "workflowState", b"workflowState"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetInstanceResponse: _TypeAlias = GetInstanceResponse # noqa: Y015 @@ -348,6 +386,7 @@ class RaiseEventRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "instanceId", b"instanceId", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RaiseEventRequest: _TypeAlias = RaiseEventRequest # noqa: Y015 @@ -360,6 +399,11 @@ class RaiseEventResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RaiseEventResponse: _TypeAlias = RaiseEventResponse # noqa: Y015 @@ -385,6 +429,7 @@ class TerminateRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "output", b"output", "recursive", b"recursive"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TerminateRequest: _TypeAlias = TerminateRequest # noqa: Y015 @@ -397,6 +442,11 @@ class TerminateResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TerminateResponse: _TypeAlias = TerminateResponse # noqa: Y015 @@ -419,6 +469,7 @@ class SuspendRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "reason", b"reason"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___SuspendRequest: _TypeAlias = SuspendRequest # noqa: Y015 @@ -431,6 +482,11 @@ class SuspendResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___SuspendResponse: _TypeAlias = SuspendResponse # noqa: Y015 @@ -453,6 +509,7 @@ class ResumeRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "reason", b"reason"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ResumeRequest: _TypeAlias = ResumeRequest # noqa: Y015 @@ -465,6 +522,11 @@ class ResumeResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ResumeResponse: _TypeAlias = ResumeResponse # noqa: Y015 @@ -538,6 +600,7 @@ class PurgeInstanceFilter(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["createdTimeFrom", b"createdTimeFrom", "createdTimeTo", b"createdTimeTo", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___PurgeInstanceFilter: _TypeAlias = PurgeInstanceFilter # noqa: Y015 @@ -560,6 +623,7 @@ class PurgeInstancesResponse(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["deletedInstanceCount", b"deletedInstanceCount", "isComplete", b"isComplete"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___PurgeInstancesResponse: _TypeAlias = PurgeInstancesResponse # noqa: Y015 @@ -570,6 +634,11 @@ class GetWorkItemsRequest(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
+ _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetWorkItemsRequest: _TypeAlias = GetWorkItemsRequest # noqa: Y015 @@ -611,6 +680,11 @@ class CompleteTaskResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteTaskResponse: _TypeAlias = CompleteTaskResponse # noqa: Y015 @@ -695,8 +769,11 @@ class RerunWorkflowFromEventResponse(_message.Message): *, newInstanceID: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["newInstanceID", b"newInstanceID"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RerunWorkflowFromEventResponse: _TypeAlias = RerunWorkflowFromEventResponse # noqa: Y015 @@ -785,8 +862,11 @@ class GetInstanceHistoryRequest(_message.Message): *, instanceId: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___GetInstanceHistoryRequest: _TypeAlias = GetInstanceHistoryRequest # noqa: Y015 @@ -804,7 +884,10 @@ class GetInstanceHistoryResponse(_message.Message): *, events: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["events", b"events"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetInstanceHistoryResponse: _TypeAlias = GetInstanceHistoryResponse # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py index 85712b33c..39de6a5cf 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py @@ -6,7 +6,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from dapr.ext.workflow._durabletask.internal import orchestrator_service_pb2 as orchestrator__service__pb2 -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py index 4e1561f4e..645e902d6 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x13runtime_state.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\x13RuntimeStateStalled\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xbc\x05\n\x14WorkflowRuntimeState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\tnewEvents\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12 \n\toldEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12#\n\x0cpendingTasks\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12$\n\rpendingTimers\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12S\n\x0fpendingMessages\x18\x06 \x03(\x0b\x32:.durabletask.protos.backend.v1.WorkflowRuntimeStateMessage\x12*\n\nstartEvent\x18\x07 \x01(\x0b\x32\x16.ExecutionStartedEvent\x12\x30\n\x0e\x63ompletedEvent\x18\x08 \x01(\x0b\x32\x18.ExecutionCompletedEvent\x12/\n\x0b\x63reatedTime\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flastUpdatedTime\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcompletedTime\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0e\x63ontinuedAsNew\x18\x0c \x01(\x08\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x32\n\x0c\x63ustomStatus\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\x07stalled\x18\x0f \x01(\x0b\x32\x32.durabletask.protos.backend.v1.RuntimeStateStalledH\x00\x88\x01\x01\x42\n\n\x08_stalled\"\\\n\x1bWorkflowRuntimeStateMessage\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x18\n\x10targetInstanceId\x18\x02 \x01(\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x13runtime_state.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\x13RuntimeStateStalled\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xbc\x05\n\x14WorkflowRuntimeState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\tnewEvents\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12 \n\toldEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12#\n\x0cpendingTasks\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12$\n\rpendingTimers\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12S\n\x0fpendingMessages\x18\x06 \x03(\x0b\x32:.durabletask.protos.backend.v1.WorkflowRuntimeStateMessage\x12*\n\nstartEvent\x18\x07 \x01(\x0b\x32\x16.ExecutionStartedEvent\x12\x30\n\x0e\x63ompletedEvent\x18\x08 \x01(\x0b\x32\x18.ExecutionCompletedEvent\x12/\n\x0b\x63reatedTime\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flastUpdatedTime\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcompletedTime\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0e\x63ontinuedAsNew\x18\x0c \x01(\x08\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x32\n\x0c\x63ustomStatus\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\x07stalled\x18\x0f \x01(\x0b\x32\x32.durabletask.protos.backend.v1.RuntimeStateStalledH\x00\x88\x01\x01\x42\n\n\x08_stalled\"\xa6\x01\n\x1bWorkflowRuntimeStateMessage\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x18\n\x10targetInstanceId\x18\x02 \x01(\t\x12\x32\n\x11propagatedHistory\x18\x03 \x01(\x0b\x32\x12.PropagatedHistoryH\x00\x88\x01\x01\x42\x14\n\x12_propagatedHistoryBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ 
-40,6 +40,6 @@ _globals['_RUNTIMESTATESTALLED']._serialized_end=257 _globals['_WORKFLOWRUNTIMESTATE']._serialized_start=260 _globals['_WORKFLOWRUNTIMESTATE']._serialized_end=960 - _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_start=962 - _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_end=1054 + _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_start=963 + _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_end=1129 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi index fd0b75f80..6f41ceee6 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi @@ -142,18 +142,31 @@ class WorkflowRuntimeStateMessage(_message.Message): HISTORYEVENT_FIELD_NUMBER: _builtins.int TARGETINSTANCEID_FIELD_NUMBER: _builtins.int + PROPAGATEDHISTORY_FIELD_NUMBER: _builtins.int targetInstanceId: _builtins.str @_builtins.property def historyEvent(self) -> _history_events_pb2.HistoryEvent: ... + @_builtins.property + def propagatedHistory(self) -> _history_events_pb2.PropagatedHistory: + """Propagated history to deliver to the child workflow. + This is a transport field used when creating child workflows with + history propagation enabled. It is NOT stored as part of any + workflow's history events. + """ + def __init__( self, *, historyEvent: _history_events_pb2.HistoryEvent | None = ..., targetInstanceId: _builtins.str = ..., + propagatedHistory: _history_events_pb2.PropagatedHistory | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["historyEvent", b"historyEvent"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["historyEvent", b"historyEvent", "targetInstanceId", b"targetInstanceId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory", "historyEvent", b"historyEvent", "propagatedHistory", b"propagatedHistory", "targetInstanceId", b"targetInstanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__propagatedHistory: _TypeAlias = _typing.Literal["propagatedHistory"] # noqa: Y015 + _WhichOneofArgType__propagatedHistory: _TypeAlias = _typing.Literal["_propagatedHistory", b"_propagatedHistory"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__propagatedHistory) -> _WhichOneofReturnType__propagatedHistory | None: ... 
Global___WorkflowRuntimeStateMessage: _TypeAlias = WorkflowRuntimeStateMessage # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py index bb1e985eb..56526119e 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py index ceb8672be..3e522cad9 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py @@ -14,7 +14,13 @@ """ from .dapr_workflow_client import DaprWorkflowClient +from .mcp import DaprMCPClient + +# Re-export MCPToolDef so async users don't need to import from the sync module. +from dapr.ext.workflow.mcp import MCPToolDef __all__ = [ 'DaprWorkflowClient', + 'DaprMCPClient', + 'MCPToolDef', ] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/mcp.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/mcp.py new file mode 100644 index 000000000..5afa0ffa2 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/mcp.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +"""Async variant of :class:`~dapr.ext.workflow.mcp.DaprMCPClient`.""" + +import logging +import uuid +from typing import Optional, Set + +from dapr.ext.workflow.aio.dapr_workflow_client import DaprWorkflowClient +from dapr.ext.workflow.mcp import MCP_WORKFLOW_PREFIX, _DaprMCPClientBase, _MCP_METHOD_LIST_TOOLS +from dapr.ext.workflow.workflow_state import WorkflowStatus + +logger = logging.getLogger(__name__) + + +class DaprMCPClient(_DaprMCPClientBase): + """Async framework-agnostic client for discovering MCP tools via Dapr workflows. + + This is the async counterpart of :class:`dapr.ext.workflow.mcp.DaprMCPClient`. + All methods that interact with the Dapr sidecar are ``async``. + + Args: + timeout_in_seconds: Maximum seconds to wait for each ``ListTools`` + workflow to complete. + allowed_tools: Optional set of tool names to keep. + wf_client: Optional pre-configured async :class:`DaprWorkflowClient`. + + Example:: + + from dapr.ext.workflow.aio import DaprMCPClient + + client = DaprMCPClient() + await client.connect("weather") + tools = client.get_all_tools() + """ + + def __init__( + self, + *, + timeout_in_seconds: int = 60, + allowed_tools: Optional[Set[str]] = None, + wf_client: Optional[DaprWorkflowClient] = None, + ) -> None: + super().__init__( + timeout_in_seconds=timeout_in_seconds, + allowed_tools=allowed_tools, + ) + self._wf_client = wf_client or DaprWorkflowClient() + + async def connect(self, mcpserver_name: str) -> None: + """Discover tools from a Dapr MCPServer resource. 
+ + Schedules ``dapr.internal.mcp..ListTools``, awaits workflow + completion, and caches the resulting :class:`MCPToolDef` list. + + Args: + mcpserver_name: Name of the ``MCPServer`` Dapr resource (must + match the ``metadata.name`` in the MCPServer YAML). + + Raises: + RuntimeError: If the workflow times out or ends with a non-COMPLETED + status. + ValueError: If *mcpserver_name* is empty. + """ + if not mcpserver_name or not mcpserver_name.strip(): + raise ValueError("mcpserver_name must be a non-empty string") + + instance_id = str(uuid.uuid4()) + # TODO(@sicoyle): reminder to add a func like I have in durabletask-go to use for here instead of building like this! + workflow_name = f"{MCP_WORKFLOW_PREFIX}{mcpserver_name}{_MCP_METHOD_LIST_TOOLS}" + + logger.debug( + "Scheduling %s (instance=%s)", workflow_name, instance_id + ) + + await self._wf_client.schedule_new_workflow( + workflow=workflow_name, + input={"mcpServerName": mcpserver_name}, + instance_id=instance_id, + ) + + state = await self._wf_client.wait_for_workflow_completion( + instance_id=instance_id, + timeout_in_seconds=self._timeout, + fetch_payloads=True, + ) + + if state is None: + raise RuntimeError( + f"ListTools workflow for MCPServer '{mcpserver_name}' " + f"timed out after {self._timeout}s" + ) + + if state.runtime_status != WorkflowStatus.COMPLETED: + raise RuntimeError( + f"ListTools workflow for MCPServer '{mcpserver_name}' " + f"ended with status {state.runtime_status.name!r}: " + f"{state.serialized_output or ''}" + ) + + self._process_list_tools_result(mcpserver_name, state.serialized_output) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/mcp.py b/ext/dapr-ext-workflow/dapr/ext/workflow/mcp.py new file mode 100644 index 000000000..ce1d038fe --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/mcp.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- + +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +""" +DaprMCPClient — framework-agnostic client for discovering and cataloguing +MCP tools exposed by Dapr MCPServer resources. + +The client schedules Dapr's built-in workflow orchestrations +(``dapr.internal.mcp..ListTools`` / ``CallTool``) and returns +plain :class:`MCPToolDef` dataclasses that any agent framework can consume. + +Usage:: + + from dapr.ext.workflow import DaprMCPClient + + client = DaprMCPClient() + client.connect("weather") + for tool in client.get_all_tools(): + print(tool.name, tool.description) +""" + +import json +import logging +import uuid +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Set + +from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient +from dapr.ext.workflow.workflow_state import WorkflowStatus + +logger = logging.getLogger(__name__) + +# MCP workflow name constants — mirrors the proto enums in +# dapr/dapr/dapr/proto/workflows/v1/mcp.proto as plain strings. +MCP_WORKFLOW_PREFIX: str = "dapr.internal.mcp." +"""Prefix for all built-in MCP workflow orchestrations.""" + +_MCP_METHOD_LIST_TOOLS = ".ListTools" +_MCP_METHOD_CALL_TOOL = ".CallTool" + + +# TODO(@sicoyle): see if I can use the mcp pkg class instead for this? +@dataclass(frozen=True) +class MCPToolDef: + """Framework-agnostic description of a single MCP tool. + + Returned by :meth:`DaprMCPClient.get_all_tools` and consumed by + agent frameworks to build their own tool wrappers. 
+ + Attributes: + name: The MCP tool name as returned by the server (e.g. ``get_weather``). + description: Human-readable description of what the tool does. + input_schema: JSON Schema dict describing the tool's input parameters. + server_name: Name of the Dapr ``MCPServer`` resource that hosts this tool. + call_tool_workflow: Pre-computed workflow name for invoking this tool + (e.g. ``dapr.internal.mcp.weather.CallTool.get_weather``). + """ + + name: str + description: str + input_schema: Dict[str, Any] = field(default_factory=dict) + server_name: str = "" + call_tool_workflow: str = "" + + +class _DaprMCPClientBase: + """Shared state and getters for sync/async MCP clients.""" + + def __init__( + self, + *, + timeout_in_seconds: int = 60, + allowed_tools: Optional[Set[str]] = None, + ) -> None: + if timeout_in_seconds <= 0: + raise ValueError("timeout_in_seconds must be a positive integer") + self._timeout = timeout_in_seconds + self._allowed_tools = allowed_tools + self._server_tools: Dict[str, List[MCPToolDef]] = {} + + def _process_list_tools_result( + self, mcpserver_name: str, serialized_output: Optional[str] + ) -> None: + """Parse a ListTools workflow output and cache the MCPToolDef list.""" + try: + result = json.loads(serialized_output) if serialized_output else {} + except json.JSONDecodeError as exc: + raise RuntimeError( + f"ListTools workflow for MCPServer '{mcpserver_name}' returned " + f"malformed JSON: {exc}" + ) from exc + + tools: List[MCPToolDef] = [] + for tool_def in result.get("tools", []): + name = tool_def.get("name", "") + if self._allowed_tools is not None and name not in self._allowed_tools: + logger.debug("Skipping tool '%s' (not in allowed_tools)", name) + continue + # Workflow name includes the tool name for per-tool observability: + # dapr.internal.mcp..CallTool. 
+ call_tool_wf = f"{MCP_WORKFLOW_PREFIX}{mcpserver_name}{_MCP_METHOD_CALL_TOOL}.{name}" + tools.append( + MCPToolDef( + name=name, + description=tool_def.get("description", ""), + input_schema=tool_def.get("inputSchema") or {}, + server_name=mcpserver_name, + call_tool_workflow=call_tool_wf, + ) + ) + + self._server_tools[mcpserver_name] = tools + logger.info( + "Connected to MCPServer '%s': %d tool(s) loaded", + mcpserver_name, + len(tools), + ) + + def get_all_tools(self) -> List[MCPToolDef]: + """Return all cached tools from every connected MCPServer.""" + return [t for tools in self._server_tools.values() for t in tools] + + def get_server_tools(self, server_name: str) -> List[MCPToolDef]: + """Return cached tools for a specific MCPServer.""" + return list(self._server_tools.get(server_name, [])) + + def get_connected_servers(self) -> List[str]: + """Return the names of all MCPServers connected so far.""" + return list(self._server_tools.keys()) + + +class DaprMCPClient(_DaprMCPClientBase): + """Framework-agnostic client for discovering MCP tools via Dapr workflows. + + This client schedules Dapr's built-in workflow orchestrations + (``ListTools`` / ``CallTool``) via :class:`DaprWorkflowClient`. + It returns :class:`MCPToolDef` dataclasses — plain data objects + with no framework dependencies — that any agent framework can convert + to its own tool type. + + Args: + timeout_in_seconds: Maximum seconds to wait for each ``ListTools`` + workflow to complete. Defaults to 60. + allowed_tools: Optional set of tool names to keep. When provided, + only tools whose name appears in this set are included in the + catalogue. ``None`` (default) keeps all tools. + wf_client: Optional pre-configured :class:`DaprWorkflowClient`. + If omitted, a new client is created with default settings. 
+ + Example:: + + client = DaprMCPClient() + client.connect("weather") + tools = client.get_all_tools() # List[MCPToolDef] + + # Each framework converts MCPToolDef to its own tool type: + for t in tools: + print(f"{t.name}: {t.call_tool_workflow}") + """ + + def __init__( + self, + *, + timeout_in_seconds: int = 60, + allowed_tools: Optional[Set[str]] = None, + wf_client: Optional[DaprWorkflowClient] = None, + ) -> None: + super().__init__( + timeout_in_seconds=timeout_in_seconds, + allowed_tools=allowed_tools, + ) + self._wf_client = wf_client or DaprWorkflowClient() + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + + def connect(self, mcpserver_name: str) -> None: + """Discover tools from a Dapr MCPServer resource. + + Schedules ``dapr.internal.mcp..ListTools``, blocks until the + workflow completes, and caches the resulting :class:`MCPToolDef` list. + + Args: + mcpserver_name: Name of the ``MCPServer`` Dapr resource (must + match the ``metadata.name`` in the MCPServer YAML). + + Raises: + RuntimeError: If the workflow times out or ends with a non-COMPLETED + status. + """ + if not mcpserver_name or not mcpserver_name.strip(): + raise ValueError("mcpserver_name must be a non-empty string") + + instance_id = str(uuid.uuid4()) + # TODO(@sicoyle): reminder to add a func like I have in durabletask-go to use for here instead of building like this! 
+ workflow_name = f"{MCP_WORKFLOW_PREFIX}{mcpserver_name}{_MCP_METHOD_LIST_TOOLS}" + + logger.debug( + "Scheduling %s (instance=%s)", workflow_name, instance_id + ) + + self._wf_client.schedule_new_workflow( + workflow=workflow_name, + input={"mcpServerName": mcpserver_name}, + instance_id=instance_id, + ) + + state = self._wf_client.wait_for_workflow_completion( + instance_id=instance_id, + timeout_in_seconds=self._timeout, + fetch_payloads=True, + ) + + if state is None: + raise RuntimeError( + f"ListTools workflow for MCPServer '{mcpserver_name}' " + f"timed out after {self._timeout}s" + ) + + if state.runtime_status != WorkflowStatus.COMPLETED: + raise RuntimeError( + f"ListTools workflow for MCPServer '{mcpserver_name}' " + f"ended with status {state.runtime_status.name!r}: " + f"{state.serialized_output or ''}" + ) + + self._process_list_tools_result(mcpserver_name, state.serialized_output) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/mcp_schema.py b/ext/dapr-ext-workflow/dapr/ext/workflow/mcp_schema.py new file mode 100644 index 000000000..41eabafd5 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/mcp_schema.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- + +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +"""Utility for converting MCP JSON Schema definitions to Pydantic models.""" + +import logging +from typing import Any, Dict, List, Optional, Type + +from pydantic import BaseModel, Field, create_model + +logger = logging.getLogger(__name__) + +# Mapping from JSON Schema types to Python types. +TYPE_MAPPING = { + "string": str, + "number": float, + "integer": int, + "boolean": bool, + "object": dict, + "array": list, + "null": type(None), +} + +# TODO(@sicoyle): see if I can remove this and use something from official modelcontextprotocol python-sdk instead??? +def create_pydantic_model_from_schema( + schema: Dict[str, Any], model_name: str +) -> Type[BaseModel]: + """Create a Pydantic model from a JSON Schema definition. + + This function converts a JSON Schema object (commonly used in MCP tool + definitions) to a Pydantic model that can be used for argument validation. + + Args: + schema: JSON Schema dictionary containing type information. + model_name: Name for the generated model class. + + Returns: + A dynamically created Pydantic model class. + + Raises: + ValueError: If the schema is invalid or cannot be converted. + """ + logger.debug("Creating Pydantic model '%s' from schema", model_name) + + try: + properties = schema.get("properties", {}) + required = set(schema.get("required", [])) + + # Handle schemas that wrap arguments in a 'kwargs' field. + # Some MCP tools use this pattern — unwrap to accept flat arguments. 
+ if ( + len(properties) == 1 + and "kwargs" in properties + and properties["kwargs"].get("type") == "object" + and "properties" in properties["kwargs"] + ): + logger.debug( + "Detected 'kwargs' wrapper in schema for '%s', unwrapping", model_name + ) + kwargs_schema = properties["kwargs"] + properties = kwargs_schema["properties"] + required = set(kwargs_schema.get("required", [])) + + fields: Dict[str, Any] = {} + + for field_name, field_props in properties.items(): + # Handle anyOf/oneOf for nullable/union fields. + if "anyOf" in field_props or "oneOf" in field_props: + variants = field_props.get("anyOf") or field_props.get("oneOf") + types = [v.get("type", "string") for v in variants] + has_null = "null" in types + non_null_variants = [v for v in variants if v.get("type") != "null"] + if non_null_variants: + primary_type = non_null_variants[0].get("type", "string") + field_type = TYPE_MAPPING.get(primary_type, str) + if primary_type == "array" and "items" in non_null_variants[0]: + item_type = non_null_variants[0]["items"].get("type", "string") + field_type = List[TYPE_MAPPING.get(item_type, str)] + elif primary_type == "object": + field_type = dict + else: + field_type = str + if has_null: + field_type = Optional[field_type] + else: + json_type = field_props.get("type", "string") + field_type = TYPE_MAPPING.get(json_type, str) + if json_type == "array" and "items" in field_props: + item_type = field_props["items"].get("type", "string") + field_type = List[TYPE_MAPPING.get(item_type, str)] + + if field_name in required: + default = ... 
+            else:
+                default = None
+                if not (
+                    hasattr(field_type, "__origin__")
+                    and type(None) in field_type.__args__
+                ):
+                    field_type = Optional[field_type]
+
+            field_description = field_props.get("description", "")
+            fields[field_name] = (
+                field_type,
+                Field(default, description=field_description),
+            )
+
+        return create_model(model_name, **fields)
+
+    except Exception as e:
+        logger.error("Failed to create model from schema: %s", e)
+        raise ValueError(f"Invalid schema: {e}") from e
diff --git a/ext/dapr-ext-workflow/tests/test_mcp_client.py b/ext/dapr-ext-workflow/tests/test_mcp_client.py
new file mode 100644
index 000000000..a0dca3d00
--- /dev/null
+++ b/ext/dapr-ext-workflow/tests/test_mcp_client.py
@@ -0,0 +1,361 @@
+# -*- coding: utf-8 -*-
+
+"""
+Copyright 2026 The Dapr Authors
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+""" + +import json +import unittest +from datetime import datetime +from unittest import mock +from unittest.mock import MagicMock + +from dapr.ext.workflow._durabletask import client +from dapr.ext.workflow.mcp import DaprMCPClient, MCPToolDef, MCP_WORKFLOW_PREFIX +from dapr.ext.workflow.workflow_state import WorkflowState, WorkflowStatus + + +def _make_completed_state(output_json: dict) -> WorkflowState: + """Create a WorkflowState that simulates a COMPLETED workflow.""" + inner = client.WorkflowState( + instance_id="test-id", + name="test-workflow", + runtime_status=client.OrchestrationStatus.COMPLETED, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output=json.dumps(output_json), + serialized_custom_status=None, + failure_details=None, + ) + return WorkflowState(inner) + + +def _make_failed_state() -> WorkflowState: + """Create a WorkflowState that simulates a FAILED workflow.""" + inner = client.WorkflowState( + instance_id="test-id", + name="test-workflow", + runtime_status=client.OrchestrationStatus.FAILED, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output="error details", + serialized_custom_status=None, + failure_details=None, + ) + return WorkflowState(inner) + + +SAMPLE_LIST_TOOLS_RESPONSE = { + "tools": [ + { + "name": "get_weather", + "description": "Get current weather for a location.", + "inputSchema": { + "type": "object", + "properties": { + "location": {"type": "string", "description": "City name"}, + }, + "required": ["location"], + }, + }, + { + "name": "get_forecast", + "description": "Get multi-day forecast.", + "inputSchema": { + "type": "object", + "properties": { + "location": {"type": "string"}, + "days": {"type": "integer"}, + }, + "required": ["location"], + }, + }, + ] +} + + +class TestMCPToolDef(unittest.TestCase): + """Tests for the MCPToolDef dataclass.""" + + def test_frozen(self): + tool = MCPToolDef( + name="test", + 
description="desc", + input_schema={"type": "object"}, + server_name="srv", + call_tool_workflow="dapr.internal.mcp.srv.CallTool", + ) + with self.assertRaises(AttributeError): + tool.name = "changed" + + def test_defaults(self): + tool = MCPToolDef(name="test", description="desc") + self.assertEqual(tool.input_schema, {}) + self.assertEqual(tool.server_name, "") + self.assertEqual(tool.call_tool_workflow, "") + + +class TestDaprMCPClientConnect(unittest.TestCase): + """Tests for DaprMCPClient.connect().""" + + def _make_client(self, wf_client: MagicMock) -> DaprMCPClient: + return DaprMCPClient(timeout_in_seconds=30, wf_client=wf_client) + + def test_connect_schedules_correct_workflow(self): + """connect() should schedule dapr.internal.mcp..ListTools.""" + mock_wf = MagicMock() + mock_wf.schedule_new_workflow.return_value = "inst-1" + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("weather") + + mock_wf.schedule_new_workflow.assert_called_once() + call_kwargs = mock_wf.schedule_new_workflow.call_args + self.assertEqual( + call_kwargs.kwargs["workflow"], + "dapr.internal.mcp.weather.ListTools", + ) + self.assertEqual( + call_kwargs.kwargs["input"], + {"mcpServerName": "weather"}, + ) + + def test_connect_caches_tools(self): + """connect() should cache MCPToolDef objects.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("weather") + + tools = mcp_client.get_all_tools() + self.assertEqual(len(tools), 2) + self.assertIsInstance(tools[0], MCPToolDef) + self.assertEqual(tools[0].name, "get_weather") + self.assertEqual(tools[1].name, "get_forecast") + + def test_connect_sets_server_name_and_workflow(self): + """Each MCPToolDef should have server_name and call_tool_workflow set.""" + mock_wf = 
MagicMock()
+        mock_wf.wait_for_workflow_completion.return_value = _make_completed_state(
+            SAMPLE_LIST_TOOLS_RESPONSE
+        )
+
+        mcp_client = self._make_client(mock_wf)
+        mcp_client.connect("weather")
+
+        tool = mcp_client.get_all_tools()[0]
+        self.assertEqual(tool.server_name, "weather")
+        self.assertEqual(
+            tool.call_tool_workflow,
+            "dapr.internal.mcp.weather.CallTool.get_weather",
+        )
+
+    def test_connect_preserves_description_and_schema(self):
+        """MCPToolDef should carry the original description and inputSchema."""
+        mock_wf = MagicMock()
+        mock_wf.wait_for_workflow_completion.return_value = _make_completed_state(
+            SAMPLE_LIST_TOOLS_RESPONSE
+        )
+
+        mcp_client = self._make_client(mock_wf)
+        mcp_client.connect("weather")
+
+        tool = mcp_client.get_all_tools()[0]
+        self.assertEqual(tool.description, "Get current weather for a location.")
+        self.assertIn("properties", tool.input_schema)
+
+    def test_connect_timeout_raises(self):
+        """connect() should raise RuntimeError on timeout (None state)."""
+        mock_wf = MagicMock()
+        mock_wf.wait_for_workflow_completion.return_value = None
+
+        mcp_client = self._make_client(mock_wf)
+        with self.assertRaises(RuntimeError) as ctx:
+            mcp_client.connect("weather")
+        self.assertIn("timed out", str(ctx.exception))
+
+    def test_connect_failed_status_raises(self):
+        """connect() should raise RuntimeError on FAILED workflow status."""
+        mock_wf = MagicMock()
+        mock_wf.wait_for_workflow_completion.return_value = _make_failed_state()
+
+        mcp_client = self._make_client(mock_wf)
+        with self.assertRaises(RuntimeError) as ctx:
+            mcp_client.connect("weather")
+        self.assertIn("FAILED", str(ctx.exception))
+
+    def test_connect_empty_tools(self):
+        """connect() should handle empty tools list gracefully."""
+        mock_wf = MagicMock()
+        mock_wf.wait_for_workflow_completion.return_value = _make_completed_state(
+            {"tools": []}
+        )
+
+        mcp_client = self._make_client(mock_wf)
+        mcp_client.connect("empty-server")
+
+        
self.assertEqual(len(mcp_client.get_all_tools()), 0) + self.assertIn("empty-server", mcp_client.get_connected_servers()) + + +class TestDaprMCPClientFiltering(unittest.TestCase): + """Tests for allowed_tools filtering.""" + + def test_allowed_tools_filters(self): + """Only tools in allowed_tools should be kept.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = DaprMCPClient( + allowed_tools={"get_weather"}, + wf_client=mock_wf, + ) + mcp_client.connect("weather") + + tools = mcp_client.get_all_tools() + self.assertEqual(len(tools), 1) + self.assertEqual(tools[0].name, "get_weather") + + def test_allowed_tools_none_keeps_all(self): + """allowed_tools=None should keep all tools.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = DaprMCPClient(allowed_tools=None, wf_client=mock_wf) + mcp_client.connect("weather") + + self.assertEqual(len(mcp_client.get_all_tools()), 2) + + +class TestDaprMCPClientMultiServer(unittest.TestCase): + """Tests for connecting to multiple MCPServer resources.""" + + def test_multiple_servers_accumulate(self): + """Tools from multiple connect() calls should accumulate.""" + mock_wf = MagicMock() + + weather_response = _make_completed_state(SAMPLE_LIST_TOOLS_RESPONSE) + local_response = _make_completed_state({ + "tools": [ + {"name": "search_files", "description": "Search files."}, + ] + }) + mock_wf.wait_for_workflow_completion.side_effect = [ + weather_response, + local_response, + ] + + mcp_client = DaprMCPClient(wf_client=mock_wf) + mcp_client.connect("weather") + mcp_client.connect("local-tools") + + self.assertEqual(len(mcp_client.get_all_tools()), 3) + self.assertEqual(len(mcp_client.get_server_tools("weather")), 2) + self.assertEqual(len(mcp_client.get_server_tools("local-tools")), 1) + self.assertEqual( + 
mcp_client.get_connected_servers(), + ["weather", "local-tools"], + ) + + def test_get_server_tools_unknown_returns_empty(self): + """get_server_tools() for unknown server returns empty list.""" + mock_wf = MagicMock() + mcp_client = DaprMCPClient(wf_client=mock_wf) + self.assertEqual(mcp_client.get_server_tools("nonexistent"), []) + + +class TestDaprMCPClientValidation(unittest.TestCase): + """Tests for input validation.""" + + def test_init_zero_timeout_raises(self): + with self.assertRaises(ValueError): + DaprMCPClient(timeout_in_seconds=0, wf_client=MagicMock()) + + def test_init_negative_timeout_raises(self): + with self.assertRaises(ValueError): + DaprMCPClient(timeout_in_seconds=-1, wf_client=MagicMock()) + + def test_connect_empty_server_name_raises(self): + mcp_client = DaprMCPClient(wf_client=MagicMock()) + with self.assertRaises(ValueError): + mcp_client.connect("") + + def test_connect_whitespace_server_name_raises(self): + mcp_client = DaprMCPClient(wf_client=MagicMock()) + with self.assertRaises(ValueError): + mcp_client.connect(" ") + + def test_connect_malformed_json_raises(self): + """connect() should raise RuntimeError on malformed JSON output.""" + mock_wf = MagicMock() + inner = client.WorkflowState( + instance_id="test", + name="test", + runtime_status=client.OrchestrationStatus.COMPLETED, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output="not valid json{{{", + serialized_custom_status=None, + failure_details=None, + ) + mock_wf.wait_for_workflow_completion.return_value = WorkflowState(inner) + + mcp_client = DaprMCPClient(wf_client=mock_wf) + with self.assertRaises(RuntimeError) as ctx: + mcp_client.connect("weather") + self.assertIn("malformed JSON", str(ctx.exception)) + + def test_connect_missing_tool_name_uses_empty_string(self): + """Tools without a 'name' field should use empty string.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = 
_make_completed_state({ + "tools": [{"description": "No name tool"}] + }) + + mcp_client = DaprMCPClient(wf_client=mock_wf) + mcp_client.connect("server") + + tools = mcp_client.get_all_tools() + self.assertEqual(len(tools), 1) + self.assertEqual(tools[0].name, "") + + +class TestMCPWorkflowPrefix(unittest.TestCase): + """Tests for the workflow naming constant.""" + + def test_prefix_value(self): + self.assertEqual(MCP_WORKFLOW_PREFIX, "dapr.internal.mcp.") + + def test_list_tools_name(self): + name = f"{MCP_WORKFLOW_PREFIX}weather.ListTools" + self.assertEqual(name, "dapr.internal.mcp.weather.ListTools") + + def test_call_tool_name(self): + name = f"{MCP_WORKFLOW_PREFIX}weather.CallTool" + self.assertEqual(name, "dapr.internal.mcp.weather.CallTool") + + +if __name__ == "__main__": + unittest.main() diff --git a/ext/dapr-ext-workflow/tests/test_mcp_schema.py b/ext/dapr-ext-workflow/tests/test_mcp_schema.py new file mode 100644 index 000000000..328f9b104 --- /dev/null +++ b/ext/dapr-ext-workflow/tests/test_mcp_schema.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import unittest +from typing import Optional + +from pydantic import BaseModel + +from dapr.ext.workflow.mcp_schema import create_pydantic_model_from_schema + + +class TestBasicTypes(unittest.TestCase): + """Tests for basic JSON Schema type conversions.""" + + def test_string_field(self): + schema = { + "type": "object", + "properties": {"name": {"type": "string", "description": "A name"}}, + "required": ["name"], + } + Model = create_pydantic_model_from_schema(schema, "TestModel") + instance = Model(name="Alice") + self.assertEqual(instance.name, "Alice") + + def test_integer_field(self): + schema = { + "type": "object", + "properties": {"count": {"type": "integer"}}, + "required": ["count"], + } + Model = create_pydantic_model_from_schema(schema, "IntModel") + instance = Model(count=42) + self.assertEqual(instance.count, 42) + + def test_number_field(self): + schema = { + "type": "object", + "properties": {"price": {"type": "number"}}, + "required": ["price"], + } + Model = create_pydantic_model_from_schema(schema, "NumModel") + instance = Model(price=9.99) + self.assertAlmostEqual(instance.price, 9.99) + + def test_boolean_field(self): + schema = { + "type": "object", + "properties": {"active": {"type": "boolean"}}, + "required": ["active"], + } + Model = create_pydantic_model_from_schema(schema, "BoolModel") + instance = Model(active=True) + self.assertTrue(instance.active) + + def test_array_field(self): + schema = { + "type": "object", + "properties": { + "tags": {"type": "array", "items": {"type": "string"}} + }, + "required": ["tags"], + } + Model = create_pydantic_model_from_schema(schema, "ArrayModel") + instance = Model(tags=["a", "b"]) + self.assertEqual(instance.tags, ["a", "b"]) + + +class TestRequiredOptional(unittest.TestCase): + """Tests for required vs optional field handling.""" + + def test_required_field_has_no_default(self): + schema = { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": ["location"], + 
} + Model = create_pydantic_model_from_schema(schema, "ReqModel") + with self.assertRaises(Exception): + Model() # Missing required field + + def test_optional_field_defaults_to_none(self): + schema = { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": [], + } + Model = create_pydantic_model_from_schema(schema, "OptModel") + instance = Model() + self.assertIsNone(instance.location) + + def test_mixed_required_optional(self): + schema = { + "type": "object", + "properties": { + "location": {"type": "string"}, + "days": {"type": "integer"}, + }, + "required": ["location"], + } + Model = create_pydantic_model_from_schema(schema, "MixedModel") + instance = Model(location="Tokyo") + self.assertEqual(instance.location, "Tokyo") + self.assertIsNone(instance.days) + + +class TestAnyOfOneOf(unittest.TestCase): + """Tests for anyOf/oneOf nullable/union field handling.""" + + def test_anyof_nullable_string(self): + schema = { + "type": "object", + "properties": { + "label": { + "anyOf": [ + {"type": "string"}, + {"type": "null"}, + ] + } + }, + "required": ["label"], + } + Model = create_pydantic_model_from_schema(schema, "NullableModel") + instance = Model(label=None) + self.assertIsNone(instance.label) + instance2 = Model(label="hello") + self.assertEqual(instance2.label, "hello") + + def test_oneof_nullable_integer(self): + schema = { + "type": "object", + "properties": { + "count": { + "oneOf": [ + {"type": "integer"}, + {"type": "null"}, + ] + } + }, + "required": ["count"], + } + Model = create_pydantic_model_from_schema(schema, "OneOfModel") + instance = Model(count=5) + self.assertEqual(instance.count, 5) + + +class TestKwargsUnwrapping(unittest.TestCase): + """Tests for the kwargs wrapper unwrapping pattern.""" + + def test_kwargs_wrapper_is_unwrapped(self): + """Schemas wrapping args in a 'kwargs' field should be unwrapped.""" + schema = { + "type": "object", + "properties": { + "kwargs": { + "type": "object", + "properties": { + 
"city": {"type": "string"}, + "units": {"type": "string"}, + }, + "required": ["city"], + } + }, + } + Model = create_pydantic_model_from_schema(schema, "KwargsModel") + instance = Model(city="Seattle") + self.assertEqual(instance.city, "Seattle") + self.assertIsNone(instance.units) + + def test_non_kwargs_not_unwrapped(self): + """Schemas without the kwargs wrapper should not be affected.""" + schema = { + "type": "object", + "properties": { + "city": {"type": "string"}, + }, + "required": ["city"], + } + Model = create_pydantic_model_from_schema(schema, "FlatModel") + instance = Model(city="Tokyo") + self.assertEqual(instance.city, "Tokyo") + + +class TestEmptyAndEdgeCases(unittest.TestCase): + """Tests for edge cases.""" + + def test_empty_properties(self): + schema = {"type": "object", "properties": {}} + Model = create_pydantic_model_from_schema(schema, "EmptyModel") + instance = Model() + self.assertIsInstance(instance, BaseModel) + + def test_no_properties_key(self): + schema = {"type": "object"} + Model = create_pydantic_model_from_schema(schema, "NoPropsModel") + instance = Model() + self.assertIsInstance(instance, BaseModel) + + def test_description_preserved(self): + schema = { + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The city to query", + } + }, + "required": ["city"], + } + Model = create_pydantic_model_from_schema(schema, "DescModel") + field_info = Model.model_fields["city"] + self.assertEqual(field_info.description, "The city to query") + + def test_returns_pydantic_model_subclass(self): + schema = { + "type": "object", + "properties": {"x": {"type": "integer"}}, + "required": ["x"], + } + Model = create_pydantic_model_from_schema(schema, "SubclassCheck") + self.assertTrue(issubclass(Model, BaseModel)) + + def test_model_name_set(self): + schema = { + "type": "object", + "properties": {"x": {"type": "integer"}}, + "required": ["x"], + } + Model = create_pydantic_model_from_schema(schema, "MyToolArgs") + 
self.assertEqual(Model.__name__, "MyToolArgs") + + +if __name__ == "__main__": + unittest.main() diff --git a/tools/requirements.txt b/tools/requirements.txt index 28b129e1c..ce49911fc 100644 --- a/tools/requirements.txt +++ b/tools/requirements.txt @@ -1,2 +1,2 @@ -grpcio-tools==1.76.0 +grpcio-tools==1.80.0 mypy-protobuf==5.0.0