diff --git a/awsiot/greengrasscoreipc/client.py b/awsiot/greengrasscoreipc/client.py index ea80f4b9..7367b6eb 100644 --- a/awsiot/greengrasscoreipc/client.py +++ b/awsiot/greengrasscoreipc/client.py @@ -42,6 +42,40 @@ def close(self): # type: (...) -> concurrent.futures.Future[None] return super().close() +class CancelLocalDeploymentOperation(model._CancelLocalDeploymentOperation): + """ + CancelLocalDeploymentOperation + + Create with GreengrassCoreIPCClient.new_cancel_local_deployment() + """ + + def activate(self, request: model.CancelLocalDeploymentRequest): # type: (...) -> concurrent.futures.Future[None] + """ + Activate this operation by sending the initial CancelLocalDeploymentRequest message. + + Returns a Future which completes with a result of None if the + request is successfully written to the wire, or an exception if + the request fails to send. + """ + return self._activate(request) + + def get_response(self): # type: (...) -> concurrent.futures.Future[model.CancelLocalDeploymentResponse] + """ + Returns a Future which completes with a result of CancelLocalDeploymentResponse, + when the initial response is received, or an exception. + """ + return self._get_response() + + def close(self): # type: (...) -> concurrent.futures.Future[None] + """ + Close the operation, whether or not it has completed. + + Returns a Future which completes with a result of None + when the operation has closed. + """ + return super().close() + + class CreateDebugPasswordOperation(model._CreateDebugPasswordOperation): """ CreateDebugPasswordOperation @@ -1327,6 +1361,16 @@ def new_authorize_client_device_action(self) -> AuthorizeClientDeviceActionOpera """ return self._new_operation(AuthorizeClientDeviceActionOperation) + def new_cancel_local_deployment(self) -> CancelLocalDeploymentOperation: + """ + Create a new CancelLocalDeploymentOperation. + + This operation will not send or receive any data until activate() + is called. Call activate() when you're ready for callbacks and + events to fire. + """ + return self._new_operation(CancelLocalDeploymentOperation) + def new_create_debug_password(self) -> CreateDebugPasswordOperation: """ Create a new CreateDebugPasswordOperation. diff --git a/awsiot/greengrasscoreipc/clientv2.py b/awsiot/greengrasscoreipc/clientv2.py index 2ff4bfbc..a5fb7c23 100644 --- a/awsiot/greengrasscoreipc/clientv2.py +++ b/awsiot/greengrasscoreipc/clientv2.py @@ -144,6 +144,29 @@ def authorize_client_device_action_async(self, *, write_future = operation.activate(request) return self.__combine_futures(write_future, operation.get_response()) + def cancel_local_deployment(self, *, + deployment_id: typing.Optional[str] = None) -> model.CancelLocalDeploymentResponse: + """ + Perform the CancelLocalDeployment operation synchronously. + + Args: + deployment_id: + """ + return self.cancel_local_deployment_async(deployment_id=deployment_id).result() + + def cancel_local_deployment_async(self, *, + deployment_id: typing.Optional[str] = None): # type: (...) -> concurrent.futures.Future[model.CancelLocalDeploymentResponse] + """ + Perform the CancelLocalDeployment operation asynchronously. 
+ + Args: + deployment_id: + """ + request = model.CancelLocalDeploymentRequest(deployment_id=deployment_id) + operation = self.client.new_cancel_local_deployment() + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()) + def create_debug_password(self) -> model.CreateDebugPasswordResponse: """ Perform the CreateDebugPassword operation synchronously. @@ -168,7 +191,8 @@ def create_local_deployment(self, *, component_to_configuration: typing.Optional[typing.Dict[str, typing.Dict[str, typing.Any]]] = None, component_to_run_with_info: typing.Optional[typing.Dict[str, model.RunWithInfo]] = None, recipe_directory_path: typing.Optional[str] = None, - artifacts_directory_path: typing.Optional[str] = None) -> model.CreateLocalDeploymentResponse: + artifacts_directory_path: typing.Optional[str] = None, + failure_handling_policy: typing.Optional[str] = None) -> model.CreateLocalDeploymentResponse: """ Perform the CreateLocalDeployment operation synchronously. @@ -180,8 +204,9 @@ def create_local_deployment(self, *, component_to_run_with_info: recipe_directory_path: artifacts_directory_path: + failure_handling_policy: FailureHandlingPolicy enum value """ - return self.create_local_deployment_async(group_name=group_name, root_component_versions_to_add=root_component_versions_to_add, root_components_to_remove=root_components_to_remove, component_to_configuration=component_to_configuration, component_to_run_with_info=component_to_run_with_info, recipe_directory_path=recipe_directory_path, artifacts_directory_path=artifacts_directory_path).result() + return self.create_local_deployment_async(group_name=group_name, root_component_versions_to_add=root_component_versions_to_add, root_components_to_remove=root_components_to_remove, component_to_configuration=component_to_configuration, component_to_run_with_info=component_to_run_with_info, recipe_directory_path=recipe_directory_path, artifacts_directory_path=artifacts_directory_path, failure_handling_policy=failure_handling_policy).result() def create_local_deployment_async(self, *, group_name: typing.Optional[str] = None, @@ -190,7 +215,8 @@ def create_local_deployment_async(self, *, component_to_configuration: typing.Optional[typing.Dict[str, typing.Dict[str, typing.Any]]] = None, component_to_run_with_info: typing.Optional[typing.Dict[str, model.RunWithInfo]] = None, recipe_directory_path: typing.Optional[str] = None, - artifacts_directory_path: typing.Optional[str] = None): # type: (...) -> concurrent.futures.Future[model.CreateLocalDeploymentResponse] + artifacts_directory_path: typing.Optional[str] = None, + failure_handling_policy: typing.Optional[str] = None): # type: (...) -> concurrent.futures.Future[model.CreateLocalDeploymentResponse] """ Perform the CreateLocalDeployment operation asynchronously. 
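The two clientv2 additions above work together: create_local_deployment() now accepts a failure_handling_policy, and the new cancel_local_deployment() can stop a local deployment that is still in progress. A minimal usage sketch follows; it assumes the V2 wrapper class is GreengrassCoreIPCClientV2 (as elsewhere in this package), that constructing it with no arguments establishes the IPC connection, and that CreateLocalDeploymentResponse exposes deployment_id as in the existing model; none of that is changed by this diff, and the component name is a placeholder.

    from awsiot.greengrasscoreipc.clientv2 import GreengrassCoreIPCClientV2
    from awsiot.greengrasscoreipc.model import FailureHandlingPolicy

    ipc = GreengrassCoreIPCClientV2()  # connects on construction when no client is passed in

    # Ask the nucleus to roll back automatically if this local deployment fails.
    create_response = ipc.create_local_deployment(
        root_component_versions_to_add={'com.example.HelloWorld': '1.0.0'},  # placeholder component
        failure_handling_policy=FailureHandlingPolicy.ROLLBACK,
    )

    # Cancel the deployment while it is still IN_PROGRESS; the response carries
    # only an optional human-readable message.
    cancel_response = ipc.cancel_local_deployment(deployment_id=create_response.deployment_id)
    print(cancel_response.message)

    ipc.close()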
@@ -202,8 +228,9 @@ def create_local_deployment_async(self, *, component_to_run_with_info: recipe_directory_path: artifacts_directory_path: + failure_handling_policy: FailureHandlingPolicy enum value """ - request = model.CreateLocalDeploymentRequest(group_name=group_name, root_component_versions_to_add=root_component_versions_to_add, root_components_to_remove=root_components_to_remove, component_to_configuration=component_to_configuration, component_to_run_with_info=component_to_run_with_info, recipe_directory_path=recipe_directory_path, artifacts_directory_path=artifacts_directory_path) + request = model.CreateLocalDeploymentRequest(group_name=group_name, root_component_versions_to_add=root_component_versions_to_add, root_components_to_remove=root_components_to_remove, component_to_configuration=component_to_configuration, component_to_run_with_info=component_to_run_with_info, recipe_directory_path=recipe_directory_path, artifacts_directory_path=artifacts_directory_path, failure_handling_policy=failure_handling_policy) operation = self.client.new_create_local_deployment() write_future = operation.activate(request) return self.__combine_futures(write_future, operation.get_response()) diff --git a/awsiot/greengrasscoreipc/model.py b/awsiot/greengrasscoreipc/model.py index d0ff53cb..28e3d0ba 100644 --- a/awsiot/greengrasscoreipc/model.py +++ b/awsiot/greengrasscoreipc/model.py @@ -167,6 +167,19 @@ class DeploymentStatus: IN_PROGRESS = 'IN_PROGRESS' SUCCEEDED = 'SUCCEEDED' FAILED = 'FAILED' + CANCELED = 'CANCELED' + + +class DetailedDeploymentStatus: + """ + DetailedDeploymentStatus enum + """ + + SUCCESSFUL = 'SUCCESSFUL' + FAILED_NO_STATE_CHANGE = 'FAILED_NO_STATE_CHANGE' + FAILED_ROLLBACK_NOT_REQUESTED = 'FAILED_ROLLBACK_NOT_REQUESTED' + FAILED_ROLLBACK_COMPLETE = 'FAILED_ROLLBACK_COMPLETE' + REJECTED = 'REJECTED' class LifecycleState: @@ -345,18 +358,22 @@ class LocalDeployment(rpc.Shape): Keyword Args: deployment_id: The ID of the local deployment. status: DeploymentStatus enum value. The status of the local deployment. + created_on: (Optional) The timestamp at which the local deployment was created in MM/dd/yyyy hh:mm:ss format Attributes: deployment_id: The ID of the local deployment. status: DeploymentStatus enum value. The status of the local deployment. 
+ created_on: (Optional) The timestamp at which the local deployment was created in MM/dd/yyyy hh:mm:ss format """ def __init__(self, *, deployment_id: typing.Optional[str] = None, - status: typing.Optional[str] = None): + status: typing.Optional[str] = None, + created_on: typing.Optional[str] = None): super().__init__() self.deployment_id = deployment_id # type: typing.Optional[str] self.status = status # type: typing.Optional[str] + self.created_on = created_on # type: typing.Optional[str] def set_deployment_id(self, deployment_id: str): self.deployment_id = deployment_id @@ -366,6 +383,10 @@ def set_status(self, status: str): self.status = status return self + def set_created_on(self, created_on: str): + self.created_on = created_on + return self + def _to_payload(self): payload = {} @@ -373,6 +394,8 @@ def _to_payload(self): payload['deploymentId'] = self.deployment_id if self.status is not None: payload['status'] = self.status + if self.created_on is not None: + payload['createdOn'] = self.created_on return payload @classmethod @@ -382,6 +405,8 @@ def _from_payload(cls, payload): new.deployment_id = payload['deploymentId'] if 'status' in payload: new.status = payload['status'] + if 'createdOn' in payload: + new.created_on = payload['createdOn'] return new @classmethod @@ -519,6 +544,95 @@ def __eq__(self, other): return False +class DeploymentStatusDetails(rpc.Shape): + """ + DeploymentStatusDetails + + All attributes are None by default, and may be set by keyword in the constructor. + + Keyword Args: + detailed_deployment_status: DetailedDeploymentStatus enum value. The detailed deployment status of the local deployment. + deployment_error_stack: (Optional) The list of local deployment errors + deployment_error_types: (Optional) The list of local deployment error types + deployment_failure_cause: (Optional) The cause of local deployment failure + + Attributes: + detailed_deployment_status: DetailedDeploymentStatus enum value. The detailed deployment status of the local deployment. 
+ deployment_error_stack: (Optional) The list of local deployment errors + deployment_error_types: (Optional) The list of local deployment error types + deployment_failure_cause: (Optional) The cause of local deployment failure + """ + + def __init__(self, *, + detailed_deployment_status: typing.Optional[str] = None, + deployment_error_stack: typing.Optional[typing.List[str]] = None, + deployment_error_types: typing.Optional[typing.List[str]] = None, + deployment_failure_cause: typing.Optional[str] = None): + super().__init__() + self.detailed_deployment_status = detailed_deployment_status # type: typing.Optional[str] + self.deployment_error_stack = deployment_error_stack # type: typing.Optional[typing.List[str]] + self.deployment_error_types = deployment_error_types # type: typing.Optional[typing.List[str]] + self.deployment_failure_cause = deployment_failure_cause # type: typing.Optional[str] + + def set_detailed_deployment_status(self, detailed_deployment_status: str): + self.detailed_deployment_status = detailed_deployment_status + return self + + def set_deployment_error_stack(self, deployment_error_stack: typing.List[str]): + self.deployment_error_stack = deployment_error_stack + return self + + def set_deployment_error_types(self, deployment_error_types: typing.List[str]): + self.deployment_error_types = deployment_error_types + return self + + def set_deployment_failure_cause(self, deployment_failure_cause: str): + self.deployment_failure_cause = deployment_failure_cause + return self + + + def _to_payload(self): + payload = {} + if self.detailed_deployment_status is not None: + payload['detailedDeploymentStatus'] = self.detailed_deployment_status + if self.deployment_error_stack is not None: + payload['deploymentErrorStack'] = self.deployment_error_stack + if self.deployment_error_types is not None: + payload['deploymentErrorTypes'] = self.deployment_error_types + if self.deployment_failure_cause is not None: + payload['deploymentFailureCause'] = self.deployment_failure_cause + return payload + + @classmethod + def _from_payload(cls, payload): + new = cls() + if 'detailedDeploymentStatus' in payload: + new.detailed_deployment_status = payload['detailedDeploymentStatus'] + if 'deploymentErrorStack' in payload: + new.deployment_error_stack = payload['deploymentErrorStack'] + if 'deploymentErrorTypes' in payload: + new.deployment_error_types = payload['deploymentErrorTypes'] + if 'deploymentFailureCause' in payload: + new.deployment_failure_cause = payload['deploymentFailureCause'] + return new + + @classmethod + def _model_name(cls): + return 'aws.greengrass#DeploymentStatusDetails' + + def __repr__(self): + attrs = [] + for attr, val in self.__dict__.items(): + if val is not None: + attrs.append('%s=%r' % (attr, val)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(attrs)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + class ConfigurationValidityStatus: """ ConfigurationValidityStatus enum @@ -1301,6 +1415,15 @@ def __eq__(self, other): return False +class FailureHandlingPolicy: + """ + FailureHandlingPolicy enum + """ + + ROLLBACK = 'ROLLBACK' + DO_NOTHING = 'DO_NOTHING' + + class RequestStatus: """ RequestStatus enum @@ -1457,6 +1580,95 @@ def __eq__(self, other): return False +class LocalDeploymentStatus(rpc.Shape): + """ + LocalDeploymentStatus + + All attributes are None by default, and may be set by keyword in the constructor. 
+ + Keyword Args: + deployment_id: The ID of the local deployment. + status: DeploymentStatus enum value. The status of the local deployment. + created_on: (Optional) The timestamp at which the local deployment was created in MM/dd/yyyy hh:mm:ss format + deployment_status_details: (Optional) The status details of the local deployment. + + Attributes: + deployment_id: The ID of the local deployment. + status: DeploymentStatus enum value. The status of the local deployment. + created_on: (Optional) The timestamp at which the local deployment was created in MM/dd/yyyy hh:mm:ss format + deployment_status_details: (Optional) The status details of the local deployment. + """ + + def __init__(self, *, + deployment_id: typing.Optional[str] = None, + status: typing.Optional[str] = None, + created_on: typing.Optional[str] = None, + deployment_status_details: typing.Optional[DeploymentStatusDetails] = None): + super().__init__() + self.deployment_id = deployment_id # type: typing.Optional[str] + self.status = status # type: typing.Optional[str] + self.created_on = created_on # type: typing.Optional[str] + self.deployment_status_details = deployment_status_details # type: typing.Optional[DeploymentStatusDetails] + + def set_deployment_id(self, deployment_id: str): + self.deployment_id = deployment_id + return self + + def set_status(self, status: str): + self.status = status + return self + + def set_created_on(self, created_on: str): + self.created_on = created_on + return self + + def set_deployment_status_details(self, deployment_status_details: DeploymentStatusDetails): + self.deployment_status_details = deployment_status_details + return self + + + def _to_payload(self): + payload = {} + if self.deployment_id is not None: + payload['deploymentId'] = self.deployment_id + if self.status is not None: + payload['status'] = self.status + if self.created_on is not None: + payload['createdOn'] = self.created_on + if self.deployment_status_details is not None: + payload['deploymentStatusDetails'] = self.deployment_status_details._to_payload() + return payload + + @classmethod + def _from_payload(cls, payload): + new = cls() + if 'deploymentId' in payload: + new.deployment_id = payload['deploymentId'] + if 'status' in payload: + new.status = payload['status'] + if 'createdOn' in payload: + new.created_on = payload['createdOn'] + if 'deploymentStatusDetails' in payload: + new.deployment_status_details = DeploymentStatusDetails._from_payload(payload['deploymentStatusDetails']) + return new + + @classmethod + def _model_name(cls): + return 'aws.greengrass#LocalDeploymentStatus' + + def __repr__(self): + attrs = [] + for attr, val in self.__dict__.items(): + if val is not None: + attrs.append('%s=%r' % (attr, val)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(attrs)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + class ConfigurationValidityReport(rpc.Shape): """ ConfigurationValidityReport @@ -2372,6 +2584,7 @@ class CreateLocalDeploymentRequest(rpc.Shape): component_to_run_with_info: Map of component names to component run as info. recipe_directory_path: All recipes files in this directory will be copied over to the Greengrass package store. artifacts_directory_path: All artifact files in this directory will be copied over to the Greengrass package store. + failure_handling_policy: FailureHandlingPolicy enum value. Deployment failure handling policy. 
Attributes: group_name: The thing group name the deployment is targeting. If the group name is not specified, "LOCAL_DEPLOYMENT" will be used. @@ -2381,6 +2594,7 @@ class CreateLocalDeploymentRequest(rpc.Shape): component_to_run_with_info: Map of component names to component run as info. recipe_directory_path: All recipes files in this directory will be copied over to the Greengrass package store. artifacts_directory_path: All artifact files in this directory will be copied over to the Greengrass package store. + failure_handling_policy: FailureHandlingPolicy enum value. Deployment failure handling policy. """ def __init__(self, *, @@ -2390,7 +2604,8 @@ def __init__(self, *, component_to_configuration: typing.Optional[typing.Dict[str, typing.Dict[str, typing.Any]]] = None, component_to_run_with_info: typing.Optional[typing.Dict[str, RunWithInfo]] = None, recipe_directory_path: typing.Optional[str] = None, - artifacts_directory_path: typing.Optional[str] = None): + artifacts_directory_path: typing.Optional[str] = None, + failure_handling_policy: typing.Optional[str] = None): super().__init__() self.group_name = group_name # type: typing.Optional[str] self.root_component_versions_to_add = root_component_versions_to_add # type: typing.Optional[typing.Dict[str, str]] @@ -2399,6 +2614,7 @@ def __init__(self, *, self.component_to_run_with_info = component_to_run_with_info # type: typing.Optional[typing.Dict[str, RunWithInfo]] self.recipe_directory_path = recipe_directory_path # type: typing.Optional[str] self.artifacts_directory_path = artifacts_directory_path # type: typing.Optional[str] + self.failure_handling_policy = failure_handling_policy # type: typing.Optional[str] def set_group_name(self, group_name: str): self.group_name = group_name @@ -2428,6 +2644,10 @@ def set_artifacts_directory_path(self, artifacts_directory_path: str): self.artifacts_directory_path = artifacts_directory_path return self + def set_failure_handling_policy(self, failure_handling_policy: str): + self.failure_handling_policy = failure_handling_policy + return self + def _to_payload(self): payload = {} @@ -2445,6 +2665,8 @@ def _to_payload(self): payload['recipeDirectoryPath'] = self.recipe_directory_path if self.artifacts_directory_path is not None: payload['artifactsDirectoryPath'] = self.artifacts_directory_path + if self.failure_handling_policy is not None: + payload['failureHandlingPolicy'] = self.failure_handling_policy return payload @classmethod @@ -2464,6 +2686,8 @@ def _from_payload(cls, payload): new.recipe_directory_path = payload['recipeDirectoryPath'] if 'artifactsDirectoryPath' in payload: new.artifacts_directory_path = payload['artifactsDirectoryPath'] + if 'failureHandlingPolicy' in payload: + new.failure_handling_policy = payload['failureHandlingPolicy'] return new @classmethod @@ -3193,6 +3417,112 @@ def __eq__(self, other): return False +class CancelLocalDeploymentResponse(rpc.Shape): + """ + CancelLocalDeploymentResponse + + All attributes are None by default, and may be set by keyword in the constructor. 
+ + Keyword Args: + message: + + Attributes: + message: + """ + + def __init__(self, *, + message: typing.Optional[str] = None): + super().__init__() + self.message = message # type: typing.Optional[str] + + def set_message(self, message: str): + self.message = message + return self + + + def _to_payload(self): + payload = {} + if self.message is not None: + payload['message'] = self.message + return payload + + @classmethod + def _from_payload(cls, payload): + new = cls() + if 'message' in payload: + new.message = payload['message'] + return new + + @classmethod + def _model_name(cls): + return 'aws.greengrass#CancelLocalDeploymentResponse' + + def __repr__(self): + attrs = [] + for attr, val in self.__dict__.items(): + if val is not None: + attrs.append('%s=%r' % (attr, val)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(attrs)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + +class CancelLocalDeploymentRequest(rpc.Shape): + """ + CancelLocalDeploymentRequest + + All attributes are None by default, and may be set by keyword in the constructor. + + Keyword Args: + deployment_id: (Optional) The ID of the local deployment to cancel. + + Attributes: + deployment_id: (Optional) The ID of the local deployment to cancel. + """ + + def __init__(self, *, + deployment_id: typing.Optional[str] = None): + super().__init__() + self.deployment_id = deployment_id # type: typing.Optional[str] + + def set_deployment_id(self, deployment_id: str): + self.deployment_id = deployment_id + return self + + + def _to_payload(self): + payload = {} + if self.deployment_id is not None: + payload['deploymentId'] = self.deployment_id + return payload + + @classmethod + def _from_payload(cls, payload): + new = cls() + if 'deploymentId' in payload: + new.deployment_id = payload['deploymentId'] + return new + + @classmethod + def _model_name(cls): + return 'aws.greengrass#CancelLocalDeploymentRequest' + + def __repr__(self): + attrs = [] + for attr, val in self.__dict__.items(): + if val is not None: + attrs.append('%s=%r' % (attr, val)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(attrs)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + class UpdateStateResponse(rpc.Shape): """ UpdateStateResponse @@ -3461,11 +3791,11 @@ class GetLocalDeploymentStatusResponse(rpc.Shape): """ def __init__(self, *, - deployment: typing.Optional[LocalDeployment] = None): + deployment: typing.Optional[LocalDeploymentStatus] = None): super().__init__() - self.deployment = deployment # type: typing.Optional[LocalDeployment] + self.deployment = deployment # type: typing.Optional[LocalDeploymentStatus] - def set_deployment(self, deployment: LocalDeployment): + def set_deployment(self, deployment: LocalDeploymentStatus): self.deployment = deployment return self @@ -3480,7 +3810,7 @@ def _to_payload(self): def _from_payload(cls, payload): new = cls() if 'deployment' in payload: - new.deployment = LocalDeployment._from_payload(payload['deployment']) + new.deployment = LocalDeploymentStatus._from_payload(payload['deployment']) return new @classmethod @@ -6517,6 +6847,7 @@ def __eq__(self, other): LocalDeployment, PostComponentUpdateEvent, PreComponentUpdateEvent, + DeploymentStatusDetails, ComponentDetails, CertificateUpdate, BinaryMessage, @@ -6526,6 +6857,7 @@ def __eq__(self, other): Metric, ConfigurationUpdateEvent, MQTTMessage, + LocalDeploymentStatus, 
ConfigurationValidityReport, CertificateOptions, InvalidArgumentsError, @@ -6547,6 +6879,8 @@ def __eq__(self, other): SubscribeToComponentUpdatesRequest, ListNamedShadowsForThingResponse, ListNamedShadowsForThingRequest, + CancelLocalDeploymentResponse, + CancelLocalDeploymentRequest, UpdateStateResponse, UpdateStateRequest, GetSecretValueResponse, @@ -6631,6 +6965,28 @@ def _response_stream_type(cls): return None +class _CancelLocalDeploymentOperation(rpc.ClientOperation): + @classmethod + def _model_name(cls): + return 'aws.greengrass#CancelLocalDeployment' + + @classmethod + def _request_type(cls): + return CancelLocalDeploymentRequest + + @classmethod + def _request_stream_type(cls): + return None + + @classmethod + def _response_type(cls): + return CancelLocalDeploymentResponse + + @classmethod + def _response_stream_type(cls): + return None + + class _CreateDebugPasswordOperation(rpc.ClientOperation): @classmethod def _model_name(cls): diff --git a/test/echotestrpc/clientv2.py b/test/echotestrpc/clientv2.py new file mode 100644 index 00000000..0a5d4a92 --- /dev/null +++ b/test/echotestrpc/clientv2.py @@ -0,0 +1,300 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0. + +# This file is generated + +from . import model +from .client import EchoTestRPCClient +from . import client + +import concurrent.futures +import datetime +import typing + + +class EchoTestRPCClientV2: + """ + V2 client for the EchoTestRPC service. + + Args: + client: Connection that this client will use. If you do not provide one, it will be made automatically. + executor: Executor used to run on_stream_event and on_stream_closed callbacks to avoid blocking the networking + thread. By default, a ThreadPoolExecutor will be created and used. Use None to run callbacks in the + networking thread, but understand that your code can deadlock the networking thread if it performs a + synchronous network call. + """ + + def __init__(self, client: typing.Optional[EchoTestRPCClient] = None, + executor: typing.Optional[concurrent.futures.Executor] = True): + if client is None: + import awsiot.greengrasscoreipc + client = awsiot.greengrasscoreipc.connect() + self.client = client + if executor is True: + executor = concurrent.futures.ThreadPoolExecutor() + self.executor = executor + + def close(self, *, executor_wait=True) -> concurrent.futures.Future: + """ + Close the underlying connection and shutdown the event executor (if any) + + Args: + executor_wait: If true (default), then this method will block until the executor finishes running + all tasks and shuts down. + + Returns: + The future which will complete + when the shutdown process is done. The future will have an + exception if shutdown was caused by an error, or a result + of None if the shutdown was clean and user-initiated. 
+ """ + fut = self.client.close() + if self.executor is not None: + self.executor.shutdown(wait=executor_wait) + return fut + + def __combine_futures(self, future1: concurrent.futures.Future, + future2: concurrent.futures.Future) -> concurrent.futures.Future: + def callback(*args, **kwargs): + try: + future1.result() + except Exception as e: + future2.set_exception(e) + future1.add_done_callback(callback) + return future2 + + @staticmethod + def __handle_error(): + import sys + import traceback + traceback.print_exc(file=sys.stderr) + + def __wrap_error(self, func): + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + self.__handle_error() + raise e + return wrapper + + def __create_stream_handler(real_self, operation, on_stream_event, on_stream_error, on_stream_closed): + stream_handler_type = type(operation + 'Handler', (getattr(client, operation + "StreamHandler"),), {}) + if on_stream_event is not None: + on_stream_event = real_self.__wrap_error(on_stream_event) + def handler(self, event): + if real_self.executor is not None: + real_self.executor.submit(on_stream_event, event) + else: + on_stream_event(event) + setattr(stream_handler_type, "on_stream_event", handler) + if on_stream_error is not None: + on_stream_error = real_self.__wrap_error(on_stream_error) + def handler(self, error): + return on_stream_error(error) + setattr(stream_handler_type, "on_stream_error", handler) + if on_stream_closed is not None: + on_stream_closed = real_self.__wrap_error(on_stream_closed) + def handler(self): + if real_self.executor is not None: + real_self.executor.submit(on_stream_closed) + else: + on_stream_closed() + setattr(stream_handler_type, "on_stream_closed", handler) + return stream_handler_type() + + def __handle_stream_handler(real_self, operation, stream_handler, on_stream_event, on_stream_error, on_stream_closed): + if stream_handler is not None and (on_stream_event is not None or on_stream_error is not None or on_stream_closed is not None): + raise ValueError("Must choose either stream_handler or on_stream_event/on_stream_error/on_stream_closed") + if stream_handler is not None and real_self.executor is not None: + return real_self.__create_stream_handler(operation, stream_handler.on_stream_event, + stream_handler.on_stream_error, stream_handler.on_stream_closed) + if stream_handler is None: + return real_self.__create_stream_handler(operation, on_stream_event, on_stream_error, on_stream_closed) + return stream_handler + + def cause_service_error(self) -> model.CauseServiceErrorResponse: + """ + Perform the CauseServiceError operation synchronously. + + """ + return self.cause_service_error_async().result() + + def cause_service_error_async(self): # type: (...) -> concurrent.futures.Future[model.CauseServiceErrorResponse] + """ + Perform the CauseServiceError operation asynchronously. 
+ + """ + request = model.CauseServiceErrorRequest() + operation = self.client.new_cause_service_error() + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()) + + def cause_stream_service_to_error(self, *, + stream_handler: typing.Optional[client.CauseStreamServiceToErrorStreamHandler] = None, + on_stream_event: typing.Optional[typing.Callable[[model.EchoStreamingMessage], None]] = None, + on_stream_error: typing.Optional[typing.Callable[[Exception], bool]] = None, + on_stream_closed: typing.Optional[typing.Callable[[], None]] = None +) -> typing.Tuple[model.EchoStreamingResponse, client.CauseStreamServiceToErrorOperation]: + """ + Perform the CauseStreamServiceToError operation synchronously. + The initial response or error will be returned synchronously, further events will arrive via the streaming + callbacks + + Args: + stream_handler: Methods on this object will be called as stream events happen on this operation. If an + executor is provided, the on_stream_event and on_stream_closed methods will run in the executor. + on_stream_event: Callback for stream events. Mutually exclusive with stream_handler. If an executor is + provided, this method will run in the executor. + on_stream_error: Callback for stream errors. Return true to close the stream, return false to keep the + stream open. Mutually exclusive with stream_handler. Even if an executor is provided, this method + will not run in the executor. + on_stream_closed: Callback for when the stream closes. Mutually exclusive with stream_handler. If an + executor is provided, this method will run in the executor. + """ + (fut, op) = self.cause_stream_service_to_error_async( + stream_handler=stream_handler, on_stream_event=on_stream_event, on_stream_error=on_stream_error, + on_stream_closed=on_stream_closed) + return fut.result(), op + + def cause_stream_service_to_error_async(self, *, + stream_handler: client.CauseStreamServiceToErrorStreamHandler = None, + on_stream_event: typing.Optional[typing.Callable[[model.EchoStreamingMessage], None]] = None, + on_stream_error: typing.Optional[typing.Callable[[Exception], bool]] = None, + on_stream_closed: typing.Optional[typing.Callable[[], None]] = None + ): # type: (...) -> typing.Tuple[concurrent.futures.Future[model.EchoStreamingResponse], client.CauseStreamServiceToErrorOperation] + """ + Perform the CauseStreamServiceToError operation asynchronously. + The initial response or error will be returned as the result of the asynchronous future, further events will + arrive via the streaming callbacks + + Args: + stream_handler: Methods on this object will be called as stream events happen on this operation. If an + executor is provided, the on_stream_event and on_stream_closed methods will run in the executor. + on_stream_event: Callback for stream events. Mutually exclusive with stream_handler. If an executor is + provided, this method will run in the executor. + on_stream_error: Callback for stream errors. Return true to close the stream, return false to keep the + stream open. Mutually exclusive with stream_handler. Even if an executor is provided, this method + will not run in the executor. + on_stream_closed: Callback for when the stream closes. Mutually exclusive with stream_handler. If an + executor is provided, this method will run in the executor. 
+ """ + stream_handler = self.__handle_stream_handler("CauseStreamServiceToError", stream_handler, + on_stream_event, on_stream_error, on_stream_closed) + request = model.EchoStreamingRequest() + operation = self.client.new_cause_stream_service_to_error(stream_handler) + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()), operation + + def echo_message(self, *, + message: typing.Optional[model.MessageData] = None) -> model.EchoMessageResponse: + """ + Perform the EchoMessage operation synchronously. + + Args: + message: + """ + return self.echo_message_async(message=message).result() + + def echo_message_async(self, *, + message: typing.Optional[model.MessageData] = None): # type: (...) -> concurrent.futures.Future[model.EchoMessageResponse] + """ + Perform the EchoMessage operation asynchronously. + + Args: + message: + """ + request = model.EchoMessageRequest(message=message) + operation = self.client.new_echo_message() + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()) + + def echo_stream_messages(self, *, + stream_handler: typing.Optional[client.EchoStreamMessagesStreamHandler] = None, + on_stream_event: typing.Optional[typing.Callable[[model.EchoStreamingMessage], None]] = None, + on_stream_error: typing.Optional[typing.Callable[[Exception], bool]] = None, + on_stream_closed: typing.Optional[typing.Callable[[], None]] = None +) -> typing.Tuple[model.EchoStreamingResponse, client.EchoStreamMessagesOperation]: + """ + Perform the EchoStreamMessages operation synchronously. + The initial response or error will be returned synchronously, further events will arrive via the streaming + callbacks + + Args: + stream_handler: Methods on this object will be called as stream events happen on this operation. If an + executor is provided, the on_stream_event and on_stream_closed methods will run in the executor. + on_stream_event: Callback for stream events. Mutually exclusive with stream_handler. If an executor is + provided, this method will run in the executor. + on_stream_error: Callback for stream errors. Return true to close the stream, return false to keep the + stream open. Mutually exclusive with stream_handler. Even if an executor is provided, this method + will not run in the executor. + on_stream_closed: Callback for when the stream closes. Mutually exclusive with stream_handler. If an + executor is provided, this method will run in the executor. + """ + (fut, op) = self.echo_stream_messages_async( + stream_handler=stream_handler, on_stream_event=on_stream_event, on_stream_error=on_stream_error, + on_stream_closed=on_stream_closed) + return fut.result(), op + + def echo_stream_messages_async(self, *, + stream_handler: client.EchoStreamMessagesStreamHandler = None, + on_stream_event: typing.Optional[typing.Callable[[model.EchoStreamingMessage], None]] = None, + on_stream_error: typing.Optional[typing.Callable[[Exception], bool]] = None, + on_stream_closed: typing.Optional[typing.Callable[[], None]] = None + ): # type: (...) -> typing.Tuple[concurrent.futures.Future[model.EchoStreamingResponse], client.EchoStreamMessagesOperation] + """ + Perform the EchoStreamMessages operation asynchronously. + The initial response or error will be returned as the result of the asynchronous future, further events will + arrive via the streaming callbacks + + Args: + stream_handler: Methods on this object will be called as stream events happen on this operation. 
If an + executor is provided, the on_stream_event and on_stream_closed methods will run in the executor. + on_stream_event: Callback for stream events. Mutually exclusive with stream_handler. If an executor is + provided, this method will run in the executor. + on_stream_error: Callback for stream errors. Return true to close the stream, return false to keep the + stream open. Mutually exclusive with stream_handler. Even if an executor is provided, this method + will not run in the executor. + on_stream_closed: Callback for when the stream closes. Mutually exclusive with stream_handler. If an + executor is provided, this method will run in the executor. + """ + stream_handler = self.__handle_stream_handler("EchoStreamMessages", stream_handler, + on_stream_event, on_stream_error, on_stream_closed) + request = model.EchoStreamingRequest() + operation = self.client.new_echo_stream_messages(stream_handler) + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()), operation + + def get_all_customers(self) -> model.GetAllCustomersResponse: + """ + Perform the GetAllCustomers operation synchronously. + + """ + return self.get_all_customers_async().result() + + def get_all_customers_async(self): # type: (...) -> concurrent.futures.Future[model.GetAllCustomersResponse] + """ + Perform the GetAllCustomers operation asynchronously. + + """ + request = model.GetAllCustomersRequest() + operation = self.client.new_get_all_customers() + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()) + + def get_all_products(self) -> model.GetAllProductsResponse: + """ + Perform the GetAllProducts operation synchronously. + + """ + return self.get_all_products_async().result() + + def get_all_products_async(self): # type: (...) -> concurrent.futures.Future[model.GetAllProductsResponse] + """ + Perform the GetAllProducts operation asynchronously. + + """ + request = model.GetAllProductsRequest() + operation = self.client.new_get_all_products() + write_future = operation.activate(request) + return self.__combine_futures(write_future, operation.get_response()) diff --git a/test/echotestrpc/model.py b/test/echotestrpc/model.py index fab19f9e..09445dec 100644 --- a/test/echotestrpc/model.py +++ b/test/echotestrpc/model.py @@ -35,12 +35,12 @@ class Product(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - name: - price: + name: The product's name + price: How much the product costs Attributes: - name: - price: + name: The product's name + price: How much the product costs """ def __init__(self, *, @@ -100,12 +100,12 @@ class Pair(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - key: - value: + key: Pair.key as a string + value: Pair.value also a string! Attributes: - key: - value: + key: Pair.key as a string + value: Pair.value also a string! """ def __init__(self, *, @@ -176,14 +176,14 @@ class Customer(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. 
Keyword Args: - id: - first_name: - last_name: + id: Opaque customer identifier + first_name: First name of the customer + last_name: Last name of the customer Attributes: - id: - first_name: - last_name: + id: Opaque customer identifier + first_name: First name of the customer + last_name: Last name of the customer """ def __init__(self, *, @@ -253,26 +253,26 @@ class MessageData(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - string_message: - boolean_message: - time_message: - document_message: - enum_message: FruitEnum enum value. - blob_message: - string_list_message: - key_value_pair_list: - string_to_value: + string_message: Some string data + boolean_message: Some boolean data + time_message: Some timestamp data + document_message: Some document data + enum_message: FruitEnum enum value. Some FruitEnum data + blob_message: Some blob data + string_list_message: Some list of strings data + key_value_pair_list: A list of key-value pairs + string_to_value: A map from strings to Product shapes Attributes: - string_message: - boolean_message: - time_message: - document_message: - enum_message: FruitEnum enum value. - blob_message: - string_list_message: - key_value_pair_list: - string_to_value: + string_message: Some string data + boolean_message: Some boolean data + time_message: Some timestamp data + document_message: Some document data + enum_message: FruitEnum enum value. Some FruitEnum data + blob_message: Some blob data + string_list_message: Some list of strings data + key_value_pair_list: A list of key-value pairs + string_to_value: A map from strings to Product shapes """ def __init__(self, *, @@ -408,12 +408,12 @@ class EchoStreamingMessage(rpc.Shape): All other attributes will be None. Keyword Args: - stream_message: - key_value_pair: + stream_message: A message data record + key_value_pair: A key value pair Attributes: - stream_message: - key_value_pair: + stream_message: A message data record + key_value_pair: A key value pair """ def __init__(self, *, @@ -473,12 +473,12 @@ class ServiceError(EchoTestRPCError): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - message: - value: + message: An error message + value: Some auxiliary value Attributes: - message: - value: + message: An error message + value: Some auxiliary value """ def __init__(self, *, @@ -541,10 +541,10 @@ class GetAllCustomersResponse(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - customers: + customers: A list of all known customers Attributes: - customers: + customers: A list of all known customers """ def __init__(self, *, @@ -629,10 +629,10 @@ class EchoMessageResponse(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - message: + message: Some message data Attributes: - message: + message: Some message data """ def __init__(self, *, @@ -682,10 +682,10 @@ class EchoMessageRequest(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. Keyword Args: - message: + message: Some message data Attributes: - message: + message: Some message data """ def __init__(self, *, @@ -875,10 +875,10 @@ class GetAllProductsResponse(rpc.Shape): All attributes are None by default, and may be set by keyword in the constructor. 
Keyword Args: - products: + products: A map from strings to products Attributes: - products: + products: A map from strings to products """ def __init__(self, *,
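On the lower-level V1 client (awsiot/greengrasscoreipc/client.py above), the generated CancelLocalDeploymentOperation follows the same activate / get_response / close pattern as every other operation. A short sketch, assuming an already-connected GreengrassCoreIPCClient named ipc_client and a placeholder deployment ID:

    from awsiot.greengrasscoreipc import model

    operation = ipc_client.new_cancel_local_deployment()
    # activate() resolves once the request has been written to the wire ...
    operation.activate(model.CancelLocalDeploymentRequest(deployment_id='<deployment-id>'))
    # ... and get_response() resolves with the CancelLocalDeploymentResponse,
    # or raises if the service returns an error.
    response = operation.get_response().result(timeout=10)
    print(response.message)
    operation.close()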
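Because GetLocalDeploymentStatusResponse.deployment is now a LocalDeploymentStatus rather than a LocalDeployment, callers can inspect the new created_on timestamp and the optional DeploymentStatusDetails block. A sketch of reading those fields through the V2 client; it assumes ipc is a connected GreengrassCoreIPCClientV2 and that get_local_deployment_status(deployment_id=...) is available unchanged, since that method is not touched by this diff:

    status_response = ipc.get_local_deployment_status(deployment_id='<deployment-id>')
    deployment = status_response.deployment  # model.LocalDeploymentStatus

    print(deployment.status)      # e.g. DeploymentStatus.CANCELED after a successful cancel
    print(deployment.created_on)  # 'MM/dd/yyyy hh:mm:ss' formatted string

    details = deployment.deployment_status_details  # optional; may be None
    if details is not None:
        print(details.detailed_deployment_status)  # a DetailedDeploymentStatus enum value
        print(details.deployment_error_types)      # list of error type strings
        print(details.deployment_failure_cause)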
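The new test/echotestrpc/clientv2.py mirrors the generated V2 wrapper for the test-only EchoTestRPC service. A sketch of how a test might drive it; the connection variable is a placeholder for an already-established EchoTestRPC connection, and the import paths assume the test package is importable as echotestrpc:

    from echotestrpc import model
    from echotestrpc.client import EchoTestRPCClient
    from echotestrpc.clientv2 import EchoTestRPCClientV2

    v2 = EchoTestRPCClientV2(client=EchoTestRPCClient(connection))

    # Plain request/response round trip.
    echoed = v2.echo_message(message=model.MessageData(string_message='hello'))
    assert echoed.message.string_message == 'hello'

    # Streaming variant: events are dispatched to on_stream_event on the executor.
    def on_event(event: model.EchoStreamingMessage) -> None:
        print('stream event:', event)

    initial_response, operation = v2.echo_stream_messages(on_stream_event=on_event)
    operation.close()
    v2.close()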