diff --git a/azure-servicefabric/azure/servicefabric/models/__init__.py b/azure-servicefabric/azure/servicefabric/models/__init__.py index dc2cb2fd10fb..7ad01455809b 100644 --- a/azure-servicefabric/azure/servicefabric/models/__init__.py +++ b/azure-servicefabric/azure/servicefabric/models/__init__.py @@ -10,6 +10,9 @@ # -------------------------------------------------------------------------- try: + from .fabric_error_error_py3 import FabricErrorError + from .fabric_error_py3 import FabricError, FabricErrorException + from .container_logs_py3 import ContainerLogs from .aad_metadata_py3 import AadMetadata from .aad_metadata_object_py3 import AadMetadataObject from .analysis_event_metadata_py3 import AnalysisEventMetadata @@ -77,6 +80,7 @@ from .cluster_health_chunk_query_description_py3 import ClusterHealthChunkQueryDescription from .cluster_health_policies_py3 import ClusterHealthPolicies from .cluster_manifest_py3 import ClusterManifest + from .cluster_version_py3 import ClusterVersion from .container_api_request_body_py3 import ContainerApiRequestBody from .container_api_result_py3 import ContainerApiResult from .container_api_response_py3 import ContainerApiResponse @@ -104,13 +108,10 @@ from .entity_health_state_chunk_py3 import EntityHealthStateChunk from .entity_health_state_chunk_list_py3 import EntityHealthStateChunkList from .epoch_py3 import Epoch - from .backup_epoch_py3 import BackupEpoch from .event_health_evaluation_py3 import EventHealthEvaluation from .fabric_event_py3 import FabricEvent from .fabric_code_version_info_py3 import FabricCodeVersionInfo from .fabric_config_version_info_py3 import FabricConfigVersionInfo - from .fabric_error_error_py3 import FabricErrorError - from .fabric_error_py3 import FabricError, FabricErrorException from .cluster_configuration_upgrade_status_info_py3 import ClusterConfigurationUpgradeStatusInfo from .health_information_py3 import HealthInformation from .int64_range_partition_information_py3 import 
Int64RangePartitionInformation @@ -318,8 +319,11 @@ from .failed_property_batch_info_py3 import FailedPropertyBatchInfo from .backup_schedule_description_py3 import BackupScheduleDescription from .backup_storage_description_py3 import BackupStorageDescription + from .retention_policy_description_py3 import RetentionPolicyDescription from .backup_policy_description_py3 import BackupPolicyDescription from .paged_backup_policy_description_list_py3 import PagedBackupPolicyDescriptionList + from .basic_retention_policy_description_py3 import BasicRetentionPolicyDescription + from .disable_backup_description_py3 import DisableBackupDescription from .application_backup_configuration_info_py3 import ApplicationBackupConfigurationInfo from .service_backup_configuration_info_py3 import ServiceBackupConfigurationInfo from .backup_suspension_info_py3 import BackupSuspensionInfo @@ -358,80 +362,97 @@ from .upload_chunk_range_py3 import UploadChunkRange from .upload_session_info_py3 import UploadSessionInfo from .upload_session_py3 import UploadSession - from .container_logs_py3 import ContainerLogs from .average_partition_load_scaling_trigger_py3 import AveragePartitionLoadScalingTrigger from .average_service_load_scaling_trigger_py3 import AverageServiceLoadScalingTrigger from .partition_instance_count_scale_mechanism_py3 import PartitionInstanceCountScaleMechanism from .add_remove_incremental_named_partition_scaling_mechanism_py3 import AddRemoveIncrementalNamedPartitionScalingMechanism from .application_created_event_py3 import ApplicationCreatedEvent from .application_deleted_event_py3 import ApplicationDeletedEvent - from .application_health_report_created_event_py3 import ApplicationHealthReportCreatedEvent + from .application_new_health_report_event_py3 import ApplicationNewHealthReportEvent from .application_health_report_expired_event_py3 import ApplicationHealthReportExpiredEvent - from .application_upgrade_complete_event_py3 import ApplicationUpgradeCompleteEvent - 
from .application_upgrade_domain_complete_event_py3 import ApplicationUpgradeDomainCompleteEvent - from .application_upgrade_rollback_complete_event_py3 import ApplicationUpgradeRollbackCompleteEvent - from .application_upgrade_rollback_start_event_py3 import ApplicationUpgradeRollbackStartEvent - from .application_upgrade_start_event_py3 import ApplicationUpgradeStartEvent - from .deployed_application_health_report_created_event_py3 import DeployedApplicationHealthReportCreatedEvent + from .application_upgrade_completed_event_py3 import ApplicationUpgradeCompletedEvent + from .application_upgrade_domain_completed_event_py3 import ApplicationUpgradeDomainCompletedEvent + from .application_upgrade_rollback_completed_event_py3 import ApplicationUpgradeRollbackCompletedEvent + from .application_upgrade_rollback_started_event_py3 import ApplicationUpgradeRollbackStartedEvent + from .application_upgrade_started_event_py3 import ApplicationUpgradeStartedEvent + from .deployed_application_new_health_report_event_py3 import DeployedApplicationNewHealthReportEvent from .deployed_application_health_report_expired_event_py3 import DeployedApplicationHealthReportExpiredEvent - from .process_deactivated_event_py3 import ProcessDeactivatedEvent - from .container_deactivated_event_py3 import ContainerDeactivatedEvent + from .application_process_exited_event_py3 import ApplicationProcessExitedEvent + from .application_container_instance_exited_event_py3 import ApplicationContainerInstanceExitedEvent from .node_aborted_event_py3 import NodeAbortedEvent - from .node_aborting_event_py3 import NodeAbortingEvent - from .node_added_event_py3 import NodeAddedEvent - from .node_close_event_py3 import NodeCloseEvent - from .node_closing_event_py3 import NodeClosingEvent - from .node_deactivate_complete_event_py3 import NodeDeactivateCompleteEvent - from .node_deactivate_start_event_py3 import NodeDeactivateStartEvent + from .node_added_to_cluster_event_py3 import NodeAddedToClusterEvent + 
from .node_closed_event_py3 import NodeClosedEvent + from .node_deactivate_completed_event_py3 import NodeDeactivateCompletedEvent + from .node_deactivate_started_event_py3 import NodeDeactivateStartedEvent from .node_down_event_py3 import NodeDownEvent - from .node_health_report_created_event_py3 import NodeHealthReportCreatedEvent + from .node_new_health_report_event_py3 import NodeNewHealthReportEvent from .node_health_report_expired_event_py3 import NodeHealthReportExpiredEvent - from .node_opened_success_event_py3 import NodeOpenedSuccessEvent + from .node_open_succeeded_event_py3 import NodeOpenSucceededEvent from .node_open_failed_event_py3 import NodeOpenFailedEvent - from .node_opening_event_py3 import NodeOpeningEvent - from .node_removed_event_py3 import NodeRemovedEvent + from .node_removed_from_cluster_event_py3 import NodeRemovedFromClusterEvent from .node_up_event_py3 import NodeUpEvent - from .partition_health_report_created_event_py3 import PartitionHealthReportCreatedEvent + from .partition_new_health_report_event_py3 import PartitionNewHealthReportEvent from .partition_health_report_expired_event_py3 import PartitionHealthReportExpiredEvent - from .partition_reconfiguration_completed_event_py3 import PartitionReconfigurationCompletedEvent + from .partition_reconfigured_event_py3 import PartitionReconfiguredEvent from .partition_primary_move_analysis_event_py3 import PartitionPrimaryMoveAnalysisEvent from .service_created_event_py3 import ServiceCreatedEvent from .service_deleted_event_py3 import ServiceDeletedEvent - from .service_health_report_created_event_py3 import ServiceHealthReportCreatedEvent + from .service_new_health_report_event_py3 import ServiceNewHealthReportEvent from .service_health_report_expired_event_py3 import ServiceHealthReportExpiredEvent - from .deployed_service_health_report_created_event_py3 import DeployedServiceHealthReportCreatedEvent - from .deployed_service_health_report_expired_event_py3 import 
DeployedServiceHealthReportExpiredEvent - from .stateful_replica_health_report_created_event_py3 import StatefulReplicaHealthReportCreatedEvent + from .deployed_service_package_new_health_report_event_py3 import DeployedServicePackageNewHealthReportEvent + from .deployed_service_package_health_report_expired_event_py3 import DeployedServicePackageHealthReportExpiredEvent + from .stateful_replica_new_health_report_event_py3 import StatefulReplicaNewHealthReportEvent from .stateful_replica_health_report_expired_event_py3 import StatefulReplicaHealthReportExpiredEvent - from .stateless_replica_health_report_created_event_py3 import StatelessReplicaHealthReportCreatedEvent + from .stateless_replica_new_health_report_event_py3 import StatelessReplicaNewHealthReportEvent from .stateless_replica_health_report_expired_event_py3 import StatelessReplicaHealthReportExpiredEvent - from .cluster_health_report_created_event_py3 import ClusterHealthReportCreatedEvent + from .cluster_new_health_report_event_py3 import ClusterNewHealthReportEvent from .cluster_health_report_expired_event_py3 import ClusterHealthReportExpiredEvent - from .cluster_upgrade_complete_event_py3 import ClusterUpgradeCompleteEvent - from .cluster_upgrade_domain_complete_event_py3 import ClusterUpgradeDomainCompleteEvent - from .cluster_upgrade_rollback_complete_event_py3 import ClusterUpgradeRollbackCompleteEvent - from .cluster_upgrade_rollback_start_event_py3 import ClusterUpgradeRollbackStartEvent - from .cluster_upgrade_start_event_py3 import ClusterUpgradeStartEvent + from .cluster_upgrade_completed_event_py3 import ClusterUpgradeCompletedEvent + from .cluster_upgrade_domain_completed_event_py3 import ClusterUpgradeDomainCompletedEvent + from .cluster_upgrade_rollback_completed_event_py3 import ClusterUpgradeRollbackCompletedEvent + from .cluster_upgrade_rollback_started_event_py3 import ClusterUpgradeRollbackStartedEvent + from .cluster_upgrade_started_event_py3 import ClusterUpgradeStartedEvent from 
.chaos_stopped_event_py3 import ChaosStoppedEvent from .chaos_started_event_py3 import ChaosStartedEvent - from .chaos_restart_node_fault_completed_event_py3 import ChaosRestartNodeFaultCompletedEvent - from .chaos_restart_code_package_fault_scheduled_event_py3 import ChaosRestartCodePackageFaultScheduledEvent - from .chaos_restart_code_package_fault_completed_event_py3 import ChaosRestartCodePackageFaultCompletedEvent - from .chaos_remove_replica_fault_scheduled_event_py3 import ChaosRemoveReplicaFaultScheduledEvent - from .chaos_remove_replica_fault_completed_event_py3 import ChaosRemoveReplicaFaultCompletedEvent - from .chaos_move_secondary_fault_scheduled_event_py3 import ChaosMoveSecondaryFaultScheduledEvent - from .chaos_move_primary_fault_scheduled_event_py3 import ChaosMovePrimaryFaultScheduledEvent - from .chaos_restart_replica_fault_scheduled_event_py3 import ChaosRestartReplicaFaultScheduledEvent - from .chaos_restart_node_fault_scheduled_event_py3 import ChaosRestartNodeFaultScheduledEvent - from .service_resource_description_py3 import ServiceResourceDescription - from .diagnostics_sink_properties_py3 import DiagnosticsSinkProperties - from .diagnostics_description_py3 import DiagnosticsDescription - from .application_resource_description_py3 import ApplicationResourceDescription - from .paged_service_resource_description_list_py3 import PagedServiceResourceDescriptionList - from .service_resource_replica_description_py3 import ServiceResourceReplicaDescription - from .paged_service_resource_replica_description_list_py3 import PagedServiceResourceReplicaDescriptionList + from .chaos_code_package_restart_scheduled_event_py3 import ChaosCodePackageRestartScheduledEvent + from .chaos_replica_removal_scheduled_event_py3 import ChaosReplicaRemovalScheduledEvent + from .chaos_partition_secondary_move_scheduled_event_py3 import ChaosPartitionSecondaryMoveScheduledEvent + from .chaos_partition_primary_move_scheduled_event_py3 import 
ChaosPartitionPrimaryMoveScheduledEvent + from .chaos_replica_restart_scheduled_event_py3 import ChaosReplicaRestartScheduledEvent + from .chaos_node_restart_scheduled_event_py3 import ChaosNodeRestartScheduledEvent + from .secret_resource_properties_py3 import SecretResourceProperties + from .inlined_value_secret_resource_properties_py3 import InlinedValueSecretResourceProperties + from .secret_resource_description_py3 import SecretResourceDescription + from .paged_secret_resource_description_list_py3 import PagedSecretResourceDescriptionList + from .secret_resource_properties_base_py3 import SecretResourcePropertiesBase + from .secret_value_py3 import SecretValue + from .secret_value_properties_py3 import SecretValueProperties + from .secret_value_resource_description_py3 import SecretValueResourceDescription + from .paged_secret_value_resource_description_list_py3 import PagedSecretValueResourceDescriptionList from .volume_provider_parameters_azure_file_py3 import VolumeProviderParametersAzureFile + from .volume_reference_py3 import VolumeReference + from .application_scoped_volume_creation_parameters_py3 import ApplicationScopedVolumeCreationParameters + from .application_scoped_volume_py3 import ApplicationScopedVolume + from .application_scoped_volume_creation_parameters_service_fabric_volume_disk_py3 import ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk from .volume_resource_description_py3 import VolumeResourceDescription + from .paged_volume_resource_description_list_py3 import PagedVolumeResourceDescriptionList + from .network_resource_properties_py3 import NetworkResourceProperties + from .local_network_resource_properties_py3 import LocalNetworkResourceProperties + from .endpoint_ref_py3 import EndpointRef + from .network_ref_py3 import NetworkRef + from .network_resource_description_py3 import NetworkResourceDescription + from .network_resource_properties_base_py3 import NetworkResourcePropertiesBase + from 
.paged_network_resource_description_list_py3 import PagedNetworkResourceDescriptionList + from .gateway_destination_py3 import GatewayDestination + from .tcp_config_py3 import TcpConfig + from .http_route_match_path_py3 import HttpRouteMatchPath + from .http_route_match_header_py3 import HttpRouteMatchHeader + from .http_route_match_rule_py3 import HttpRouteMatchRule + from .http_route_config_py3 import HttpRouteConfig + from .http_host_config_py3 import HttpHostConfig + from .http_config_py3 import HttpConfig + from .gateway_resource_description_py3 import GatewayResourceDescription + from .paged_gateway_resource_description_list_py3 import PagedGatewayResourceDescriptionList from .image_registry_credential_py3 import ImageRegistryCredential from .environment_variable_py3 import EnvironmentVariable from .setting_py3 import Setting @@ -440,15 +461,34 @@ from .resource_requests_py3 import ResourceRequests from .resource_limits_py3 import ResourceLimits from .resource_requirements_py3 import ResourceRequirements - from .container_volume_py3 import ContainerVolume + from .diagnostics_ref_py3 import DiagnosticsRef + from .reliable_collections_ref_py3 import ReliableCollectionsRef from .container_state_py3 import ContainerState from .container_event_py3 import ContainerEvent from .container_instance_view_py3 import ContainerInstanceView - from .diagnostics_ref_py3 import DiagnosticsRef from .container_code_package_properties_py3 import ContainerCodePackageProperties - from .network_ref_py3 import NetworkRef + from .auto_scaling_trigger_py3 import AutoScalingTrigger + from .auto_scaling_mechanism_py3 import AutoScalingMechanism + from .auto_scaling_policy_py3 import AutoScalingPolicy + from .service_resource_description_py3 import ServiceResourceDescription + from .diagnostics_sink_properties_py3 import DiagnosticsSinkProperties + from .diagnostics_description_py3 import DiagnosticsDescription + from .azure_internal_monitoring_pipeline_sink_description_py3 import 
AzureInternalMonitoringPipelineSinkDescription + from .add_remove_replica_scaling_mechanism_py3 import AddRemoveReplicaScalingMechanism + from .auto_scaling_metric_py3 import AutoScalingMetric + from .auto_scaling_resource_metric_py3 import AutoScalingResourceMetric + from .service_properties_py3 import ServiceProperties from .service_replica_properties_py3 import ServiceReplicaProperties + from .service_replica_description_py3 import ServiceReplicaDescription + from .average_load_scaling_trigger_py3 import AverageLoadScalingTrigger + from .paged_service_resource_description_list_py3 import PagedServiceResourceDescriptionList + from .paged_service_replica_description_list_py3 import PagedServiceReplicaDescriptionList + from .application_resource_description_py3 import ApplicationResourceDescription + from .paged_application_resource_description_list_py3 import PagedApplicationResourceDescriptionList except (SyntaxError, ImportError): + from .fabric_error_error import FabricErrorError + from .fabric_error import FabricError, FabricErrorException + from .container_logs import ContainerLogs from .aad_metadata import AadMetadata from .aad_metadata_object import AadMetadataObject from .analysis_event_metadata import AnalysisEventMetadata @@ -516,6 +556,7 @@ from .cluster_health_chunk_query_description import ClusterHealthChunkQueryDescription from .cluster_health_policies import ClusterHealthPolicies from .cluster_manifest import ClusterManifest + from .cluster_version import ClusterVersion from .container_api_request_body import ContainerApiRequestBody from .container_api_result import ContainerApiResult from .container_api_response import ContainerApiResponse @@ -543,13 +584,10 @@ from .entity_health_state_chunk import EntityHealthStateChunk from .entity_health_state_chunk_list import EntityHealthStateChunkList from .epoch import Epoch - from .backup_epoch import BackupEpoch from .event_health_evaluation import EventHealthEvaluation from .fabric_event import 
FabricEvent from .fabric_code_version_info import FabricCodeVersionInfo from .fabric_config_version_info import FabricConfigVersionInfo - from .fabric_error_error import FabricErrorError - from .fabric_error import FabricError, FabricErrorException from .cluster_configuration_upgrade_status_info import ClusterConfigurationUpgradeStatusInfo from .health_information import HealthInformation from .int64_range_partition_information import Int64RangePartitionInformation @@ -757,8 +795,11 @@ from .failed_property_batch_info import FailedPropertyBatchInfo from .backup_schedule_description import BackupScheduleDescription from .backup_storage_description import BackupStorageDescription + from .retention_policy_description import RetentionPolicyDescription from .backup_policy_description import BackupPolicyDescription from .paged_backup_policy_description_list import PagedBackupPolicyDescriptionList + from .basic_retention_policy_description import BasicRetentionPolicyDescription + from .disable_backup_description import DisableBackupDescription from .application_backup_configuration_info import ApplicationBackupConfigurationInfo from .service_backup_configuration_info import ServiceBackupConfigurationInfo from .backup_suspension_info import BackupSuspensionInfo @@ -797,80 +838,97 @@ from .upload_chunk_range import UploadChunkRange from .upload_session_info import UploadSessionInfo from .upload_session import UploadSession - from .container_logs import ContainerLogs from .average_partition_load_scaling_trigger import AveragePartitionLoadScalingTrigger from .average_service_load_scaling_trigger import AverageServiceLoadScalingTrigger from .partition_instance_count_scale_mechanism import PartitionInstanceCountScaleMechanism from .add_remove_incremental_named_partition_scaling_mechanism import AddRemoveIncrementalNamedPartitionScalingMechanism from .application_created_event import ApplicationCreatedEvent from .application_deleted_event import ApplicationDeletedEvent - from 
.application_health_report_created_event import ApplicationHealthReportCreatedEvent + from .application_new_health_report_event import ApplicationNewHealthReportEvent from .application_health_report_expired_event import ApplicationHealthReportExpiredEvent - from .application_upgrade_complete_event import ApplicationUpgradeCompleteEvent - from .application_upgrade_domain_complete_event import ApplicationUpgradeDomainCompleteEvent - from .application_upgrade_rollback_complete_event import ApplicationUpgradeRollbackCompleteEvent - from .application_upgrade_rollback_start_event import ApplicationUpgradeRollbackStartEvent - from .application_upgrade_start_event import ApplicationUpgradeStartEvent - from .deployed_application_health_report_created_event import DeployedApplicationHealthReportCreatedEvent + from .application_upgrade_completed_event import ApplicationUpgradeCompletedEvent + from .application_upgrade_domain_completed_event import ApplicationUpgradeDomainCompletedEvent + from .application_upgrade_rollback_completed_event import ApplicationUpgradeRollbackCompletedEvent + from .application_upgrade_rollback_started_event import ApplicationUpgradeRollbackStartedEvent + from .application_upgrade_started_event import ApplicationUpgradeStartedEvent + from .deployed_application_new_health_report_event import DeployedApplicationNewHealthReportEvent from .deployed_application_health_report_expired_event import DeployedApplicationHealthReportExpiredEvent - from .process_deactivated_event import ProcessDeactivatedEvent - from .container_deactivated_event import ContainerDeactivatedEvent + from .application_process_exited_event import ApplicationProcessExitedEvent + from .application_container_instance_exited_event import ApplicationContainerInstanceExitedEvent from .node_aborted_event import NodeAbortedEvent - from .node_aborting_event import NodeAbortingEvent - from .node_added_event import NodeAddedEvent - from .node_close_event import NodeCloseEvent - from 
.node_closing_event import NodeClosingEvent - from .node_deactivate_complete_event import NodeDeactivateCompleteEvent - from .node_deactivate_start_event import NodeDeactivateStartEvent + from .node_added_to_cluster_event import NodeAddedToClusterEvent + from .node_closed_event import NodeClosedEvent + from .node_deactivate_completed_event import NodeDeactivateCompletedEvent + from .node_deactivate_started_event import NodeDeactivateStartedEvent from .node_down_event import NodeDownEvent - from .node_health_report_created_event import NodeHealthReportCreatedEvent + from .node_new_health_report_event import NodeNewHealthReportEvent from .node_health_report_expired_event import NodeHealthReportExpiredEvent - from .node_opened_success_event import NodeOpenedSuccessEvent + from .node_open_succeeded_event import NodeOpenSucceededEvent from .node_open_failed_event import NodeOpenFailedEvent - from .node_opening_event import NodeOpeningEvent - from .node_removed_event import NodeRemovedEvent + from .node_removed_from_cluster_event import NodeRemovedFromClusterEvent from .node_up_event import NodeUpEvent - from .partition_health_report_created_event import PartitionHealthReportCreatedEvent + from .partition_new_health_report_event import PartitionNewHealthReportEvent from .partition_health_report_expired_event import PartitionHealthReportExpiredEvent - from .partition_reconfiguration_completed_event import PartitionReconfigurationCompletedEvent + from .partition_reconfigured_event import PartitionReconfiguredEvent from .partition_primary_move_analysis_event import PartitionPrimaryMoveAnalysisEvent from .service_created_event import ServiceCreatedEvent from .service_deleted_event import ServiceDeletedEvent - from .service_health_report_created_event import ServiceHealthReportCreatedEvent + from .service_new_health_report_event import ServiceNewHealthReportEvent from .service_health_report_expired_event import ServiceHealthReportExpiredEvent - from 
.deployed_service_health_report_created_event import DeployedServiceHealthReportCreatedEvent - from .deployed_service_health_report_expired_event import DeployedServiceHealthReportExpiredEvent - from .stateful_replica_health_report_created_event import StatefulReplicaHealthReportCreatedEvent + from .deployed_service_package_new_health_report_event import DeployedServicePackageNewHealthReportEvent + from .deployed_service_package_health_report_expired_event import DeployedServicePackageHealthReportExpiredEvent + from .stateful_replica_new_health_report_event import StatefulReplicaNewHealthReportEvent from .stateful_replica_health_report_expired_event import StatefulReplicaHealthReportExpiredEvent - from .stateless_replica_health_report_created_event import StatelessReplicaHealthReportCreatedEvent + from .stateless_replica_new_health_report_event import StatelessReplicaNewHealthReportEvent from .stateless_replica_health_report_expired_event import StatelessReplicaHealthReportExpiredEvent - from .cluster_health_report_created_event import ClusterHealthReportCreatedEvent + from .cluster_new_health_report_event import ClusterNewHealthReportEvent from .cluster_health_report_expired_event import ClusterHealthReportExpiredEvent - from .cluster_upgrade_complete_event import ClusterUpgradeCompleteEvent - from .cluster_upgrade_domain_complete_event import ClusterUpgradeDomainCompleteEvent - from .cluster_upgrade_rollback_complete_event import ClusterUpgradeRollbackCompleteEvent - from .cluster_upgrade_rollback_start_event import ClusterUpgradeRollbackStartEvent - from .cluster_upgrade_start_event import ClusterUpgradeStartEvent + from .cluster_upgrade_completed_event import ClusterUpgradeCompletedEvent + from .cluster_upgrade_domain_completed_event import ClusterUpgradeDomainCompletedEvent + from .cluster_upgrade_rollback_completed_event import ClusterUpgradeRollbackCompletedEvent + from .cluster_upgrade_rollback_started_event import ClusterUpgradeRollbackStartedEvent + from 
.cluster_upgrade_started_event import ClusterUpgradeStartedEvent from .chaos_stopped_event import ChaosStoppedEvent from .chaos_started_event import ChaosStartedEvent - from .chaos_restart_node_fault_completed_event import ChaosRestartNodeFaultCompletedEvent - from .chaos_restart_code_package_fault_scheduled_event import ChaosRestartCodePackageFaultScheduledEvent - from .chaos_restart_code_package_fault_completed_event import ChaosRestartCodePackageFaultCompletedEvent - from .chaos_remove_replica_fault_scheduled_event import ChaosRemoveReplicaFaultScheduledEvent - from .chaos_remove_replica_fault_completed_event import ChaosRemoveReplicaFaultCompletedEvent - from .chaos_move_secondary_fault_scheduled_event import ChaosMoveSecondaryFaultScheduledEvent - from .chaos_move_primary_fault_scheduled_event import ChaosMovePrimaryFaultScheduledEvent - from .chaos_restart_replica_fault_scheduled_event import ChaosRestartReplicaFaultScheduledEvent - from .chaos_restart_node_fault_scheduled_event import ChaosRestartNodeFaultScheduledEvent - from .service_resource_description import ServiceResourceDescription - from .diagnostics_sink_properties import DiagnosticsSinkProperties - from .diagnostics_description import DiagnosticsDescription - from .application_resource_description import ApplicationResourceDescription - from .paged_service_resource_description_list import PagedServiceResourceDescriptionList - from .service_resource_replica_description import ServiceResourceReplicaDescription - from .paged_service_resource_replica_description_list import PagedServiceResourceReplicaDescriptionList + from .chaos_code_package_restart_scheduled_event import ChaosCodePackageRestartScheduledEvent + from .chaos_replica_removal_scheduled_event import ChaosReplicaRemovalScheduledEvent + from .chaos_partition_secondary_move_scheduled_event import ChaosPartitionSecondaryMoveScheduledEvent + from .chaos_partition_primary_move_scheduled_event import ChaosPartitionPrimaryMoveScheduledEvent + 
from .chaos_replica_restart_scheduled_event import ChaosReplicaRestartScheduledEvent + from .chaos_node_restart_scheduled_event import ChaosNodeRestartScheduledEvent + from .secret_resource_properties import SecretResourceProperties + from .inlined_value_secret_resource_properties import InlinedValueSecretResourceProperties + from .secret_resource_description import SecretResourceDescription + from .paged_secret_resource_description_list import PagedSecretResourceDescriptionList + from .secret_resource_properties_base import SecretResourcePropertiesBase + from .secret_value import SecretValue + from .secret_value_properties import SecretValueProperties + from .secret_value_resource_description import SecretValueResourceDescription + from .paged_secret_value_resource_description_list import PagedSecretValueResourceDescriptionList from .volume_provider_parameters_azure_file import VolumeProviderParametersAzureFile + from .volume_reference import VolumeReference + from .application_scoped_volume_creation_parameters import ApplicationScopedVolumeCreationParameters + from .application_scoped_volume import ApplicationScopedVolume + from .application_scoped_volume_creation_parameters_service_fabric_volume_disk import ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk from .volume_resource_description import VolumeResourceDescription + from .paged_volume_resource_description_list import PagedVolumeResourceDescriptionList + from .network_resource_properties import NetworkResourceProperties + from .local_network_resource_properties import LocalNetworkResourceProperties + from .endpoint_ref import EndpointRef + from .network_ref import NetworkRef + from .network_resource_description import NetworkResourceDescription + from .network_resource_properties_base import NetworkResourcePropertiesBase + from .paged_network_resource_description_list import PagedNetworkResourceDescriptionList + from .gateway_destination import GatewayDestination + from .tcp_config import 
TcpConfig + from .http_route_match_path import HttpRouteMatchPath + from .http_route_match_header import HttpRouteMatchHeader + from .http_route_match_rule import HttpRouteMatchRule + from .http_route_config import HttpRouteConfig + from .http_host_config import HttpHostConfig + from .http_config import HttpConfig + from .gateway_resource_description import GatewayResourceDescription + from .paged_gateway_resource_description_list import PagedGatewayResourceDescriptionList from .image_registry_credential import ImageRegistryCredential from .environment_variable import EnvironmentVariable from .setting import Setting @@ -879,17 +937,34 @@ from .resource_requests import ResourceRequests from .resource_limits import ResourceLimits from .resource_requirements import ResourceRequirements - from .container_volume import ContainerVolume + from .diagnostics_ref import DiagnosticsRef + from .reliable_collections_ref import ReliableCollectionsRef from .container_state import ContainerState from .container_event import ContainerEvent from .container_instance_view import ContainerInstanceView - from .diagnostics_ref import DiagnosticsRef from .container_code_package_properties import ContainerCodePackageProperties - from .network_ref import NetworkRef + from .auto_scaling_trigger import AutoScalingTrigger + from .auto_scaling_mechanism import AutoScalingMechanism + from .auto_scaling_policy import AutoScalingPolicy + from .service_resource_description import ServiceResourceDescription + from .diagnostics_sink_properties import DiagnosticsSinkProperties + from .diagnostics_description import DiagnosticsDescription + from .azure_internal_monitoring_pipeline_sink_description import AzureInternalMonitoringPipelineSinkDescription + from .add_remove_replica_scaling_mechanism import AddRemoveReplicaScalingMechanism + from .auto_scaling_metric import AutoScalingMetric + from .auto_scaling_resource_metric import AutoScalingResourceMetric + from .service_properties import 
ServiceProperties from .service_replica_properties import ServiceReplicaProperties + from .service_replica_description import ServiceReplicaDescription + from .average_load_scaling_trigger import AverageLoadScalingTrigger + from .paged_service_resource_description_list import PagedServiceResourceDescriptionList + from .paged_service_replica_description_list import PagedServiceReplicaDescriptionList + from .application_resource_description import ApplicationResourceDescription + from .paged_application_resource_description_list import PagedApplicationResourceDescriptionList from .service_fabric_client_ap_is_enums import ( - ApplicationDefinitionKind, HealthState, + FabricErrorCodes, + ApplicationDefinitionKind, ApplicationStatus, ApplicationPackageCleanupPolicy, ApplicationTypeDefinitionKind, @@ -908,7 +983,6 @@ ReconfigurationPhase, ReconfigurationType, EntityKind, - FabricErrorCodes, FabricEventKind, HealthEvaluationKind, NodeDeactivationIntent, @@ -951,6 +1025,7 @@ PropertyValueKind, PropertyBatchOperationKind, PropertyBatchInfoKind, + RetentionPolicyType, BackupStorageKind, BackupScheduleKind, BackupPolicyScope, @@ -969,10 +1044,19 @@ RepairTaskHealthCheckState, ScalingTriggerKind, ScalingMechanismKind, - ServiceResourceStatus, - ApplicationResourceStatus, + ResourceStatus, + SecretKind, + VolumeProvider, + SizeTypes, + ApplicationScopedVolumeKind, + NetworkKind, + HeaderMatchType, + OperatingSystemType, DiagnosticsSinkKind, - OperatingSystemTypes, + AutoScalingMechanismKind, + AutoScalingMetricKind, + AutoScalingResourceMetricName, + AutoScalingTriggerKind, NodeStatusFilter, ReplicaHealthReportServiceKind, DataLossMode, @@ -982,6 +1066,9 @@ ) __all__ = [ + 'FabricErrorError', + 'FabricError', 'FabricErrorException', + 'ContainerLogs', 'AadMetadata', 'AadMetadataObject', 'AnalysisEventMetadata', @@ -1049,6 +1136,7 @@ 'ClusterHealthChunkQueryDescription', 'ClusterHealthPolicies', 'ClusterManifest', + 'ClusterVersion', 'ContainerApiRequestBody', 
'ContainerApiResult', 'ContainerApiResponse', @@ -1076,13 +1164,10 @@ 'EntityHealthStateChunk', 'EntityHealthStateChunkList', 'Epoch', - 'BackupEpoch', 'EventHealthEvaluation', 'FabricEvent', 'FabricCodeVersionInfo', 'FabricConfigVersionInfo', - 'FabricErrorError', - 'FabricError', 'FabricErrorException', 'ClusterConfigurationUpgradeStatusInfo', 'HealthInformation', 'Int64RangePartitionInformation', @@ -1290,8 +1375,11 @@ 'FailedPropertyBatchInfo', 'BackupScheduleDescription', 'BackupStorageDescription', + 'RetentionPolicyDescription', 'BackupPolicyDescription', 'PagedBackupPolicyDescriptionList', + 'BasicRetentionPolicyDescription', + 'DisableBackupDescription', 'ApplicationBackupConfigurationInfo', 'ServiceBackupConfigurationInfo', 'BackupSuspensionInfo', @@ -1330,80 +1418,97 @@ 'UploadChunkRange', 'UploadSessionInfo', 'UploadSession', - 'ContainerLogs', 'AveragePartitionLoadScalingTrigger', 'AverageServiceLoadScalingTrigger', 'PartitionInstanceCountScaleMechanism', 'AddRemoveIncrementalNamedPartitionScalingMechanism', 'ApplicationCreatedEvent', 'ApplicationDeletedEvent', - 'ApplicationHealthReportCreatedEvent', + 'ApplicationNewHealthReportEvent', 'ApplicationHealthReportExpiredEvent', - 'ApplicationUpgradeCompleteEvent', - 'ApplicationUpgradeDomainCompleteEvent', - 'ApplicationUpgradeRollbackCompleteEvent', - 'ApplicationUpgradeRollbackStartEvent', - 'ApplicationUpgradeStartEvent', - 'DeployedApplicationHealthReportCreatedEvent', + 'ApplicationUpgradeCompletedEvent', + 'ApplicationUpgradeDomainCompletedEvent', + 'ApplicationUpgradeRollbackCompletedEvent', + 'ApplicationUpgradeRollbackStartedEvent', + 'ApplicationUpgradeStartedEvent', + 'DeployedApplicationNewHealthReportEvent', 'DeployedApplicationHealthReportExpiredEvent', - 'ProcessDeactivatedEvent', - 'ContainerDeactivatedEvent', + 'ApplicationProcessExitedEvent', + 'ApplicationContainerInstanceExitedEvent', 'NodeAbortedEvent', - 'NodeAbortingEvent', - 'NodeAddedEvent', - 'NodeCloseEvent', - 
'NodeClosingEvent', - 'NodeDeactivateCompleteEvent', - 'NodeDeactivateStartEvent', + 'NodeAddedToClusterEvent', + 'NodeClosedEvent', + 'NodeDeactivateCompletedEvent', + 'NodeDeactivateStartedEvent', 'NodeDownEvent', - 'NodeHealthReportCreatedEvent', + 'NodeNewHealthReportEvent', 'NodeHealthReportExpiredEvent', - 'NodeOpenedSuccessEvent', + 'NodeOpenSucceededEvent', 'NodeOpenFailedEvent', - 'NodeOpeningEvent', - 'NodeRemovedEvent', + 'NodeRemovedFromClusterEvent', 'NodeUpEvent', - 'PartitionHealthReportCreatedEvent', + 'PartitionNewHealthReportEvent', 'PartitionHealthReportExpiredEvent', - 'PartitionReconfigurationCompletedEvent', + 'PartitionReconfiguredEvent', 'PartitionPrimaryMoveAnalysisEvent', 'ServiceCreatedEvent', 'ServiceDeletedEvent', - 'ServiceHealthReportCreatedEvent', + 'ServiceNewHealthReportEvent', 'ServiceHealthReportExpiredEvent', - 'DeployedServiceHealthReportCreatedEvent', - 'DeployedServiceHealthReportExpiredEvent', - 'StatefulReplicaHealthReportCreatedEvent', + 'DeployedServicePackageNewHealthReportEvent', + 'DeployedServicePackageHealthReportExpiredEvent', + 'StatefulReplicaNewHealthReportEvent', 'StatefulReplicaHealthReportExpiredEvent', - 'StatelessReplicaHealthReportCreatedEvent', + 'StatelessReplicaNewHealthReportEvent', 'StatelessReplicaHealthReportExpiredEvent', - 'ClusterHealthReportCreatedEvent', + 'ClusterNewHealthReportEvent', 'ClusterHealthReportExpiredEvent', - 'ClusterUpgradeCompleteEvent', - 'ClusterUpgradeDomainCompleteEvent', - 'ClusterUpgradeRollbackCompleteEvent', - 'ClusterUpgradeRollbackStartEvent', - 'ClusterUpgradeStartEvent', + 'ClusterUpgradeCompletedEvent', + 'ClusterUpgradeDomainCompletedEvent', + 'ClusterUpgradeRollbackCompletedEvent', + 'ClusterUpgradeRollbackStartedEvent', + 'ClusterUpgradeStartedEvent', 'ChaosStoppedEvent', 'ChaosStartedEvent', - 'ChaosRestartNodeFaultCompletedEvent', - 'ChaosRestartCodePackageFaultScheduledEvent', - 'ChaosRestartCodePackageFaultCompletedEvent', - 
'ChaosRemoveReplicaFaultScheduledEvent', - 'ChaosRemoveReplicaFaultCompletedEvent', - 'ChaosMoveSecondaryFaultScheduledEvent', - 'ChaosMovePrimaryFaultScheduledEvent', - 'ChaosRestartReplicaFaultScheduledEvent', - 'ChaosRestartNodeFaultScheduledEvent', - 'ServiceResourceDescription', - 'DiagnosticsSinkProperties', - 'DiagnosticsDescription', - 'ApplicationResourceDescription', - 'PagedServiceResourceDescriptionList', - 'ServiceResourceReplicaDescription', - 'PagedServiceResourceReplicaDescriptionList', + 'ChaosCodePackageRestartScheduledEvent', + 'ChaosReplicaRemovalScheduledEvent', + 'ChaosPartitionSecondaryMoveScheduledEvent', + 'ChaosPartitionPrimaryMoveScheduledEvent', + 'ChaosReplicaRestartScheduledEvent', + 'ChaosNodeRestartScheduledEvent', + 'SecretResourceProperties', + 'InlinedValueSecretResourceProperties', + 'SecretResourceDescription', + 'PagedSecretResourceDescriptionList', + 'SecretResourcePropertiesBase', + 'SecretValue', + 'SecretValueProperties', + 'SecretValueResourceDescription', + 'PagedSecretValueResourceDescriptionList', 'VolumeProviderParametersAzureFile', + 'VolumeReference', + 'ApplicationScopedVolumeCreationParameters', + 'ApplicationScopedVolume', + 'ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk', 'VolumeResourceDescription', + 'PagedVolumeResourceDescriptionList', + 'NetworkResourceProperties', + 'LocalNetworkResourceProperties', + 'EndpointRef', + 'NetworkRef', + 'NetworkResourceDescription', + 'NetworkResourcePropertiesBase', + 'PagedNetworkResourceDescriptionList', + 'GatewayDestination', + 'TcpConfig', + 'HttpRouteMatchPath', + 'HttpRouteMatchHeader', + 'HttpRouteMatchRule', + 'HttpRouteConfig', + 'HttpHostConfig', + 'HttpConfig', + 'GatewayResourceDescription', + 'PagedGatewayResourceDescriptionList', 'ImageRegistryCredential', 'EnvironmentVariable', 'Setting', @@ -1412,16 +1517,33 @@ 'ResourceRequests', 'ResourceLimits', 'ResourceRequirements', - 'ContainerVolume', + 'DiagnosticsRef', + 'ReliableCollectionsRef', 
'ContainerState', 'ContainerEvent', 'ContainerInstanceView', - 'DiagnosticsRef', 'ContainerCodePackageProperties', - 'NetworkRef', + 'AutoScalingTrigger', + 'AutoScalingMechanism', + 'AutoScalingPolicy', + 'ServiceResourceDescription', + 'DiagnosticsSinkProperties', + 'DiagnosticsDescription', + 'AzureInternalMonitoringPipelineSinkDescription', + 'AddRemoveReplicaScalingMechanism', + 'AutoScalingMetric', + 'AutoScalingResourceMetric', + 'ServiceProperties', 'ServiceReplicaProperties', - 'ApplicationDefinitionKind', + 'ServiceReplicaDescription', + 'AverageLoadScalingTrigger', + 'PagedServiceResourceDescriptionList', + 'PagedServiceReplicaDescriptionList', + 'ApplicationResourceDescription', + 'PagedApplicationResourceDescriptionList', 'HealthState', + 'FabricErrorCodes', + 'ApplicationDefinitionKind', 'ApplicationStatus', 'ApplicationPackageCleanupPolicy', 'ApplicationTypeDefinitionKind', @@ -1440,7 +1562,6 @@ 'ReconfigurationPhase', 'ReconfigurationType', 'EntityKind', - 'FabricErrorCodes', 'FabricEventKind', 'HealthEvaluationKind', 'NodeDeactivationIntent', @@ -1483,6 +1604,7 @@ 'PropertyValueKind', 'PropertyBatchOperationKind', 'PropertyBatchInfoKind', + 'RetentionPolicyType', 'BackupStorageKind', 'BackupScheduleKind', 'BackupPolicyScope', @@ -1501,10 +1623,19 @@ 'RepairTaskHealthCheckState', 'ScalingTriggerKind', 'ScalingMechanismKind', - 'ServiceResourceStatus', - 'ApplicationResourceStatus', + 'ResourceStatus', + 'SecretKind', + 'VolumeProvider', + 'SizeTypes', + 'ApplicationScopedVolumeKind', + 'NetworkKind', + 'HeaderMatchType', + 'OperatingSystemType', 'DiagnosticsSinkKind', - 'OperatingSystemTypes', + 'AutoScalingMechanismKind', + 'AutoScalingMetricKind', + 'AutoScalingResourceMetricName', + 'AutoScalingTriggerKind', 'NodeStatusFilter', 'ReplicaHealthReportServiceKind', 'DataLossMode', diff --git a/azure-servicefabric/azure/servicefabric/models/add_remove_replica_scaling_mechanism.py 
b/azure-servicefabric/azure/servicefabric/models/add_remove_replica_scaling_mechanism.py new file mode 100644 index 000000000000..6332ff38fa34 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/add_remove_replica_scaling_mechanism.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .auto_scaling_mechanism import AutoScalingMechanism + + +class AddRemoveReplicaScalingMechanism(AutoScalingMechanism): + """Describes the horizontal auto scaling mechanism that adds or removes + replicas (containers or container groups). + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param min_count: Required. Minimum number of containers (scale down won't + be performed below this number). + :type min_count: int + :param max_count: Required. Maximum number of containers (scale up won't + be performed above this number). + :type max_count: int + :param scale_increment: Required. Each time auto scaling is performed, + this number of containers will be added or removed. 
+ :type scale_increment: int + """ + + _validation = { + 'kind': {'required': True}, + 'min_count': {'required': True}, + 'max_count': {'required': True}, + 'scale_increment': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'min_count': {'key': 'minCount', 'type': 'int'}, + 'max_count': {'key': 'maxCount', 'type': 'int'}, + 'scale_increment': {'key': 'scaleIncrement', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AddRemoveReplicaScalingMechanism, self).__init__(**kwargs) + self.min_count = kwargs.get('min_count', None) + self.max_count = kwargs.get('max_count', None) + self.scale_increment = kwargs.get('scale_increment', None) + self.kind = 'AddRemoveReplica' diff --git a/azure-servicefabric/azure/servicefabric/models/add_remove_replica_scaling_mechanism_py3.py b/azure-servicefabric/azure/servicefabric/models/add_remove_replica_scaling_mechanism_py3.py new file mode 100644 index 000000000000..ce862f88deb9 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/add_remove_replica_scaling_mechanism_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .auto_scaling_mechanism_py3 import AutoScalingMechanism + + +class AddRemoveReplicaScalingMechanism(AutoScalingMechanism): + """Describes the horizontal auto scaling mechanism that adds or removes + replicas (containers or container groups). + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + :param min_count: Required. Minimum number of containers (scale down won't + be performed below this number). + :type min_count: int + :param max_count: Required. Maximum number of containers (scale up won't + be performed above this number). + :type max_count: int + :param scale_increment: Required. Each time auto scaling is performed, + this number of containers will be added or removed. + :type scale_increment: int + """ + + _validation = { + 'kind': {'required': True}, + 'min_count': {'required': True}, + 'max_count': {'required': True}, + 'scale_increment': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'min_count': {'key': 'minCount', 'type': 'int'}, + 'max_count': {'key': 'maxCount', 'type': 'int'}, + 'scale_increment': {'key': 'scaleIncrement', 'type': 'int'}, + } + + def __init__(self, *, min_count: int, max_count: int, scale_increment: int, **kwargs) -> None: + super(AddRemoveReplicaScalingMechanism, self).__init__(**kwargs) + self.min_count = min_count + self.max_count = max_count + self.scale_increment = scale_increment + self.kind = 'AddRemoveReplica' diff --git a/azure-servicefabric/azure/servicefabric/models/container_deactivated_event.py b/azure-servicefabric/azure/servicefabric/models/application_container_instance_exited_event.py similarity index 93% rename from azure-servicefabric/azure/servicefabric/models/container_deactivated_event.py rename to azure-servicefabric/azure/servicefabric/models/application_container_instance_exited_event.py index 4eb13b32cd63..8556c736888b 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_deactivated_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_container_instance_exited_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ContainerDeactivatedEvent(ApplicationEvent): - """Container Deactivated event. 
+class ApplicationContainerInstanceExitedEvent(ApplicationEvent): + """Container Exited event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -84,6 +86,7 @@ class ContainerDeactivatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -103,7 +106,7 @@ class ContainerDeactivatedEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ContainerDeactivatedEvent, self).__init__(**kwargs) + super(ApplicationContainerInstanceExitedEvent, self).__init__(**kwargs) self.service_name = kwargs.get('service_name', None) self.service_package_name = kwargs.get('service_package_name', None) self.service_package_activation_id = kwargs.get('service_package_activation_id', None) @@ -116,4 +119,4 @@ def __init__(self, **kwargs): self.exit_code = kwargs.get('exit_code', None) self.unexpected_termination = kwargs.get('unexpected_termination', None) self.start_time = kwargs.get('start_time', None) - self.kind = 'ContainerDeactivated' + self.kind = 'ApplicationContainerInstanceExited' diff --git a/azure-servicefabric/azure/servicefabric/models/container_deactivated_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_container_instance_exited_event_py3.py similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/container_deactivated_event_py3.py rename to 
azure-servicefabric/azure/servicefabric/models/application_container_instance_exited_event_py3.py index 3c352b57c7f3..0f0451ca2485 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_deactivated_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_container_instance_exited_event_py3.py @@ -12,14 +12,16 @@ from .application_event_py3 import ApplicationEvent -class ContainerDeactivatedEvent(ApplicationEvent): - """Container Deactivated event. +class ApplicationContainerInstanceExitedEvent(ApplicationEvent): + """Container Exited event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -84,6 +86,7 @@ class ContainerDeactivatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -102,8 +105,8 @@ class ContainerDeactivatedEvent(ApplicationEvent): 'start_time': {'key': 'StartTime', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_name: str, service_package_name: str, service_package_activation_id: str, is_exclusive: bool, code_package_name: str, entry_point_type: str, image_name: str, container_name: str, host_id: str, exit_code: int, unexpected_termination: bool, start_time, has_correlated_events: bool=None, **kwargs) -> None: - super(ContainerDeactivatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, 
has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_name: str, service_package_name: str, service_package_activation_id: str, is_exclusive: bool, code_package_name: str, entry_point_type: str, image_name: str, container_name: str, host_id: str, exit_code: int, unexpected_termination: bool, start_time, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationContainerInstanceExitedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.service_name = service_name self.service_package_name = service_package_name self.service_package_activation_id = service_package_activation_id @@ -116,4 +119,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, s self.exit_code = exit_code self.unexpected_termination = unexpected_termination self.start_time = start_time - self.kind = 'ContainerDeactivated' + self.kind = 'ApplicationContainerInstanceExited' diff --git a/azure-servicefabric/azure/servicefabric/models/application_created_event.py b/azure-servicefabric/azure/servicefabric/models/application_created_event.py index 6ef425b974f6..bd190e771776 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_created_event.py @@ -20,6 +20,8 @@ class ApplicationCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -55,6 +57,7 @@ class ApplicationCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/application_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_created_event_py3.py index e9d7a11459a9..849b60bac24e 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_created_event_py3.py @@ -20,6 +20,8 @@ class ApplicationCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -55,6 +57,7 @@ class ApplicationCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -64,8 +67,8 @@ class ApplicationCreatedEvent(ApplicationEvent): 'application_definition_kind': {'key': 'ApplicationDefinitionKind', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, application_definition_kind: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, application_definition_kind: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationCreatedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.application_type_version = application_type_version self.application_definition_kind = application_definition_kind diff --git a/azure-servicefabric/azure/servicefabric/models/application_deleted_event.py b/azure-servicefabric/azure/servicefabric/models/application_deleted_event.py index ea39af05955a..440bd67aa311 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_deleted_event.py +++ 
b/azure-servicefabric/azure/servicefabric/models/application_deleted_event.py @@ -20,6 +20,8 @@ class ApplicationDeletedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -52,6 +54,7 @@ class ApplicationDeletedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/application_deleted_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_deleted_event_py3.py index 8b53a2fdc0a0..b6aba0733ef3 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_deleted_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_deleted_event_py3.py @@ -20,6 +20,8 @@ class ApplicationDeletedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -52,6 +54,7 @@ class ApplicationDeletedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -60,8 +63,8 @@ class ApplicationDeletedEvent(ApplicationEvent): 'application_type_version': {'key': 'ApplicationTypeVersion', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationDeletedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationDeletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.application_type_version = application_type_version self.kind = 'ApplicationDeleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_event.py b/azure-servicefabric/azure/servicefabric/models/application_event.py index 512b5552c189..d85913a077a7 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_event.py @@ -17,22 +17,24 @@ class ApplicationEvent(FabricEvent): You probably want to use the 
sub-classes and not this class directly. Known sub-classes are: ApplicationCreatedEvent, ApplicationDeletedEvent, - ApplicationHealthReportCreatedEvent, ApplicationHealthReportExpiredEvent, - ApplicationUpgradeCompleteEvent, ApplicationUpgradeDomainCompleteEvent, - ApplicationUpgradeRollbackCompleteEvent, - ApplicationUpgradeRollbackStartEvent, ApplicationUpgradeStartEvent, - DeployedApplicationHealthReportCreatedEvent, - DeployedApplicationHealthReportExpiredEvent, ProcessDeactivatedEvent, - ContainerDeactivatedEvent, DeployedServiceHealthReportCreatedEvent, - DeployedServiceHealthReportExpiredEvent, - ChaosRestartCodePackageFaultScheduledEvent, - ChaosRestartCodePackageFaultCompletedEvent + ApplicationNewHealthReportEvent, ApplicationHealthReportExpiredEvent, + ApplicationUpgradeCompletedEvent, ApplicationUpgradeDomainCompletedEvent, + ApplicationUpgradeRollbackCompletedEvent, + ApplicationUpgradeRollbackStartedEvent, ApplicationUpgradeStartedEvent, + DeployedApplicationNewHealthReportEvent, + DeployedApplicationHealthReportExpiredEvent, ApplicationProcessExitedEvent, + ApplicationContainerInstanceExitedEvent, + DeployedServicePackageNewHealthReportEvent, + DeployedServicePackageHealthReportExpiredEvent, + ChaosCodePackageRestartScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -59,6 +61,7 @@ class ApplicationEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -66,7 +69,7 @@ class ApplicationEvent(FabricEvent): } _subtype_map = { - 'kind': {'ApplicationCreated': 'ApplicationCreatedEvent', 'ApplicationDeleted': 'ApplicationDeletedEvent', 'ApplicationHealthReportCreated': 'ApplicationHealthReportCreatedEvent', 'ApplicationHealthReportExpired': 'ApplicationHealthReportExpiredEvent', 'ApplicationUpgradeComplete': 'ApplicationUpgradeCompleteEvent', 'ApplicationUpgradeDomainComplete': 'ApplicationUpgradeDomainCompleteEvent', 'ApplicationUpgradeRollbackComplete': 'ApplicationUpgradeRollbackCompleteEvent', 'ApplicationUpgradeRollbackStart': 'ApplicationUpgradeRollbackStartEvent', 'ApplicationUpgradeStart': 'ApplicationUpgradeStartEvent', 'DeployedApplicationHealthReportCreated': 'DeployedApplicationHealthReportCreatedEvent', 'DeployedApplicationHealthReportExpired': 'DeployedApplicationHealthReportExpiredEvent', 'ProcessDeactivated': 'ProcessDeactivatedEvent', 'ContainerDeactivated': 'ContainerDeactivatedEvent', 'DeployedServiceHealthReportCreated': 'DeployedServiceHealthReportCreatedEvent', 'DeployedServiceHealthReportExpired': 'DeployedServiceHealthReportExpiredEvent', 'ChaosRestartCodePackageFaultScheduled': 'ChaosRestartCodePackageFaultScheduledEvent', 'ChaosRestartCodePackageFaultCompleted': 'ChaosRestartCodePackageFaultCompletedEvent'} + 'kind': {'ApplicationCreated': 'ApplicationCreatedEvent', 'ApplicationDeleted': 'ApplicationDeletedEvent', 'ApplicationNewHealthReport': 'ApplicationNewHealthReportEvent', 'ApplicationHealthReportExpired': 
'ApplicationHealthReportExpiredEvent', 'ApplicationUpgradeCompleted': 'ApplicationUpgradeCompletedEvent', 'ApplicationUpgradeDomainCompleted': 'ApplicationUpgradeDomainCompletedEvent', 'ApplicationUpgradeRollbackCompleted': 'ApplicationUpgradeRollbackCompletedEvent', 'ApplicationUpgradeRollbackStarted': 'ApplicationUpgradeRollbackStartedEvent', 'ApplicationUpgradeStarted': 'ApplicationUpgradeStartedEvent', 'DeployedApplicationNewHealthReport': 'DeployedApplicationNewHealthReportEvent', 'DeployedApplicationHealthReportExpired': 'DeployedApplicationHealthReportExpiredEvent', 'ApplicationProcessExited': 'ApplicationProcessExitedEvent', 'ApplicationContainerInstanceExited': 'ApplicationContainerInstanceExitedEvent', 'DeployedServicePackageNewHealthReport': 'DeployedServicePackageNewHealthReportEvent', 'DeployedServicePackageHealthReportExpired': 'DeployedServicePackageHealthReportExpiredEvent', 'ChaosCodePackageRestartScheduled': 'ChaosCodePackageRestartScheduledEvent'} } def __init__(self, **kwargs): diff --git a/azure-servicefabric/azure/servicefabric/models/application_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_event_py3.py index 5c0f80993ae9..2383eef84059 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_event_py3.py @@ -17,22 +17,24 @@ class ApplicationEvent(FabricEvent): You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: ApplicationCreatedEvent, ApplicationDeletedEvent, - ApplicationHealthReportCreatedEvent, ApplicationHealthReportExpiredEvent, - ApplicationUpgradeCompleteEvent, ApplicationUpgradeDomainCompleteEvent, - ApplicationUpgradeRollbackCompleteEvent, - ApplicationUpgradeRollbackStartEvent, ApplicationUpgradeStartEvent, - DeployedApplicationHealthReportCreatedEvent, - DeployedApplicationHealthReportExpiredEvent, ProcessDeactivatedEvent, - ContainerDeactivatedEvent, DeployedServiceHealthReportCreatedEvent, - DeployedServiceHealthReportExpiredEvent, - ChaosRestartCodePackageFaultScheduledEvent, - ChaosRestartCodePackageFaultCompletedEvent + ApplicationNewHealthReportEvent, ApplicationHealthReportExpiredEvent, + ApplicationUpgradeCompletedEvent, ApplicationUpgradeDomainCompletedEvent, + ApplicationUpgradeRollbackCompletedEvent, + ApplicationUpgradeRollbackStartedEvent, ApplicationUpgradeStartedEvent, + DeployedApplicationNewHealthReportEvent, + DeployedApplicationHealthReportExpiredEvent, ApplicationProcessExitedEvent, + ApplicationContainerInstanceExitedEvent, + DeployedServicePackageNewHealthReportEvent, + DeployedServicePackageHealthReportExpiredEvent, + ChaosCodePackageRestartScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -59,6 +61,7 @@ class ApplicationEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -66,10 +69,10 @@ class ApplicationEvent(FabricEvent): } _subtype_map = { - 'kind': {'ApplicationCreated': 'ApplicationCreatedEvent', 'ApplicationDeleted': 'ApplicationDeletedEvent', 'ApplicationHealthReportCreated': 'ApplicationHealthReportCreatedEvent', 'ApplicationHealthReportExpired': 'ApplicationHealthReportExpiredEvent', 'ApplicationUpgradeComplete': 'ApplicationUpgradeCompleteEvent', 'ApplicationUpgradeDomainComplete': 'ApplicationUpgradeDomainCompleteEvent', 'ApplicationUpgradeRollbackComplete': 'ApplicationUpgradeRollbackCompleteEvent', 'ApplicationUpgradeRollbackStart': 'ApplicationUpgradeRollbackStartEvent', 'ApplicationUpgradeStart': 'ApplicationUpgradeStartEvent', 'DeployedApplicationHealthReportCreated': 'DeployedApplicationHealthReportCreatedEvent', 'DeployedApplicationHealthReportExpired': 'DeployedApplicationHealthReportExpiredEvent', 'ProcessDeactivated': 'ProcessDeactivatedEvent', 'ContainerDeactivated': 'ContainerDeactivatedEvent', 'DeployedServiceHealthReportCreated': 'DeployedServiceHealthReportCreatedEvent', 'DeployedServiceHealthReportExpired': 'DeployedServiceHealthReportExpiredEvent', 'ChaosRestartCodePackageFaultScheduled': 'ChaosRestartCodePackageFaultScheduledEvent', 'ChaosRestartCodePackageFaultCompleted': 'ChaosRestartCodePackageFaultCompletedEvent'} + 'kind': {'ApplicationCreated': 'ApplicationCreatedEvent', 'ApplicationDeleted': 'ApplicationDeletedEvent', 'ApplicationNewHealthReport': 'ApplicationNewHealthReportEvent', 'ApplicationHealthReportExpired': 
'ApplicationHealthReportExpiredEvent', 'ApplicationUpgradeCompleted': 'ApplicationUpgradeCompletedEvent', 'ApplicationUpgradeDomainCompleted': 'ApplicationUpgradeDomainCompletedEvent', 'ApplicationUpgradeRollbackCompleted': 'ApplicationUpgradeRollbackCompletedEvent', 'ApplicationUpgradeRollbackStarted': 'ApplicationUpgradeRollbackStartedEvent', 'ApplicationUpgradeStarted': 'ApplicationUpgradeStartedEvent', 'DeployedApplicationNewHealthReport': 'DeployedApplicationNewHealthReportEvent', 'DeployedApplicationHealthReportExpired': 'DeployedApplicationHealthReportExpiredEvent', 'ApplicationProcessExited': 'ApplicationProcessExitedEvent', 'ApplicationContainerInstanceExited': 'ApplicationContainerInstanceExitedEvent', 'DeployedServicePackageNewHealthReport': 'DeployedServicePackageNewHealthReportEvent', 'DeployedServicePackageHealthReportExpired': 'DeployedServicePackageHealthReportExpiredEvent', 'ChaosCodePackageRestartScheduled': 'ChaosCodePackageRestartScheduledEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.application_id = application_id self.kind = 'ApplicationEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event.py index 24a2105ef9f5..d2771d14aeab 100644 --- 
a/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event.py @@ -20,6 +20,8 @@ class ApplicationHealthReportExpiredEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ApplicationHealthReportExpiredEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event_py3.py index 635fd274de03..f4beb2ceda7c 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class ApplicationHealthReportExpiredEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ApplicationHealthReportExpiredEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -89,8 +92,8 @@ class ApplicationHealthReportExpiredEvent(ApplicationEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_instance_id = application_instance_id self.source_id = source_id self.property = property diff --git a/azure-servicefabric/azure/servicefabric/models/application_health_report_created_event.py 
b/azure-servicefabric/azure/servicefabric/models/application_new_health_report_event.py similarity index 93% rename from azure-servicefabric/azure/servicefabric/models/application_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/application_new_health_report_event.py index 0f8fd5493458..e45d2b4f76ea 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_new_health_report_event.py @@ -12,7 +12,7 @@ from .application_event import ApplicationEvent -class ApplicationHealthReportCreatedEvent(ApplicationEvent): +class ApplicationNewHealthReportEvent(ApplicationEvent): """Application Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ApplicationHealthReportCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ApplicationHealthReportCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -90,7 +93,7 @@ class ApplicationHealthReportCreatedEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ApplicationHealthReportCreatedEvent, self).__init__(**kwargs) + super(ApplicationNewHealthReportEvent, self).__init__(**kwargs) self.application_instance_id = kwargs.get('application_instance_id', None) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) @@ -100,4 +103,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'ApplicationHealthReportCreated' + self.kind = 'ApplicationNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/application_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_new_health_report_event_py3.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/application_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_new_health_report_event_py3.py index 774b8a6f26cb..912b7d4f21d3 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .application_event_py3 import ApplicationEvent -class 
ApplicationHealthReportCreatedEvent(ApplicationEvent): +class ApplicationNewHealthReportEvent(ApplicationEvent): """Application Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ApplicationHealthReportCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ApplicationHealthReportCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -89,8 +92,8 @@ class ApplicationHealthReportCreatedEvent(ApplicationEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, 
has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_instance_id = application_instance_id self.source_id = source_id self.property = property @@ -100,4 +103,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, a self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'ApplicationHealthReportCreated' + self.kind = 'ApplicationNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/process_deactivated_event.py b/azure-servicefabric/azure/servicefabric/models/application_process_exited_event.py similarity index 94% rename from azure-servicefabric/azure/servicefabric/models/process_deactivated_event.py rename to azure-servicefabric/azure/servicefabric/models/application_process_exited_event.py index f1b245f1d488..56578ff3ca7a 100644 --- a/azure-servicefabric/azure/servicefabric/models/process_deactivated_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_process_exited_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ProcessDeactivatedEvent(ApplicationEvent): - """Process Deactivated event. +class ApplicationProcessExitedEvent(ApplicationEvent): + """Process Exited event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -84,6 +86,7 @@ class ProcessDeactivatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -103,7 +106,7 @@ class ProcessDeactivatedEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ProcessDeactivatedEvent, self).__init__(**kwargs) + super(ApplicationProcessExitedEvent, self).__init__(**kwargs) self.service_name = kwargs.get('service_name', None) self.service_package_name = kwargs.get('service_package_name', None) self.service_package_activation_id = kwargs.get('service_package_activation_id', None) @@ -116,4 +119,4 @@ def __init__(self, **kwargs): self.exit_code = kwargs.get('exit_code', None) self.unexpected_termination = kwargs.get('unexpected_termination', None) self.start_time = kwargs.get('start_time', None) - self.kind = 'ProcessDeactivated' + self.kind = 'ApplicationProcessExited' diff --git a/azure-servicefabric/azure/servicefabric/models/process_deactivated_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_process_exited_event_py3.py similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/process_deactivated_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_process_exited_event_py3.py index 431acd549361..26e7215fa476 100644 --- a/azure-servicefabric/azure/servicefabric/models/process_deactivated_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_process_exited_event_py3.py @@ -12,14 +12,16 @@ from .application_event_py3 import ApplicationEvent -class ProcessDeactivatedEvent(ApplicationEvent): - """Process Deactivated event. 
+class ApplicationProcessExitedEvent(ApplicationEvent): + """Process Exited event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -84,6 +86,7 @@ class ProcessDeactivatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -102,8 +105,8 @@ class ProcessDeactivatedEvent(ApplicationEvent): 'start_time': {'key': 'StartTime', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_name: str, service_package_name: str, service_package_activation_id: str, is_exclusive: bool, code_package_name: str, entry_point_type: str, exe_name: str, process_id: int, host_id: str, exit_code: int, unexpected_termination: bool, start_time, has_correlated_events: bool=None, **kwargs) -> None: - super(ProcessDeactivatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_name: str, service_package_name: str, service_package_activation_id: str, is_exclusive: bool, code_package_name: str, entry_point_type: str, exe_name: str, process_id: int, host_id: str, exit_code: int, unexpected_termination: bool, start_time, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + 
super(ApplicationProcessExitedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.service_name = service_name self.service_package_name = service_package_name self.service_package_activation_id = service_package_activation_id @@ -116,4 +119,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, s self.exit_code = exit_code self.unexpected_termination = unexpected_termination self.start_time = start_time - self.kind = 'ProcessDeactivated' + self.kind = 'ApplicationProcessExited' diff --git a/azure-servicefabric/azure/servicefabric/models/application_resource_description.py b/azure-servicefabric/azure/servicefabric/models/application_resource_description.py index 52b38c02c723..9515a3479ede 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_resource_description.py +++ b/azure-servicefabric/azure/servicefabric/models/application_resource_description.py @@ -13,20 +13,37 @@ class ApplicationResourceDescription(Model): - """Describes a service fabric application resource. + """This type describes a application resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. + :param name: Required. Name of the Application resource. + :type name: str :param description: User readable description of the application. :type description: str - :param debug_params: Internal use. - :type debug_params: str - :param services: describes the services in the application. + :param services: Describes the services in the application. This property + is used to create or modify services of the application. On get only the + name of the service is returned. The service description can be obtained + by querying for the service resource. 
:type services: list[~azure.servicefabric.models.ServiceResourceDescription] + :param diagnostics: Describes the diagnostics definition and usage for an + application resource. + :type diagnostics: ~azure.servicefabric.models.DiagnosticsDescription + :param debug_params: Internal - used by Visual Studio to setup the + debugging session on the local development environment. + :type debug_params: str + :ivar service_names: Names of the services in the application. + :vartype service_names: list[str] + :ivar status: Status of the application. Possible values include: + 'Unknown', 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the application. + :vartype status_details: str :ivar health_state: Describes the health state of an application resource. Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' :vartype health_state: str or ~azure.servicefabric.models.HealthState @@ -34,53 +51,39 @@ class ApplicationResourceDescription(Model): 'Ok', this additional details from service fabric Health Manager for the user to know why the application is marked unhealthy. :vartype unhealthy_evaluation: str - :ivar status: Status of the application resource. Possible values include: - 'Invalid', 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' - :vartype status: str or - ~azure.servicefabric.models.ApplicationResourceStatus - :ivar status_details: Gives additional information about the current - status of the application deployment. - :vartype status_details: str - :ivar service_names: Names of the services in the application. - :vartype service_names: list[str] - :param diagnostics: Describes the diagnostics definition and usage for an - application resource. - :type diagnostics: ~azure.servicefabric.models.DiagnosticsDescription - :param name: Required. Application resource name. 
- :type name: str """ _validation = { - 'health_state': {'readonly': True}, - 'unhealthy_evaluation': {'readonly': True}, + 'name': {'required': True}, + 'service_names': {'readonly': True}, 'status': {'readonly': True}, 'status_details': {'readonly': True}, - 'service_names': {'readonly': True}, - 'name': {'required': True}, + 'health_state': {'readonly': True}, + 'unhealthy_evaluation': {'readonly': True}, } _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, - 'debug_params': {'key': 'properties.debugParams', 'type': 'str'}, 'services': {'key': 'properties.services', 'type': '[ServiceResourceDescription]'}, - 'health_state': {'key': 'properties.healthState', 'type': 'str'}, - 'unhealthy_evaluation': {'key': 'properties.unhealthyEvaluation', 'type': 'str'}, + 'diagnostics': {'key': 'properties.diagnostics', 'type': 'DiagnosticsDescription'}, + 'debug_params': {'key': 'properties.debugParams', 'type': 'str'}, + 'service_names': {'key': 'properties.serviceNames', 'type': '[str]'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'status_details': {'key': 'properties.statusDetails', 'type': 'str'}, - 'service_names': {'key': 'properties.serviceNames', 'type': '[str]'}, - 'diagnostics': {'key': 'properties.diagnostics', 'type': 'DiagnosticsDescription'}, - 'name': {'key': 'name', 'type': 'str'}, + 'health_state': {'key': 'properties.healthState', 'type': 'str'}, + 'unhealthy_evaluation': {'key': 'properties.unhealthyEvaluation', 'type': 'str'}, } def __init__(self, **kwargs): super(ApplicationResourceDescription, self).__init__(**kwargs) + self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) - self.debug_params = kwargs.get('debug_params', None) self.services = kwargs.get('services', None) - self.health_state = None - self.unhealthy_evaluation = None + self.diagnostics = kwargs.get('diagnostics', None) + self.debug_params = kwargs.get('debug_params', 
None) + self.service_names = None self.status = None self.status_details = None - self.service_names = None - self.diagnostics = kwargs.get('diagnostics', None) - self.name = kwargs.get('name', None) + self.health_state = None + self.unhealthy_evaluation = None diff --git a/azure-servicefabric/azure/servicefabric/models/application_resource_description_py3.py b/azure-servicefabric/azure/servicefabric/models/application_resource_description_py3.py index 8f763cd5c83a..6fe90cd50cc7 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_resource_description_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_resource_description_py3.py @@ -13,20 +13,37 @@ class ApplicationResourceDescription(Model): - """Describes a service fabric application resource. + """This type describes a application resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. + :param name: Required. Name of the Application resource. + :type name: str :param description: User readable description of the application. :type description: str - :param debug_params: Internal use. - :type debug_params: str - :param services: describes the services in the application. + :param services: Describes the services in the application. This property + is used to create or modify services of the application. On get only the + name of the service is returned. The service description can be obtained + by querying for the service resource. :type services: list[~azure.servicefabric.models.ServiceResourceDescription] + :param diagnostics: Describes the diagnostics definition and usage for an + application resource. + :type diagnostics: ~azure.servicefabric.models.DiagnosticsDescription + :param debug_params: Internal - used by Visual Studio to setup the + debugging session on the local development environment. 
+ :type debug_params: str + :ivar service_names: Names of the services in the application. + :vartype service_names: list[str] + :ivar status: Status of the application. Possible values include: + 'Unknown', 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the application. + :vartype status_details: str :ivar health_state: Describes the health state of an application resource. Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' :vartype health_state: str or ~azure.servicefabric.models.HealthState @@ -34,53 +51,39 @@ class ApplicationResourceDescription(Model): 'Ok', this additional details from service fabric Health Manager for the user to know why the application is marked unhealthy. :vartype unhealthy_evaluation: str - :ivar status: Status of the application resource. Possible values include: - 'Invalid', 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' - :vartype status: str or - ~azure.servicefabric.models.ApplicationResourceStatus - :ivar status_details: Gives additional information about the current - status of the application deployment. - :vartype status_details: str - :ivar service_names: Names of the services in the application. - :vartype service_names: list[str] - :param diagnostics: Describes the diagnostics definition and usage for an - application resource. - :type diagnostics: ~azure.servicefabric.models.DiagnosticsDescription - :param name: Required. Application resource name. 
- :type name: str """ _validation = { - 'health_state': {'readonly': True}, - 'unhealthy_evaluation': {'readonly': True}, + 'name': {'required': True}, + 'service_names': {'readonly': True}, 'status': {'readonly': True}, 'status_details': {'readonly': True}, - 'service_names': {'readonly': True}, - 'name': {'required': True}, + 'health_state': {'readonly': True}, + 'unhealthy_evaluation': {'readonly': True}, } _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, - 'debug_params': {'key': 'properties.debugParams', 'type': 'str'}, 'services': {'key': 'properties.services', 'type': '[ServiceResourceDescription]'}, - 'health_state': {'key': 'properties.healthState', 'type': 'str'}, - 'unhealthy_evaluation': {'key': 'properties.unhealthyEvaluation', 'type': 'str'}, + 'diagnostics': {'key': 'properties.diagnostics', 'type': 'DiagnosticsDescription'}, + 'debug_params': {'key': 'properties.debugParams', 'type': 'str'}, + 'service_names': {'key': 'properties.serviceNames', 'type': '[str]'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'status_details': {'key': 'properties.statusDetails', 'type': 'str'}, - 'service_names': {'key': 'properties.serviceNames', 'type': '[str]'}, - 'diagnostics': {'key': 'properties.diagnostics', 'type': 'DiagnosticsDescription'}, - 'name': {'key': 'name', 'type': 'str'}, + 'health_state': {'key': 'properties.healthState', 'type': 'str'}, + 'unhealthy_evaluation': {'key': 'properties.unhealthyEvaluation', 'type': 'str'}, } - def __init__(self, *, name: str, description: str=None, debug_params: str=None, services=None, diagnostics=None, **kwargs) -> None: + def __init__(self, *, name: str, description: str=None, services=None, diagnostics=None, debug_params: str=None, **kwargs) -> None: super(ApplicationResourceDescription, self).__init__(**kwargs) + self.name = name self.description = description - self.debug_params = debug_params self.services = services - 
self.health_state = None - self.unhealthy_evaluation = None + self.diagnostics = diagnostics + self.debug_params = debug_params + self.service_names = None self.status = None self.status_details = None - self.service_names = None - self.diagnostics = diagnostics - self.name = name + self.health_state = None + self.unhealthy_evaluation = None diff --git a/azure-servicefabric/azure/servicefabric/models/application_scoped_volume.py b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume.py new file mode 100644 index 000000000000..267c4a06d4ff --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .volume_reference import VolumeReference + + +class ApplicationScopedVolume(VolumeReference): + """Describes a volume whose lifetime is scoped to the application's lifetime. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the volume being referenced. + :type name: str + :param read_only: The flag indicating whether the volume is read only. + Default is 'false'. + :type read_only: bool + :param destination_path: Required. The path within the container at which + the volume should be mounted. Only valid path characters are allowed. + :type destination_path: str + :param creation_parameters: Required. Describes parameters for creating + application-scoped volumes. 
+ :type creation_parameters: + ~azure.servicefabric.models.ApplicationScopedVolumeCreationParameters + """ + + _validation = { + 'name': {'required': True}, + 'destination_path': {'required': True}, + 'creation_parameters': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'read_only': {'key': 'readOnly', 'type': 'bool'}, + 'destination_path': {'key': 'destinationPath', 'type': 'str'}, + 'creation_parameters': {'key': 'creationParameters', 'type': 'ApplicationScopedVolumeCreationParameters'}, + } + + def __init__(self, **kwargs): + super(ApplicationScopedVolume, self).__init__(**kwargs) + self.creation_parameters = kwargs.get('creation_parameters', None) diff --git a/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters.py b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters.py new file mode 100644 index 000000000000..b55aeb7bfb93 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ApplicationScopedVolumeCreationParameters(Model): + """Describes parameters for creating application-scoped volumes. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: + ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk + + All required parameters must be populated in order to send to Azure. 
+ + :param description: User readable description of the volume. + :type description: str + :param kind: Required. Constant filled by server. + :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'ServiceFabricVolumeDisk': 'ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk'} + } + + def __init__(self, **kwargs): + super(ApplicationScopedVolumeCreationParameters, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_py3.py b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_py3.py new file mode 100644 index 000000000000..27c8018f3bde --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ApplicationScopedVolumeCreationParameters(Model): + """Describes parameters for creating application-scoped volumes. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: + ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk + + All required parameters must be populated in order to send to Azure. 
+ + :param description: User readable description of the volume. + :type description: str + :param kind: Required. Constant filled by server. + :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'ServiceFabricVolumeDisk': 'ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk'} + } + + def __init__(self, *, description: str=None, **kwargs) -> None: + super(ApplicationScopedVolumeCreationParameters, self).__init__(**kwargs) + self.description = description + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_service_fabric_volume_disk.py b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_service_fabric_volume_disk.py new file mode 100644 index 000000000000..945d283d39a9 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_service_fabric_volume_disk.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .application_scoped_volume_creation_parameters import ApplicationScopedVolumeCreationParameters + + +class ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk(ApplicationScopedVolumeCreationParameters): + """Describes parameters for creating application-scoped volumes provided by + Service Fabric Volume Disks. 
+ + All required parameters must be populated in order to send to Azure. + + :param description: User readable description of the volume. + :type description: str + :param kind: Required. Constant filled by server. + :type kind: str + :param size_disk: Required. Volume size. Possible values include: 'Small', + 'Medium', 'Large' + :type size_disk: str or ~azure.servicefabric.models.SizeTypes + """ + + _validation = { + 'kind': {'required': True}, + 'size_disk': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'size_disk': {'key': 'sizeDisk', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk, self).__init__(**kwargs) + self.size_disk = kwargs.get('size_disk', None) + self.kind = 'ServiceFabricVolumeDisk' diff --git a/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_service_fabric_volume_disk_py3.py b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_service_fabric_volume_disk_py3.py new file mode 100644 index 000000000000..279e036168cc --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_creation_parameters_service_fabric_volume_disk_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .application_scoped_volume_creation_parameters_py3 import ApplicationScopedVolumeCreationParameters + + +class ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk(ApplicationScopedVolumeCreationParameters): + """Describes parameters for creating application-scoped volumes provided by + Service Fabric Volume Disks. + + All required parameters must be populated in order to send to Azure. + + :param description: User readable description of the volume. + :type description: str + :param kind: Required. Constant filled by server. + :type kind: str + :param size_disk: Required. Volume size. Possible values include: 'Small', + 'Medium', 'Large' + :type size_disk: str or ~azure.servicefabric.models.SizeTypes + """ + + _validation = { + 'kind': {'required': True}, + 'size_disk': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'size_disk': {'key': 'sizeDisk', 'type': 'str'}, + } + + def __init__(self, *, size_disk, description: str=None, **kwargs) -> None: + super(ApplicationScopedVolumeCreationParametersServiceFabricVolumeDisk, self).__init__(description=description, **kwargs) + self.size_disk = size_disk + self.kind = 'ServiceFabricVolumeDisk' diff --git a/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_py3.py b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_py3.py new file mode 100644 index 000000000000..0c07eaf0c9fe --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/application_scoped_volume_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .volume_reference_py3 import VolumeReference + + +class ApplicationScopedVolume(VolumeReference): + """Describes a volume whose lifetime is scoped to the application's lifetime. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the volume being referenced. + :type name: str + :param read_only: The flag indicating whether the volume is read only. + Default is 'false'. + :type read_only: bool + :param destination_path: Required. The path within the container at which + the volume should be mounted. Only valid path characters are allowed. + :type destination_path: str + :param creation_parameters: Required. Describes parameters for creating + application-scoped volumes. + :type creation_parameters: + ~azure.servicefabric.models.ApplicationScopedVolumeCreationParameters + """ + + _validation = { + 'name': {'required': True}, + 'destination_path': {'required': True}, + 'creation_parameters': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'read_only': {'key': 'readOnly', 'type': 'bool'}, + 'destination_path': {'key': 'destinationPath', 'type': 'str'}, + 'creation_parameters': {'key': 'creationParameters', 'type': 'ApplicationScopedVolumeCreationParameters'}, + } + + def __init__(self, *, name: str, destination_path: str, creation_parameters, read_only: bool=None, **kwargs) -> None: + super(ApplicationScopedVolume, self).__init__(name=name, read_only=read_only, destination_path=destination_path, **kwargs) + self.creation_parameters = creation_parameters diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_complete_event.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_completed_event.py 
similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_completed_event.py index 5b6ac13a69c8..ed48f2f62cfe 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_completed_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ApplicationUpgradeCompleteEvent(ApplicationEvent): - """Application Upgrade Complete event. +class ApplicationUpgradeCompletedEvent(ApplicationEvent): + """Application Upgrade Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -56,6 +58,7 @@ class ApplicationUpgradeCompleteEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -66,8 +69,8 @@ class ApplicationUpgradeCompleteEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ApplicationUpgradeCompleteEvent, self).__init__(**kwargs) + super(ApplicationUpgradeCompletedEvent, self).__init__(**kwargs) self.application_type_name = kwargs.get('application_type_name', None) self.application_type_version = kwargs.get('application_type_version', None) self.overall_upgrade_elapsed_time_in_ms = kwargs.get('overall_upgrade_elapsed_time_in_ms', None) - self.kind = 'ApplicationUpgradeComplete' + self.kind = 'ApplicationUpgradeCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_completed_event_py3.py similarity index 83% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_completed_event_py3.py index 1df9ee3fd20c..d3a3f0a705d7 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_completed_event_py3.py @@ -12,14 +12,16 @@ from .application_event_py3 import ApplicationEvent -class ApplicationUpgradeCompleteEvent(ApplicationEvent): - """Application Upgrade Complete event. +class ApplicationUpgradeCompletedEvent(ApplicationEvent): + """Application Upgrade Completed event. 
All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -56,6 +58,7 @@ class ApplicationUpgradeCompleteEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -65,9 +68,9 @@ class ApplicationUpgradeCompleteEvent(ApplicationEvent): 'overall_upgrade_elapsed_time_in_ms': {'key': 'OverallUpgradeElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, overall_upgrade_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationUpgradeCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, overall_upgrade_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationUpgradeCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.application_type_version = application_type_version 
self.overall_upgrade_elapsed_time_in_ms = overall_upgrade_elapsed_time_in_ms - self.kind = 'ApplicationUpgradeComplete' + self.kind = 'ApplicationUpgradeCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_complete_event.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_completed_event.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_completed_event.py index bc8ed044f826..292ec1c86e3d 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_completed_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ApplicationUpgradeDomainCompleteEvent(ApplicationEvent): - """Application Upgrade Domain Complete event. +class ApplicationUpgradeDomainCompletedEvent(ApplicationEvent): + """Application Upgrade Domain Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -67,6 +69,7 @@ class ApplicationUpgradeDomainCompleteEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -80,11 +83,11 @@ class ApplicationUpgradeDomainCompleteEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ApplicationUpgradeDomainCompleteEvent, self).__init__(**kwargs) + super(ApplicationUpgradeDomainCompletedEvent, self).__init__(**kwargs) self.application_type_name = kwargs.get('application_type_name', None) self.current_application_type_version = kwargs.get('current_application_type_version', None) self.application_type_version = kwargs.get('application_type_version', None) self.upgrade_state = kwargs.get('upgrade_state', None) self.upgrade_domains = kwargs.get('upgrade_domains', None) self.upgrade_domain_elapsed_time_in_ms = kwargs.get('upgrade_domain_elapsed_time_in_ms', None) - self.kind = 'ApplicationUpgradeDomainComplete' + self.kind = 'ApplicationUpgradeDomainCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_completed_event_py3.py similarity index 87% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_completed_event_py3.py index eefdd83e9f95..eb424ead9a87 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_domain_completed_event_py3.py @@ -12,14 +12,16 @@ 
from .application_event_py3 import ApplicationEvent -class ApplicationUpgradeDomainCompleteEvent(ApplicationEvent): - """Application Upgrade Domain Complete event. +class ApplicationUpgradeDomainCompletedEvent(ApplicationEvent): + """Application Upgrade Domain Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -67,6 +69,7 @@ class ApplicationUpgradeDomainCompleteEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -79,12 +82,12 @@ class ApplicationUpgradeDomainCompleteEvent(ApplicationEvent): 'upgrade_domain_elapsed_time_in_ms': {'key': 'UpgradeDomainElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, current_application_type_version: str, application_type_version: str, upgrade_state: str, upgrade_domains: str, upgrade_domain_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationUpgradeDomainCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, current_application_type_version: str, application_type_version: str, upgrade_state: str, upgrade_domains: str, 
upgrade_domain_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationUpgradeDomainCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.current_application_type_version = current_application_type_version self.application_type_version = application_type_version self.upgrade_state = upgrade_state self.upgrade_domains = upgrade_domains self.upgrade_domain_elapsed_time_in_ms = upgrade_domain_elapsed_time_in_ms - self.kind = 'ApplicationUpgradeDomainComplete' + self.kind = 'ApplicationUpgradeDomainCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_complete_event.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_completed_event.py similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_completed_event.py index 91f8c2bf7863..a28a48ade3af 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_completed_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ApplicationUpgradeRollbackCompleteEvent(ApplicationEvent): - """Application Upgrade Rollback Complete event. +class ApplicationUpgradeRollbackCompletedEvent(ApplicationEvent): + """Application Upgrade Rollback Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. 
+ :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -59,6 +61,7 @@ class ApplicationUpgradeRollbackCompleteEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -70,9 +73,9 @@ class ApplicationUpgradeRollbackCompleteEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ApplicationUpgradeRollbackCompleteEvent, self).__init__(**kwargs) + super(ApplicationUpgradeRollbackCompletedEvent, self).__init__(**kwargs) self.application_type_name = kwargs.get('application_type_name', None) self.application_type_version = kwargs.get('application_type_version', None) self.failure_reason = kwargs.get('failure_reason', None) self.overall_upgrade_elapsed_time_in_ms = kwargs.get('overall_upgrade_elapsed_time_in_ms', None) - self.kind = 'ApplicationUpgradeRollbackComplete' + self.kind = 'ApplicationUpgradeRollbackCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_completed_event_py3.py similarity index 85% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_completed_event_py3.py index c40f39d16f3f..2cb37f77a032 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_completed_event_py3.py @@ -12,14 +12,16 @@ from .application_event_py3 import 
ApplicationEvent -class ApplicationUpgradeRollbackCompleteEvent(ApplicationEvent): - """Application Upgrade Rollback Complete event. +class ApplicationUpgradeRollbackCompletedEvent(ApplicationEvent): + """Application Upgrade Rollback Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -59,6 +61,7 @@ class ApplicationUpgradeRollbackCompleteEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -69,10 +72,10 @@ class ApplicationUpgradeRollbackCompleteEvent(ApplicationEvent): 'overall_upgrade_elapsed_time_in_ms': {'key': 'OverallUpgradeElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationUpgradeRollbackCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, application_type_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + 
super(ApplicationUpgradeRollbackCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.application_type_version = application_type_version self.failure_reason = failure_reason self.overall_upgrade_elapsed_time_in_ms = overall_upgrade_elapsed_time_in_ms - self.kind = 'ApplicationUpgradeRollbackComplete' + self.kind = 'ApplicationUpgradeRollbackCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_start_event.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_started_event.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_start_event.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_started_event.py index 8e0e1cfefe4d..23887afab208 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_start_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_started_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ApplicationUpgradeRollbackStartEvent(ApplicationEvent): - """Application Upgrade Rollback Start event. +class ApplicationUpgradeRollbackStartedEvent(ApplicationEvent): + """Application Upgrade Rollback Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -64,6 +66,7 @@ class ApplicationUpgradeRollbackStartEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -76,10 +79,10 @@ class ApplicationUpgradeRollbackStartEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ApplicationUpgradeRollbackStartEvent, self).__init__(**kwargs) + super(ApplicationUpgradeRollbackStartedEvent, self).__init__(**kwargs) self.application_type_name = kwargs.get('application_type_name', None) self.current_application_type_version = kwargs.get('current_application_type_version', None) self.application_type_version = kwargs.get('application_type_version', None) self.failure_reason = kwargs.get('failure_reason', None) self.overall_upgrade_elapsed_time_in_ms = kwargs.get('overall_upgrade_elapsed_time_in_ms', None) - self.kind = 'ApplicationUpgradeRollbackStart' + self.kind = 'ApplicationUpgradeRollbackStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_start_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_started_event_py3.py similarity index 85% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_start_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_started_event_py3.py index b770876d1dd8..95488c569b80 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_start_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_rollback_started_event_py3.py @@ -12,14 +12,16 @@ from .application_event_py3 import ApplicationEvent -class 
ApplicationUpgradeRollbackStartEvent(ApplicationEvent): - """Application Upgrade Rollback Start event. +class ApplicationUpgradeRollbackStartedEvent(ApplicationEvent): + """Application Upgrade Rollback Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -64,6 +66,7 @@ class ApplicationUpgradeRollbackStartEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -75,11 +78,11 @@ class ApplicationUpgradeRollbackStartEvent(ApplicationEvent): 'overall_upgrade_elapsed_time_in_ms': {'key': 'OverallUpgradeElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, current_application_type_version: str, application_type_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationUpgradeRollbackStartEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, current_application_type_version: str, application_type_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + 
super(ApplicationUpgradeRollbackStartedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.current_application_type_version = current_application_type_version self.application_type_version = application_type_version self.failure_reason = failure_reason self.overall_upgrade_elapsed_time_in_ms = overall_upgrade_elapsed_time_in_ms - self.kind = 'ApplicationUpgradeRollbackStart' + self.kind = 'ApplicationUpgradeRollbackStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_start_event.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_started_event.py similarity index 92% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_start_event.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_started_event.py index 243507c7b5a1..4b8254f786a5 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_start_event.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_started_event.py @@ -12,14 +12,16 @@ from .application_event import ApplicationEvent -class ApplicationUpgradeStartEvent(ApplicationEvent): - """Application Upgrade Start event. +class ApplicationUpgradeStartedEvent(ApplicationEvent): + """Application Upgrade Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -66,6 +68,7 @@ class ApplicationUpgradeStartEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -79,11 +82,11 @@ class ApplicationUpgradeStartEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ApplicationUpgradeStartEvent, self).__init__(**kwargs) + super(ApplicationUpgradeStartedEvent, self).__init__(**kwargs) self.application_type_name = kwargs.get('application_type_name', None) self.current_application_type_version = kwargs.get('current_application_type_version', None) self.application_type_version = kwargs.get('application_type_version', None) self.upgrade_type = kwargs.get('upgrade_type', None) self.rolling_upgrade_mode = kwargs.get('rolling_upgrade_mode', None) self.failure_action = kwargs.get('failure_action', None) - self.kind = 'ApplicationUpgradeStart' + self.kind = 'ApplicationUpgradeStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/application_upgrade_start_event_py3.py b/azure-servicefabric/azure/servicefabric/models/application_upgrade_started_event_py3.py similarity index 86% rename from azure-servicefabric/azure/servicefabric/models/application_upgrade_start_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/application_upgrade_started_event_py3.py index 7df679fc68bc..aaf80f7c6733 100644 --- a/azure-servicefabric/azure/servicefabric/models/application_upgrade_start_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/application_upgrade_started_event_py3.py @@ -12,14 +12,16 @@ from .application_event_py3 import ApplicationEvent -class ApplicationUpgradeStartEvent(ApplicationEvent): - """Application Upgrade Start 
event. +class ApplicationUpgradeStartedEvent(ApplicationEvent): + """Application Upgrade Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -66,6 +68,7 @@ class ApplicationUpgradeStartEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -78,12 +81,12 @@ class ApplicationUpgradeStartEvent(ApplicationEvent): 'failure_action': {'key': 'FailureAction', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, current_application_type_version: str, application_type_version: str, upgrade_type: str, rolling_upgrade_mode: str, failure_action: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ApplicationUpgradeStartEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_type_name: str, current_application_type_version: str, application_type_version: str, upgrade_type: str, rolling_upgrade_mode: str, failure_action: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ApplicationUpgradeStartedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, 
has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_type_name = application_type_name self.current_application_type_version = current_application_type_version self.application_type_version = application_type_version self.upgrade_type = upgrade_type self.rolling_upgrade_mode = rolling_upgrade_mode self.failure_action = failure_action - self.kind = 'ApplicationUpgradeStart' + self.kind = 'ApplicationUpgradeStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_mechanism.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_mechanism.py new file mode 100644 index 000000000000..474bcd9d338b --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_mechanism.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingMechanism(Model): + """Describes the mechanism for performing auto scaling operation. Derived + classes will describe the actual mechanism. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AddRemoveReplicaScalingMechanism + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'AddRemoveReplica': 'AddRemoveReplicaScalingMechanism'} + } + + def __init__(self, **kwargs): + super(AutoScalingMechanism, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_mechanism_py3.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_mechanism_py3.py new file mode 100644 index 000000000000..69129cbaf346 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_mechanism_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingMechanism(Model): + """Describes the mechanism for performing auto scaling operation. Derived + classes will describe the actual mechanism. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AddRemoveReplicaScalingMechanism + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'AddRemoveReplica': 'AddRemoveReplicaScalingMechanism'} + } + + def __init__(self, **kwargs) -> None: + super(AutoScalingMechanism, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_metric.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_metric.py new file mode 100644 index 000000000000..c100d3c82ae3 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_metric.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingMetric(Model): + """Describes the metric that is used for triggering auto scaling operation. + Derived classes will describe resources or metrics. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoScalingResourceMetric + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'Resource': 'AutoScalingResourceMetric'} + } + + def __init__(self, **kwargs): + super(AutoScalingMetric, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_metric_py3.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_metric_py3.py new file mode 100644 index 000000000000..dd68e4e3447c --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_metric_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingMetric(Model): + """Describes the metric that is used for triggering auto scaling operation. + Derived classes will describe resources or metrics. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoScalingResourceMetric + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'Resource': 'AutoScalingResourceMetric'} + } + + def __init__(self, **kwargs) -> None: + super(AutoScalingMetric, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_policy.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_policy.py new file mode 100644 index 000000000000..764f7908ef8e --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_policy.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingPolicy(Model): + """Describes the auto scaling policy. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the auto scaling policy. + :type name: str + :param trigger: Required. Determines when auto scaling operation will be + invoked. + :type trigger: ~azure.servicefabric.models.AutoScalingTrigger + :param mechanism: Required. The mechanism that is used to scale when auto + scaling operation is invoked. 
+ :type mechanism: ~azure.servicefabric.models.AutoScalingMechanism + """ + + _validation = { + 'name': {'required': True}, + 'trigger': {'required': True}, + 'mechanism': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'trigger': {'key': 'trigger', 'type': 'AutoScalingTrigger'}, + 'mechanism': {'key': 'mechanism', 'type': 'AutoScalingMechanism'}, + } + + def __init__(self, **kwargs): + super(AutoScalingPolicy, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.trigger = kwargs.get('trigger', None) + self.mechanism = kwargs.get('mechanism', None) diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_policy_py3.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_policy_py3.py new file mode 100644 index 000000000000..292c0fd50488 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_policy_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingPolicy(Model): + """Describes the auto scaling policy. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the auto scaling policy. + :type name: str + :param trigger: Required. Determines when auto scaling operation will be + invoked. + :type trigger: ~azure.servicefabric.models.AutoScalingTrigger + :param mechanism: Required. The mechanism that is used to scale when auto + scaling operation is invoked. 
+ :type mechanism: ~azure.servicefabric.models.AutoScalingMechanism + """ + + _validation = { + 'name': {'required': True}, + 'trigger': {'required': True}, + 'mechanism': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'trigger': {'key': 'trigger', 'type': 'AutoScalingTrigger'}, + 'mechanism': {'key': 'mechanism', 'type': 'AutoScalingMechanism'}, + } + + def __init__(self, *, name: str, trigger, mechanism, **kwargs) -> None: + super(AutoScalingPolicy, self).__init__(**kwargs) + self.name = name + self.trigger = trigger + self.mechanism = mechanism diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_resource_metric.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_resource_metric.py new file mode 100644 index 000000000000..f853bf858c76 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_resource_metric.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .auto_scaling_metric import AutoScalingMetric + + +class AutoScalingResourceMetric(AutoScalingMetric): + """Describes the resource that is used for triggering auto scaling. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param name: Required. Name of the resource. 
Possible values include: + 'cpu', 'memoryInGB' + :type name: str or + ~azure.servicefabric.models.AutoScalingResourceMetricName + """ + + _validation = { + 'kind': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AutoScalingResourceMetric, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.kind = 'Resource' diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_resource_metric_py3.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_resource_metric_py3.py new file mode 100644 index 000000000000..ad6ea3fab5c5 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_resource_metric_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .auto_scaling_metric_py3 import AutoScalingMetric + + +class AutoScalingResourceMetric(AutoScalingMetric): + """Describes the resource that is used for triggering auto scaling. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param name: Required. Name of the resource. 
Possible values include: + 'cpu', 'memoryInGB' + :type name: str or + ~azure.servicefabric.models.AutoScalingResourceMetricName + """ + + _validation = { + 'kind': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name, **kwargs) -> None: + super(AutoScalingResourceMetric, self).__init__(**kwargs) + self.name = name + self.kind = 'Resource' diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_trigger.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_trigger.py new file mode 100644 index 000000000000..b3754545a214 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_trigger.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingTrigger(Model): + """Describes the trigger for performing auto scaling operation. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AverageLoadScalingTrigger + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'AverageLoad': 'AverageLoadScalingTrigger'} + } + + def __init__(self, **kwargs): + super(AutoScalingTrigger, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/auto_scaling_trigger_py3.py b/azure-servicefabric/azure/servicefabric/models/auto_scaling_trigger_py3.py new file mode 100644 index 000000000000..cbf43355dbd0 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/auto_scaling_trigger_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AutoScalingTrigger(Model): + """Describes the trigger for performing auto scaling operation. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AverageLoadScalingTrigger + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'AverageLoad': 'AverageLoadScalingTrigger'} + } + + def __init__(self, **kwargs) -> None: + super(AutoScalingTrigger, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/average_load_scaling_trigger.py b/azure-servicefabric/azure/servicefabric/models/average_load_scaling_trigger.py new file mode 100644 index 000000000000..975597bb960d --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/average_load_scaling_trigger.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .auto_scaling_trigger import AutoScalingTrigger + + +class AverageLoadScalingTrigger(AutoScalingTrigger): + """Describes the average load trigger used for auto scaling. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param metric: Required. Description of the metric that is used for + scaling. + :type metric: ~azure.servicefabric.models.AutoScalingMetric + :param lower_load_threshold: Required. Lower load threshold (if average + load is below this threshold, service will scale down). + :type lower_load_threshold: float + :param upper_load_threshold: Required. Upper load threshold (if average + load is above this threshold, service will scale up). 
+ :type upper_load_threshold: float + :param scale_interval_in_seconds: Required. Scale interval that indicates + how often will this trigger be checked. + :type scale_interval_in_seconds: int + """ + + _validation = { + 'kind': {'required': True}, + 'metric': {'required': True}, + 'lower_load_threshold': {'required': True}, + 'upper_load_threshold': {'required': True}, + 'scale_interval_in_seconds': {'required': True, 'minimum': 60}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'metric': {'key': 'metric', 'type': 'AutoScalingMetric'}, + 'lower_load_threshold': {'key': 'lowerLoadThreshold', 'type': 'float'}, + 'upper_load_threshold': {'key': 'upperLoadThreshold', 'type': 'float'}, + 'scale_interval_in_seconds': {'key': 'scaleIntervalInSeconds', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(AverageLoadScalingTrigger, self).__init__(**kwargs) + self.metric = kwargs.get('metric', None) + self.lower_load_threshold = kwargs.get('lower_load_threshold', None) + self.upper_load_threshold = kwargs.get('upper_load_threshold', None) + self.scale_interval_in_seconds = kwargs.get('scale_interval_in_seconds', None) + self.kind = 'AverageLoad' diff --git a/azure-servicefabric/azure/servicefabric/models/average_load_scaling_trigger_py3.py b/azure-servicefabric/azure/servicefabric/models/average_load_scaling_trigger_py3.py new file mode 100644 index 000000000000..b6a20ae17a5c --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/average_load_scaling_trigger_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .auto_scaling_trigger_py3 import AutoScalingTrigger + + +class AverageLoadScalingTrigger(AutoScalingTrigger): + """Describes the average load trigger used for auto scaling. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param metric: Required. Description of the metric that is used for + scaling. + :type metric: ~azure.servicefabric.models.AutoScalingMetric + :param lower_load_threshold: Required. Lower load threshold (if average + load is below this threshold, service will scale down). + :type lower_load_threshold: float + :param upper_load_threshold: Required. Upper load threshold (if average + load is above this threshold, service will scale up). + :type upper_load_threshold: float + :param scale_interval_in_seconds: Required. Scale interval that indicates + how often will this trigger be checked. + :type scale_interval_in_seconds: int + """ + + _validation = { + 'kind': {'required': True}, + 'metric': {'required': True}, + 'lower_load_threshold': {'required': True}, + 'upper_load_threshold': {'required': True}, + 'scale_interval_in_seconds': {'required': True, 'minimum': 60}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'metric': {'key': 'metric', 'type': 'AutoScalingMetric'}, + 'lower_load_threshold': {'key': 'lowerLoadThreshold', 'type': 'float'}, + 'upper_load_threshold': {'key': 'upperLoadThreshold', 'type': 'float'}, + 'scale_interval_in_seconds': {'key': 'scaleIntervalInSeconds', 'type': 'int'}, + } + + def __init__(self, *, metric, lower_load_threshold: float, upper_load_threshold: float, scale_interval_in_seconds: int, **kwargs) -> None: + super(AverageLoadScalingTrigger, self).__init__(**kwargs) + self.metric = metric + self.lower_load_threshold = lower_load_threshold + self.upper_load_threshold = upper_load_threshold + 
self.scale_interval_in_seconds = scale_interval_in_seconds + self.kind = 'AverageLoad' diff --git a/azure-servicefabric/azure/servicefabric/models/azure_internal_monitoring_pipeline_sink_description.py b/azure-servicefabric/azure/servicefabric/models/azure_internal_monitoring_pipeline_sink_description.py new file mode 100644 index 000000000000..07ac7ed6db19 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/azure_internal_monitoring_pipeline_sink_description.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .diagnostics_sink_properties import DiagnosticsSinkProperties + + +class AzureInternalMonitoringPipelineSinkDescription(DiagnosticsSinkProperties): + """Diagnostics settings for Geneva. + + All required parameters must be populated in order to send to Azure. + + :param name: Name of the sink. This value is referenced by + DiagnosticsReferenceDescription + :type name: str + :param description: A description of the sink. + :type description: str + :param kind: Required. Constant filled by server. + :type kind: str + :param account_name: Azure Internal monitoring pipeline account. + :type account_name: str + :param namespace: Azure Internal monitoring pipeline account namespace. + :type namespace: str + :param ma_config_url: Azure Internal monitoring agent configuration. + :type ma_config_url: str + :param fluentd_config_url: Azure Internal monitoring agent fluentd + configuration. 
+ :type fluentd_config_url: object + :param auto_key_config_url: Azure Internal monitoring pipeline autokey + associated with the certificate. + :type auto_key_config_url: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'namespace': {'key': 'namespace', 'type': 'str'}, + 'ma_config_url': {'key': 'maConfigUrl', 'type': 'str'}, + 'fluentd_config_url': {'key': 'fluentdConfigUrl', 'type': 'object'}, + 'auto_key_config_url': {'key': 'autoKeyConfigUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureInternalMonitoringPipelineSinkDescription, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.namespace = kwargs.get('namespace', None) + self.ma_config_url = kwargs.get('ma_config_url', None) + self.fluentd_config_url = kwargs.get('fluentd_config_url', None) + self.auto_key_config_url = kwargs.get('auto_key_config_url', None) + self.kind = 'AzureInternalMonitoringPipeline' diff --git a/azure-servicefabric/azure/servicefabric/models/azure_internal_monitoring_pipeline_sink_description_py3.py b/azure-servicefabric/azure/servicefabric/models/azure_internal_monitoring_pipeline_sink_description_py3.py new file mode 100644 index 000000000000..f4771409e03a --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/azure_internal_monitoring_pipeline_sink_description_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .diagnostics_sink_properties_py3 import DiagnosticsSinkProperties + + +class AzureInternalMonitoringPipelineSinkDescription(DiagnosticsSinkProperties): + """Diagnostics settings for Geneva. + + All required parameters must be populated in order to send to Azure. + + :param name: Name of the sink. This value is referenced by + DiagnosticsReferenceDescription + :type name: str + :param description: A description of the sink. + :type description: str + :param kind: Required. Constant filled by server. + :type kind: str + :param account_name: Azure Internal monitoring pipeline account. + :type account_name: str + :param namespace: Azure Internal monitoring pipeline account namespace. + :type namespace: str + :param ma_config_url: Azure Internal monitoring agent configuration. + :type ma_config_url: str + :param fluentd_config_url: Azure Internal monitoring agent fluentd + configuration. + :type fluentd_config_url: object + :param auto_key_config_url: Azure Internal monitoring pipeline autokey + associated with the certificate. 
+ :type auto_key_config_url: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'namespace': {'key': 'namespace', 'type': 'str'}, + 'ma_config_url': {'key': 'maConfigUrl', 'type': 'str'}, + 'fluentd_config_url': {'key': 'fluentdConfigUrl', 'type': 'object'}, + 'auto_key_config_url': {'key': 'autoKeyConfigUrl', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, description: str=None, account_name: str=None, namespace: str=None, ma_config_url: str=None, fluentd_config_url=None, auto_key_config_url: str=None, **kwargs) -> None: + super(AzureInternalMonitoringPipelineSinkDescription, self).__init__(name=name, description=description, **kwargs) + self.account_name = account_name + self.namespace = namespace + self.ma_config_url = ma_config_url + self.fluentd_config_url = fluentd_config_url + self.auto_key_config_url = auto_key_config_url + self.kind = 'AzureInternalMonitoringPipeline' diff --git a/azure-servicefabric/azure/servicefabric/models/backup_epoch.py b/azure-servicefabric/azure/servicefabric/models/backup_epoch.py deleted file mode 100644 index 3598ddb162eb..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/backup_epoch.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class BackupEpoch(Model): - """An Epoch is a configuration number for the partition as a whole. When the - configuration of the replica set changes, for example when the Primary - replica changes, the operations that are replicated from the new Primary - replica are said to be a new Epoch from the ones which were sent by the old - Primary replica. - - :param configuration_number: The current configuration number of this - Epoch. The configuration number is an increasing value that is updated - whenever the configuration of this replica set changes. - :type configuration_number: str - :param data_loss_number: The current dataloss number of this Epoch. The - data loss number property is an increasing value which is updated whenever - data loss is suspected, as when loss of a quorum of replicas in the - replica set that includes the Primary replica. - :type data_loss_number: str - """ - - _attribute_map = { - 'configuration_number': {'key': 'ConfigurationNumber', 'type': 'str'}, - 'data_loss_number': {'key': 'DataLossNumber', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(BackupEpoch, self).__init__(**kwargs) - self.configuration_number = kwargs.get('configuration_number', None) - self.data_loss_number = kwargs.get('data_loss_number', None) diff --git a/azure-servicefabric/azure/servicefabric/models/backup_epoch_py3.py b/azure-servicefabric/azure/servicefabric/models/backup_epoch_py3.py deleted file mode 100644 index 9747dea89099..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/backup_epoch_py3.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from msrest.serialization import Model - - -class BackupEpoch(Model): - """An Epoch is a configuration number for the partition as a whole. When the - configuration of the replica set changes, for example when the Primary - replica changes, the operations that are replicated from the new Primary - replica are said to be a new Epoch from the ones which were sent by the old - Primary replica. - - :param configuration_number: The current configuration number of this - Epoch. The configuration number is an increasing value that is updated - whenever the configuration of this replica set changes. - :type configuration_number: str - :param data_loss_number: The current dataloss number of this Epoch. The - data loss number property is an increasing value which is updated whenever - data loss is suspected, as when loss of a quorum of replicas in the - replica set that includes the Primary replica. 
- :type data_loss_number: str - """ - - _attribute_map = { - 'configuration_number': {'key': 'ConfigurationNumber', 'type': 'str'}, - 'data_loss_number': {'key': 'DataLossNumber', 'type': 'str'}, - } - - def __init__(self, *, configuration_number: str=None, data_loss_number: str=None, **kwargs) -> None: - super(BackupEpoch, self).__init__(**kwargs) - self.configuration_number = configuration_number - self.data_loss_number = data_loss_number diff --git a/azure-servicefabric/azure/servicefabric/models/backup_info.py b/azure-servicefabric/azure/servicefabric/models/backup_info.py index 239b95f95913..ca205e541a4d 100644 --- a/azure-servicefabric/azure/servicefabric/models/backup_info.py +++ b/azure-servicefabric/azure/servicefabric/models/backup_info.py @@ -39,11 +39,14 @@ class BackupInfo(Model): :type backup_type: str or ~azure.servicefabric.models.BackupType :param epoch_of_last_backup_record: Epoch of the last record in this backup. - :type epoch_of_last_backup_record: ~azure.servicefabric.models.BackupEpoch + :type epoch_of_last_backup_record: ~azure.servicefabric.models.Epoch :param lsn_of_last_backup_record: LSN of the last record in this backup. :type lsn_of_last_backup_record: str :param creation_time_utc: The date time when this backup was taken. :type creation_time_utc: datetime + :param service_manifest_version: Manifest Version of the service this + partition backup belongs to. + :type service_manifest_version: str :param failure_error: Denotes the failure encountered in getting backup point information. 
:type failure_error: ~azure.servicefabric.models.FabricErrorError @@ -57,9 +60,10 @@ class BackupInfo(Model): 'partition_information': {'key': 'PartitionInformation', 'type': 'PartitionInformation'}, 'backup_location': {'key': 'BackupLocation', 'type': 'str'}, 'backup_type': {'key': 'BackupType', 'type': 'str'}, - 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'BackupEpoch'}, + 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'Epoch'}, 'lsn_of_last_backup_record': {'key': 'LsnOfLastBackupRecord', 'type': 'str'}, 'creation_time_utc': {'key': 'CreationTimeUtc', 'type': 'iso-8601'}, + 'service_manifest_version': {'key': 'ServiceManifestVersion', 'type': 'str'}, 'failure_error': {'key': 'FailureError', 'type': 'FabricErrorError'}, } @@ -75,4 +79,5 @@ def __init__(self, **kwargs): self.epoch_of_last_backup_record = kwargs.get('epoch_of_last_backup_record', None) self.lsn_of_last_backup_record = kwargs.get('lsn_of_last_backup_record', None) self.creation_time_utc = kwargs.get('creation_time_utc', None) + self.service_manifest_version = kwargs.get('service_manifest_version', None) self.failure_error = kwargs.get('failure_error', None) diff --git a/azure-servicefabric/azure/servicefabric/models/backup_info_py3.py b/azure-servicefabric/azure/servicefabric/models/backup_info_py3.py index 8fed63e3ab23..61fdc445fe4b 100644 --- a/azure-servicefabric/azure/servicefabric/models/backup_info_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/backup_info_py3.py @@ -39,11 +39,14 @@ class BackupInfo(Model): :type backup_type: str or ~azure.servicefabric.models.BackupType :param epoch_of_last_backup_record: Epoch of the last record in this backup. - :type epoch_of_last_backup_record: ~azure.servicefabric.models.BackupEpoch + :type epoch_of_last_backup_record: ~azure.servicefabric.models.Epoch :param lsn_of_last_backup_record: LSN of the last record in this backup. 
:type lsn_of_last_backup_record: str :param creation_time_utc: The date time when this backup was taken. :type creation_time_utc: datetime + :param service_manifest_version: Manifest Version of the service this + partition backup belongs to. + :type service_manifest_version: str :param failure_error: Denotes the failure encountered in getting backup point information. :type failure_error: ~azure.servicefabric.models.FabricErrorError @@ -57,13 +60,14 @@ class BackupInfo(Model): 'partition_information': {'key': 'PartitionInformation', 'type': 'PartitionInformation'}, 'backup_location': {'key': 'BackupLocation', 'type': 'str'}, 'backup_type': {'key': 'BackupType', 'type': 'str'}, - 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'BackupEpoch'}, + 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'Epoch'}, 'lsn_of_last_backup_record': {'key': 'LsnOfLastBackupRecord', 'type': 'str'}, 'creation_time_utc': {'key': 'CreationTimeUtc', 'type': 'iso-8601'}, + 'service_manifest_version': {'key': 'ServiceManifestVersion', 'type': 'str'}, 'failure_error': {'key': 'FailureError', 'type': 'FabricErrorError'}, } - def __init__(self, *, backup_id: str=None, backup_chain_id: str=None, application_name: str=None, service_name: str=None, partition_information=None, backup_location: str=None, backup_type=None, epoch_of_last_backup_record=None, lsn_of_last_backup_record: str=None, creation_time_utc=None, failure_error=None, **kwargs) -> None: + def __init__(self, *, backup_id: str=None, backup_chain_id: str=None, application_name: str=None, service_name: str=None, partition_information=None, backup_location: str=None, backup_type=None, epoch_of_last_backup_record=None, lsn_of_last_backup_record: str=None, creation_time_utc=None, service_manifest_version: str=None, failure_error=None, **kwargs) -> None: super(BackupInfo, self).__init__(**kwargs) self.backup_id = backup_id self.backup_chain_id = backup_chain_id @@ -75,4 +79,5 @@ def 
__init__(self, *, backup_id: str=None, backup_chain_id: str=None, applicatio self.epoch_of_last_backup_record = epoch_of_last_backup_record self.lsn_of_last_backup_record = lsn_of_last_backup_record self.creation_time_utc = creation_time_utc + self.service_manifest_version = service_manifest_version self.failure_error = failure_error diff --git a/azure-servicefabric/azure/servicefabric/models/backup_policy_description.py b/azure-servicefabric/azure/servicefabric/models/backup_policy_description.py index 09fd9f04d97e..d87ea5769b13 100644 --- a/azure-servicefabric/azure/servicefabric/models/backup_policy_description.py +++ b/azure-servicefabric/azure/servicefabric/models/backup_policy_description.py @@ -36,6 +36,10 @@ class BackupPolicyDescription(Model): :param storage: Required. Describes the details of backup storage where to store the periodic backups. :type storage: ~azure.servicefabric.models.BackupStorageDescription + :param retention_policy: Describes the policy to retain backups in + storage. 
+ :type retention_policy: + ~azure.servicefabric.models.RetentionPolicyDescription """ _validation = { @@ -52,6 +56,7 @@ class BackupPolicyDescription(Model): 'max_incremental_backups': {'key': 'MaxIncrementalBackups', 'type': 'int'}, 'schedule': {'key': 'Schedule', 'type': 'BackupScheduleDescription'}, 'storage': {'key': 'Storage', 'type': 'BackupStorageDescription'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicyDescription'}, } def __init__(self, **kwargs): @@ -61,3 +66,4 @@ def __init__(self, **kwargs): self.max_incremental_backups = kwargs.get('max_incremental_backups', None) self.schedule = kwargs.get('schedule', None) self.storage = kwargs.get('storage', None) + self.retention_policy = kwargs.get('retention_policy', None) diff --git a/azure-servicefabric/azure/servicefabric/models/backup_policy_description_py3.py b/azure-servicefabric/azure/servicefabric/models/backup_policy_description_py3.py index a600851e4faf..5890d7963370 100644 --- a/azure-servicefabric/azure/servicefabric/models/backup_policy_description_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/backup_policy_description_py3.py @@ -36,6 +36,10 @@ class BackupPolicyDescription(Model): :param storage: Required. Describes the details of backup storage where to store the periodic backups. :type storage: ~azure.servicefabric.models.BackupStorageDescription + :param retention_policy: Describes the policy to retain backups in + storage. 
+ :type retention_policy: + ~azure.servicefabric.models.RetentionPolicyDescription """ _validation = { @@ -52,12 +56,14 @@ class BackupPolicyDescription(Model): 'max_incremental_backups': {'key': 'MaxIncrementalBackups', 'type': 'int'}, 'schedule': {'key': 'Schedule', 'type': 'BackupScheduleDescription'}, 'storage': {'key': 'Storage', 'type': 'BackupStorageDescription'}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicyDescription'}, } - def __init__(self, *, name: str, auto_restore_on_data_loss: bool, max_incremental_backups: int, schedule, storage, **kwargs) -> None: + def __init__(self, *, name: str, auto_restore_on_data_loss: bool, max_incremental_backups: int, schedule, storage, retention_policy=None, **kwargs) -> None: super(BackupPolicyDescription, self).__init__(**kwargs) self.name = name self.auto_restore_on_data_loss = auto_restore_on_data_loss self.max_incremental_backups = max_incremental_backups self.schedule = schedule self.storage = storage + self.retention_policy = retention_policy diff --git a/azure-servicefabric/azure/servicefabric/models/backup_progress_info.py b/azure-servicefabric/azure/servicefabric/models/backup_progress_info.py index 2fef751c42b9..785b02ff216e 100644 --- a/azure-servicefabric/azure/servicefabric/models/backup_progress_info.py +++ b/azure-servicefabric/azure/servicefabric/models/backup_progress_info.py @@ -29,7 +29,7 @@ class BackupProgressInfo(Model): :type backup_location: str :param epoch_of_last_backup_record: Specifies the epoch of the last record included in backup. - :type epoch_of_last_backup_record: ~azure.servicefabric.models.BackupEpoch + :type epoch_of_last_backup_record: ~azure.servicefabric.models.Epoch :param lsn_of_last_backup_record: The LSN of last record included in backup. 
:type lsn_of_last_backup_record: str @@ -43,7 +43,7 @@ class BackupProgressInfo(Model): 'time_stamp_utc': {'key': 'TimeStampUtc', 'type': 'iso-8601'}, 'backup_id': {'key': 'BackupId', 'type': 'str'}, 'backup_location': {'key': 'BackupLocation', 'type': 'str'}, - 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'BackupEpoch'}, + 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'Epoch'}, 'lsn_of_last_backup_record': {'key': 'LsnOfLastBackupRecord', 'type': 'str'}, 'failure_error': {'key': 'FailureError', 'type': 'FabricErrorError'}, } diff --git a/azure-servicefabric/azure/servicefabric/models/backup_progress_info_py3.py b/azure-servicefabric/azure/servicefabric/models/backup_progress_info_py3.py index 71ae6cf03346..c00ce62e14d0 100644 --- a/azure-servicefabric/azure/servicefabric/models/backup_progress_info_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/backup_progress_info_py3.py @@ -29,7 +29,7 @@ class BackupProgressInfo(Model): :type backup_location: str :param epoch_of_last_backup_record: Specifies the epoch of the last record included in backup. - :type epoch_of_last_backup_record: ~azure.servicefabric.models.BackupEpoch + :type epoch_of_last_backup_record: ~azure.servicefabric.models.Epoch :param lsn_of_last_backup_record: The LSN of last record included in backup. 
:type lsn_of_last_backup_record: str @@ -43,7 +43,7 @@ class BackupProgressInfo(Model): 'time_stamp_utc': {'key': 'TimeStampUtc', 'type': 'iso-8601'}, 'backup_id': {'key': 'BackupId', 'type': 'str'}, 'backup_location': {'key': 'BackupLocation', 'type': 'str'}, - 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'BackupEpoch'}, + 'epoch_of_last_backup_record': {'key': 'EpochOfLastBackupRecord', 'type': 'Epoch'}, 'lsn_of_last_backup_record': {'key': 'LsnOfLastBackupRecord', 'type': 'str'}, 'failure_error': {'key': 'FailureError', 'type': 'FabricErrorError'}, } diff --git a/azure-servicefabric/azure/servicefabric/models/basic_retention_policy_description.py b/azure-servicefabric/azure/servicefabric/models/basic_retention_policy_description.py new file mode 100644 index 000000000000..653dec495c43 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/basic_retention_policy_description.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .retention_policy_description import RetentionPolicyDescription + + +class BasicRetentionPolicyDescription(RetentionPolicyDescription): + """Describes basic retention policy. + + All required parameters must be populated in order to send to Azure. + + :param retention_policy_type: Required. Constant filled by server. + :type retention_policy_type: str + :param retention_duration: Required. It is the minimum duration for which + a backup created, will remain stored in the storage and might get deleted + after that span of time. 
It should be specified in ISO8601 format. + :type retention_duration: timedelta + :param minimum_number_of_backups: It is the minimum number of backups to + be retained at any point of time. If specified with a non zero value, + backups will not be deleted even if the backups have gone past retention + duration and have number of backups less than or equal to it. + :type minimum_number_of_backups: int + """ + + _validation = { + 'retention_policy_type': {'required': True}, + 'retention_duration': {'required': True}, + 'minimum_number_of_backups': {'minimum': 0}, + } + + _attribute_map = { + 'retention_policy_type': {'key': 'RetentionPolicyType', 'type': 'str'}, + 'retention_duration': {'key': 'RetentionDuration', 'type': 'duration'}, + 'minimum_number_of_backups': {'key': 'MinimumNumberOfBackups', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(BasicRetentionPolicyDescription, self).__init__(**kwargs) + self.retention_duration = kwargs.get('retention_duration', None) + self.minimum_number_of_backups = kwargs.get('minimum_number_of_backups', None) + self.retention_policy_type = 'Basic' diff --git a/azure-servicefabric/azure/servicefabric/models/basic_retention_policy_description_py3.py b/azure-servicefabric/azure/servicefabric/models/basic_retention_policy_description_py3.py new file mode 100644 index 000000000000..5abb36a82dd0 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/basic_retention_policy_description_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .retention_policy_description_py3 import RetentionPolicyDescription + + +class BasicRetentionPolicyDescription(RetentionPolicyDescription): + """Describes basic retention policy. + + All required parameters must be populated in order to send to Azure. + + :param retention_policy_type: Required. Constant filled by server. + :type retention_policy_type: str + :param retention_duration: Required. It is the minimum duration for which + a backup created, will remain stored in the storage and might get deleted + after that span of time. It should be specified in ISO8601 format. + :type retention_duration: timedelta + :param minimum_number_of_backups: It is the minimum number of backups to + be retained at any point of time. If specified with a non zero value, + backups will not be deleted even if the backups have gone past retention + duration and have number of backups less than or equal to it. + :type minimum_number_of_backups: int + """ + + _validation = { + 'retention_policy_type': {'required': True}, + 'retention_duration': {'required': True}, + 'minimum_number_of_backups': {'minimum': 0}, + } + + _attribute_map = { + 'retention_policy_type': {'key': 'RetentionPolicyType', 'type': 'str'}, + 'retention_duration': {'key': 'RetentionDuration', 'type': 'duration'}, + 'minimum_number_of_backups': {'key': 'MinimumNumberOfBackups', 'type': 'int'}, + } + + def __init__(self, *, retention_duration, minimum_number_of_backups: int=None, **kwargs) -> None: + super(BasicRetentionPolicyDescription, self).__init__(**kwargs) + self.retention_duration = retention_duration + self.minimum_number_of_backups = minimum_number_of_backups + self.retention_policy_type = 'Basic' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_scheduled_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_code_package_restart_scheduled_event.py similarity index 92% 
rename from azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_scheduled_event.py rename to azure-servicefabric/azure/servicefabric/models/chaos_code_package_restart_scheduled_event.py index 0ec33de8b0d6..7b4396d125e1 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_scheduled_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_code_package_restart_scheduled_event.py @@ -12,7 +12,7 @@ from .application_event import ApplicationEvent -class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): +class ChaosCodePackageRestartScheduledEvent(ApplicationEvent): """Chaos Restart Code Package Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -65,6 +67,7 @@ class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -78,11 +81,11 @@ class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(ChaosRestartCodePackageFaultScheduledEvent, self).__init__(**kwargs) + super(ChaosCodePackageRestartScheduledEvent, self).__init__(**kwargs) self.fault_group_id = kwargs.get('fault_group_id', None) self.fault_id = kwargs.get('fault_id', None) self.node_name = kwargs.get('node_name', None) self.service_manifest_name = kwargs.get('service_manifest_name', None) self.code_package_name = kwargs.get('code_package_name', None) self.service_package_activation_id = kwargs.get('service_package_activation_id', None) - self.kind = 'ChaosRestartCodePackageFaultScheduled' + self.kind = 'ChaosCodePackageRestartScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_scheduled_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_code_package_restart_scheduled_event_py3.py similarity index 87% rename from azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_scheduled_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/chaos_code_package_restart_scheduled_event_py3.py index 36376390d334..a9a809823688 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_scheduled_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_code_package_restart_scheduled_event_py3.py @@ -12,7 +12,7 @@ from .application_event_py3 import 
ApplicationEvent -class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): +class ChaosCodePackageRestartScheduledEvent(ApplicationEvent): """Chaos Restart Code Package Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -65,6 +67,7 @@ class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -77,12 +80,12 @@ class ChaosRestartCodePackageFaultScheduledEvent(ApplicationEvent): 'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, fault_group_id: str, fault_id: str, node_name: str, service_manifest_name: str, code_package_name: str, service_package_activation_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosRestartCodePackageFaultScheduledEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, fault_group_id: str, fault_id: str, node_name: str, service_manifest_name: str, code_package_name: str, service_package_activation_id: str, category: str=None, has_correlated_events: 
bool=None, **kwargs) -> None: + super(ChaosCodePackageRestartScheduledEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.fault_group_id = fault_group_id self.fault_id = fault_id self.node_name = node_name self.service_manifest_name = service_manifest_name self.code_package_name = code_package_name self.service_package_activation_id = service_package_activation_id - self.kind = 'ChaosRestartCodePackageFaultScheduled' + self.kind = 'ChaosCodePackageRestartScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_scheduled_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_node_restart_scheduled_event.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_scheduled_event.py rename to azure-servicefabric/azure/servicefabric/models/chaos_node_restart_scheduled_event.py index f29e8bfc63d4..4bbcde8fda65 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_scheduled_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_node_restart_scheduled_event.py @@ -12,7 +12,7 @@ from .node_event import NodeEvent -class ChaosRestartNodeFaultScheduledEvent(NodeEvent): +class ChaosNodeRestartScheduledEvent(NodeEvent): """Chaos Restart Node Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosRestartNodeFaultScheduledEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -49,6 +51,7 @@ class ChaosRestartNodeFaultScheduledEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -59,8 +62,8 @@ class ChaosRestartNodeFaultScheduledEvent(NodeEvent): } def __init__(self, **kwargs): - super(ChaosRestartNodeFaultScheduledEvent, self).__init__(**kwargs) + super(ChaosNodeRestartScheduledEvent, self).__init__(**kwargs) self.node_instance_id = kwargs.get('node_instance_id', None) self.fault_group_id = kwargs.get('fault_group_id', None) self.fault_id = kwargs.get('fault_id', None) - self.kind = 'ChaosRestartNodeFaultScheduled' + self.kind = 'ChaosNodeRestartScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_scheduled_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_node_restart_scheduled_event_py3.py similarity index 82% rename from azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_scheduled_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/chaos_node_restart_scheduled_event_py3.py index 6022314b8b66..780e31f33f91 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_scheduled_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_node_restart_scheduled_event_py3.py @@ -12,7 +12,7 @@ from .node_event_py3 import NodeEvent -class ChaosRestartNodeFaultScheduledEvent(NodeEvent): +class ChaosNodeRestartScheduledEvent(NodeEvent): """Chaos Restart Node Fault Scheduled event. All required parameters must be populated in order to send to Azure. 
@@ -20,6 +20,8 @@ class ChaosRestartNodeFaultScheduledEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -49,6 +51,7 @@ class ChaosRestartNodeFaultScheduledEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -58,9 +61,9 @@ class ChaosRestartNodeFaultScheduledEvent(NodeEvent): 'fault_id': {'key': 'FaultId', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, fault_group_id: str, fault_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosRestartNodeFaultScheduledEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, fault_group_id: str, fault_id: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosNodeRestartScheduledEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance_id = node_instance_id self.fault_group_id = fault_group_id self.fault_id = fault_id - self.kind = 'ChaosRestartNodeFaultScheduled' + self.kind = 'ChaosNodeRestartScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_move_primary_fault_scheduled_event.py 
b/azure-servicefabric/azure/servicefabric/models/chaos_partition_primary_move_scheduled_event.py similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/chaos_move_primary_fault_scheduled_event.py rename to azure-servicefabric/azure/servicefabric/models/chaos_partition_primary_move_scheduled_event.py index eb5675b3c490..2275bc00fd2d 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_move_primary_fault_scheduled_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_partition_primary_move_scheduled_event.py @@ -12,7 +12,7 @@ from .partition_event import PartitionEvent -class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): +class ChaosPartitionPrimaryMoveScheduledEvent(PartitionEvent): """Chaos Move Primary Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -59,6 +61,7 @@ class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -71,10 +74,10 @@ class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): } def __init__(self, **kwargs): - super(ChaosMovePrimaryFaultScheduledEvent, self).__init__(**kwargs) + super(ChaosPartitionPrimaryMoveScheduledEvent, self).__init__(**kwargs) self.fault_group_id = kwargs.get('fault_group_id', None) self.fault_id = kwargs.get('fault_id', None) self.service_name = kwargs.get('service_name', None) self.node_to = kwargs.get('node_to', None) self.forced_move = kwargs.get('forced_move', None) - self.kind = 'ChaosMovePrimaryFaultScheduled' + self.kind = 'ChaosPartitionPrimaryMoveScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_move_primary_fault_scheduled_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_partition_primary_move_scheduled_event_py3.py similarity index 84% rename from azure-servicefabric/azure/servicefabric/models/chaos_move_primary_fault_scheduled_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/chaos_partition_primary_move_scheduled_event_py3.py index 9c55970213fc..e6d714d67453 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_move_primary_fault_scheduled_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_partition_primary_move_scheduled_event_py3.py @@ -12,7 +12,7 @@ from .partition_event_py3 import PartitionEvent -class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): +class ChaosPartitionPrimaryMoveScheduledEvent(PartitionEvent): """Chaos Move Primary Fault Scheduled 
event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -59,6 +61,7 @@ class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -70,11 +73,11 @@ class ChaosMovePrimaryFaultScheduledEvent(PartitionEvent): 'forced_move': {'key': 'ForcedMove', 'type': 'bool'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, fault_group_id: str, fault_id: str, service_name: str, node_to: str, forced_move: bool, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosMovePrimaryFaultScheduledEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, fault_group_id: str, fault_id: str, service_name: str, node_to: str, forced_move: bool, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosPartitionPrimaryMoveScheduledEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) self.fault_group_id = fault_group_id self.fault_id = fault_id self.service_name = service_name self.node_to = node_to 
self.forced_move = forced_move - self.kind = 'ChaosMovePrimaryFaultScheduled' + self.kind = 'ChaosPartitionPrimaryMoveScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_move_secondary_fault_scheduled_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_partition_secondary_move_scheduled_event.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/chaos_move_secondary_fault_scheduled_event.py rename to azure-servicefabric/azure/servicefabric/models/chaos_partition_secondary_move_scheduled_event.py index afd2c1cb1686..89a701276f5d 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_move_secondary_fault_scheduled_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_partition_secondary_move_scheduled_event.py @@ -12,7 +12,7 @@ from .partition_event import PartitionEvent -class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): +class ChaosPartitionSecondaryMoveScheduledEvent(PartitionEvent): """Chaos Move Secondary Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -75,11 +78,11 @@ class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): } def __init__(self, **kwargs): - super(ChaosMoveSecondaryFaultScheduledEvent, self).__init__(**kwargs) + super(ChaosPartitionSecondaryMoveScheduledEvent, self).__init__(**kwargs) self.fault_group_id = kwargs.get('fault_group_id', None) self.fault_id = kwargs.get('fault_id', None) self.service_name = kwargs.get('service_name', None) self.source_node = kwargs.get('source_node', None) self.destination_node = kwargs.get('destination_node', None) self.forced_move = kwargs.get('forced_move', None) - self.kind = 'ChaosMoveSecondaryFaultScheduled' + self.kind = 'ChaosPartitionSecondaryMoveScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_move_secondary_fault_scheduled_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_partition_secondary_move_scheduled_event_py3.py similarity index 86% rename from azure-servicefabric/azure/servicefabric/models/chaos_move_secondary_fault_scheduled_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/chaos_partition_secondary_move_scheduled_event_py3.py index fea8d3e0eacc..c385d53d3deb 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_move_secondary_fault_scheduled_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_partition_secondary_move_scheduled_event_py3.py @@ -12,7 +12,7 @@ from .partition_event_py3 import PartitionEvent -class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): +class 
ChaosPartitionSecondaryMoveScheduledEvent(PartitionEvent): """Chaos Move Secondary Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -74,12 +77,12 @@ class ChaosMoveSecondaryFaultScheduledEvent(PartitionEvent): 'forced_move': {'key': 'ForcedMove', 'type': 'bool'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, fault_group_id: str, fault_id: str, service_name: str, source_node: str, destination_node: str, forced_move: bool, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosMoveSecondaryFaultScheduledEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, fault_group_id: str, fault_id: str, service_name: str, source_node: str, destination_node: str, forced_move: bool, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosPartitionSecondaryMoveScheduledEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, 
has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) self.fault_group_id = fault_group_id self.fault_id = fault_id self.service_name = service_name self.source_node = source_node self.destination_node = destination_node self.forced_move = forced_move - self.kind = 'ChaosMoveSecondaryFaultScheduled' + self.kind = 'ChaosPartitionSecondaryMoveScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_completed_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_completed_event.py deleted file mode 100644 index 1cbf3fb66c33..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_completed_event.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .replica_event import ReplicaEvent - - -class ChaosRemoveReplicaFaultCompletedEvent(ReplicaEvent): - """Chaos Remove Replica Fault Completed event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param partition_id: Required. An internal ID used by Service Fabric to - uniquely identify a partition. 
This is a randomly generated GUID when the - service was created. The partition ID is unique and does not change for - the lifetime of the service. If the same service was deleted and recreated - the IDs of its partitions would be different. - :type partition_id: str - :param replica_id: Required. Id of a stateful service replica. ReplicaId - is used by Service Fabric to uniquely identify a replica of a partition. - It is unique within a partition and does not change for the lifetime of - the replica. If a replica gets dropped and another replica gets created on - the same node for the same partition, it will get a different value for - the id. Sometimes the id of a stateless service instance is also referred - as a replica id. - :type replica_id: long - :param fault_group_id: Required. Id of fault group. - :type fault_group_id: str - :param fault_id: Required. Id of fault. - :type fault_id: str - :param service_uri: Required. Service name. - :type service_uri: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'partition_id': {'required': True}, - 'replica_id': {'required': True}, - 'fault_group_id': {'required': True}, - 'fault_id': {'required': True}, - 'service_uri': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'partition_id': {'key': 'PartitionId', 'type': 'str'}, - 'replica_id': {'key': 'ReplicaId', 'type': 'long'}, - 'fault_group_id': {'key': 'FaultGroupId', 'type': 'str'}, - 'fault_id': {'key': 'FaultId', 'type': 'str'}, - 'service_uri': {'key': 'ServiceUri', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ChaosRemoveReplicaFaultCompletedEvent, self).__init__(**kwargs) - self.fault_group_id = 
kwargs.get('fault_group_id', None) - self.fault_id = kwargs.get('fault_id', None) - self.service_uri = kwargs.get('service_uri', None) - self.kind = 'ChaosRemoveReplicaFaultCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_completed_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_completed_event_py3.py deleted file mode 100644 index 690903fdadd8..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_completed_event_py3.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .replica_event_py3 import ReplicaEvent - - -class ChaosRemoveReplicaFaultCompletedEvent(ReplicaEvent): - """Chaos Remove Replica Fault Completed event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param partition_id: Required. An internal ID used by Service Fabric to - uniquely identify a partition. This is a randomly generated GUID when the - service was created. The partition ID is unique and does not change for - the lifetime of the service. 
If the same service was deleted and recreated - the IDs of its partitions would be different. - :type partition_id: str - :param replica_id: Required. Id of a stateful service replica. ReplicaId - is used by Service Fabric to uniquely identify a replica of a partition. - It is unique within a partition and does not change for the lifetime of - the replica. If a replica gets dropped and another replica gets created on - the same node for the same partition, it will get a different value for - the id. Sometimes the id of a stateless service instance is also referred - as a replica id. - :type replica_id: long - :param fault_group_id: Required. Id of fault group. - :type fault_group_id: str - :param fault_id: Required. Id of fault. - :type fault_id: str - :param service_uri: Required. Service name. - :type service_uri: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'partition_id': {'required': True}, - 'replica_id': {'required': True}, - 'fault_group_id': {'required': True}, - 'fault_id': {'required': True}, - 'service_uri': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'partition_id': {'key': 'PartitionId', 'type': 'str'}, - 'replica_id': {'key': 'ReplicaId', 'type': 'long'}, - 'fault_group_id': {'key': 'FaultGroupId', 'type': 'str'}, - 'fault_id': {'key': 'FaultId', 'type': 'str'}, - 'service_uri': {'key': 'ServiceUri', 'type': 'str'}, - } - - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, fault_group_id: str, fault_id: str, service_uri: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosRemoveReplicaFaultCompletedEvent, 
self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) - self.fault_group_id = fault_group_id - self.fault_id = fault_id - self.service_uri = service_uri - self.kind = 'ChaosRemoveReplicaFaultCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_scheduled_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_replica_removal_scheduled_event.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_scheduled_event.py rename to azure-servicefabric/azure/servicefabric/models/chaos_replica_removal_scheduled_event.py index 534634afe17f..3ac78109e81b 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_scheduled_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_replica_removal_scheduled_event.py @@ -12,7 +12,7 @@ from .replica_event import ReplicaEvent -class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): +class ChaosReplicaRemovalScheduledEvent(ReplicaEvent): """Chaos Remove Replica Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -73,8 +76,8 @@ class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): } def __init__(self, **kwargs): - super(ChaosRemoveReplicaFaultScheduledEvent, self).__init__(**kwargs) + super(ChaosReplicaRemovalScheduledEvent, self).__init__(**kwargs) self.fault_group_id = kwargs.get('fault_group_id', None) self.fault_id = kwargs.get('fault_id', None) self.service_uri = kwargs.get('service_uri', None) - self.kind = 'ChaosRemoveReplicaFaultScheduled' + self.kind = 'ChaosReplicaRemovalScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_scheduled_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_replica_removal_scheduled_event_py3.py similarity index 85% rename from azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_scheduled_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/chaos_replica_removal_scheduled_event_py3.py index 07a569b12ab7..81db7b079d2c 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_remove_replica_fault_scheduled_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_replica_removal_scheduled_event_py3.py @@ -12,7 +12,7 @@ from .replica_event_py3 import ReplicaEvent -class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): +class ChaosReplicaRemovalScheduledEvent(ReplicaEvent): """Chaos Remove Replica Fault Scheduled event. All required parameters must be populated in order to send to Azure. 
@@ -20,6 +20,8 @@ class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -72,9 +75,9 @@ class ChaosRemoveReplicaFaultScheduledEvent(ReplicaEvent): 'service_uri': {'key': 'ServiceUri', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, fault_group_id: str, fault_id: str, service_uri: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosRemoveReplicaFaultScheduledEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, fault_group_id: str, fault_id: str, service_uri: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosReplicaRemovalScheduledEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) self.fault_group_id = fault_group_id self.fault_id = fault_id self.service_uri = service_uri - self.kind = 'ChaosRemoveReplicaFaultScheduled' + self.kind = 'ChaosReplicaRemovalScheduled' 
diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_replica_fault_scheduled_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_replica_restart_scheduled_event.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/chaos_restart_replica_fault_scheduled_event.py rename to azure-servicefabric/azure/servicefabric/models/chaos_replica_restart_scheduled_event.py index e2b1f250149e..c877e86cd411 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_replica_fault_scheduled_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_replica_restart_scheduled_event.py @@ -12,7 +12,7 @@ from .replica_event import ReplicaEvent -class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): +class ChaosReplicaRestartScheduledEvent(ReplicaEvent): """Chaos Restart Replica Fault Scheduled event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -73,8 +76,8 @@ class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): } def __init__(self, **kwargs): - super(ChaosRestartReplicaFaultScheduledEvent, self).__init__(**kwargs) + super(ChaosReplicaRestartScheduledEvent, self).__init__(**kwargs) self.fault_group_id = kwargs.get('fault_group_id', None) self.fault_id = kwargs.get('fault_id', None) self.service_uri = kwargs.get('service_uri', None) - self.kind = 'ChaosRestartReplicaFaultScheduled' + self.kind = 'ChaosReplicaRestartScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_replica_fault_scheduled_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_replica_restart_scheduled_event_py3.py similarity index 85% rename from azure-servicefabric/azure/servicefabric/models/chaos_restart_replica_fault_scheduled_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/chaos_replica_restart_scheduled_event_py3.py index 81537e165e0f..f8b2cbbcf6a5 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_replica_fault_scheduled_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_replica_restart_scheduled_event_py3.py @@ -12,7 +12,7 @@ from .replica_event_py3 import ReplicaEvent -class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): +class ChaosReplicaRestartScheduledEvent(ReplicaEvent): """Chaos Restart Replica Fault Scheduled event. All required parameters must be populated in order to send to Azure. 
@@ -20,6 +20,8 @@ class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -72,9 +75,9 @@ class ChaosRestartReplicaFaultScheduledEvent(ReplicaEvent): 'service_uri': {'key': 'ServiceUri', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, fault_group_id: str, fault_id: str, service_uri: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosRestartReplicaFaultScheduledEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, fault_group_id: str, fault_id: str, service_uri: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosReplicaRestartScheduledEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) self.fault_group_id = fault_group_id self.fault_id = fault_id self.service_uri = service_uri - self.kind = 'ChaosRestartReplicaFaultScheduled' + self.kind = 
'ChaosReplicaRestartScheduled' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_completed_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_completed_event.py deleted file mode 100644 index 2e9b56c0d7ed..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_completed_event.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .application_event import ApplicationEvent - - -class ChaosRestartCodePackageFaultCompletedEvent(ApplicationEvent): - """Chaos Restart Code Package Fault Completed event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param application_id: Required. The identity of the application. This is - an encoded representation of the application name. This is used in the - REST APIs to identify the application resource. - Starting in version 6.0, hierarchical names are delimited with the "\\~" - character. 
For example, if the application name is "fabric:/myapp/app1", - the application identity would be "myapp\\~app1" in 6.0+ and "myapp/app1" - in previous versions. - :type application_id: str - :param fault_group_id: Required. Id of fault group. - :type fault_group_id: str - :param fault_id: Required. Id of fault. - :type fault_id: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param service_manifest_name: Required. Service manifest name. - :type service_manifest_name: str - :param code_package_name: Required. Code package name. - :type code_package_name: str - :param service_package_activation_id: Required. Id of Service package - activation. - :type service_package_activation_id: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'application_id': {'required': True}, - 'fault_group_id': {'required': True}, - 'fault_id': {'required': True}, - 'node_name': {'required': True}, - 'service_manifest_name': {'required': True}, - 'code_package_name': {'required': True}, - 'service_package_activation_id': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'application_id': {'key': 'ApplicationId', 'type': 'str'}, - 'fault_group_id': {'key': 'FaultGroupId', 'type': 'str'}, - 'fault_id': {'key': 'FaultId', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'}, - 'code_package_name': {'key': 'CodePackageName', 'type': 'str'}, - 'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ChaosRestartCodePackageFaultCompletedEvent, 
self).__init__(**kwargs) - self.fault_group_id = kwargs.get('fault_group_id', None) - self.fault_id = kwargs.get('fault_id', None) - self.node_name = kwargs.get('node_name', None) - self.service_manifest_name = kwargs.get('service_manifest_name', None) - self.code_package_name = kwargs.get('code_package_name', None) - self.service_package_activation_id = kwargs.get('service_package_activation_id', None) - self.kind = 'ChaosRestartCodePackageFaultCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_completed_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_completed_event_py3.py deleted file mode 100644 index 4e80772c4126..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_code_package_fault_completed_event_py3.py +++ /dev/null @@ -1,88 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .application_event_py3 import ApplicationEvent - - -class ChaosRestartCodePackageFaultCompletedEvent(ApplicationEvent): - """Chaos Restart Code Package Fault Completed event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. 
- :type kind: str - :param application_id: Required. The identity of the application. This is - an encoded representation of the application name. This is used in the - REST APIs to identify the application resource. - Starting in version 6.0, hierarchical names are delimited with the "\\~" - character. For example, if the application name is "fabric:/myapp/app1", - the application identity would be "myapp\\~app1" in 6.0+ and "myapp/app1" - in previous versions. - :type application_id: str - :param fault_group_id: Required. Id of fault group. - :type fault_group_id: str - :param fault_id: Required. Id of fault. - :type fault_id: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param service_manifest_name: Required. Service manifest name. - :type service_manifest_name: str - :param code_package_name: Required. Code package name. - :type code_package_name: str - :param service_package_activation_id: Required. Id of Service package - activation. 
- :type service_package_activation_id: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'application_id': {'required': True}, - 'fault_group_id': {'required': True}, - 'fault_id': {'required': True}, - 'node_name': {'required': True}, - 'service_manifest_name': {'required': True}, - 'code_package_name': {'required': True}, - 'service_package_activation_id': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'application_id': {'key': 'ApplicationId', 'type': 'str'}, - 'fault_group_id': {'key': 'FaultGroupId', 'type': 'str'}, - 'fault_id': {'key': 'FaultId', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'}, - 'code_package_name': {'key': 'CodePackageName', 'type': 'str'}, - 'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'}, - } - - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, fault_group_id: str, fault_id: str, node_name: str, service_manifest_name: str, code_package_name: str, service_package_activation_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosRestartCodePackageFaultCompletedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) - self.fault_group_id = fault_group_id - self.fault_id = fault_id - self.node_name = node_name - self.service_manifest_name = service_manifest_name - self.code_package_name = code_package_name - self.service_package_activation_id = service_package_activation_id - self.kind = 'ChaosRestartCodePackageFaultCompleted' 
diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_completed_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_completed_event.py deleted file mode 100644 index 139fa15396df..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_completed_event.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event import NodeEvent - - -class ChaosRestartNodeFaultCompletedEvent(NodeEvent): - """Chaos Restart Node Fault Completed event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance_id: Required. Id of Node instance. - :type node_instance_id: long - :param fault_group_id: Required. Id of fault group. - :type fault_group_id: str - :param fault_id: Required. Id of fault. 
- :type fault_id: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance_id': {'required': True}, - 'fault_group_id': {'required': True}, - 'fault_id': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance_id': {'key': 'NodeInstanceId', 'type': 'long'}, - 'fault_group_id': {'key': 'FaultGroupId', 'type': 'str'}, - 'fault_id': {'key': 'FaultId', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(ChaosRestartNodeFaultCompletedEvent, self).__init__(**kwargs) - self.node_instance_id = kwargs.get('node_instance_id', None) - self.fault_group_id = kwargs.get('fault_group_id', None) - self.fault_id = kwargs.get('fault_id', None) - self.kind = 'ChaosRestartNodeFaultCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_completed_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_completed_event_py3.py deleted file mode 100644 index 0960127d71a2..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/chaos_restart_node_fault_completed_event_py3.py +++ /dev/null @@ -1,66 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
-# -------------------------------------------------------------------------- - -from .node_event_py3 import NodeEvent - - -class ChaosRestartNodeFaultCompletedEvent(NodeEvent): - """Chaos Restart Node Fault Completed event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance_id: Required. Id of Node instance. - :type node_instance_id: long - :param fault_group_id: Required. Id of fault group. - :type fault_group_id: str - :param fault_id: Required. Id of fault. - :type fault_id: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance_id': {'required': True}, - 'fault_group_id': {'required': True}, - 'fault_id': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance_id': {'key': 'NodeInstanceId', 'type': 'long'}, - 'fault_group_id': {'key': 'FaultGroupId', 'type': 'str'}, - 'fault_id': {'key': 'FaultId', 'type': 'str'}, - } - - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, fault_group_id: str, fault_id: str, has_correlated_events: bool=None, **kwargs) -> None: - 
super(ChaosRestartNodeFaultCompletedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) - self.node_instance_id = node_instance_id - self.fault_group_id = fault_group_id - self.fault_id = fault_id - self.kind = 'ChaosRestartNodeFaultCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_started_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_started_event.py index 3e16a281a49c..92663b03baf8 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_started_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_started_event.py @@ -20,6 +20,8 @@ class ChaosStartedEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -72,6 +74,7 @@ class ChaosStartedEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_started_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_started_event_py3.py index cf27a30c9cf5..f80ed93f2ff7 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_started_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_started_event_py3.py @@ -20,6 +20,8 @@ class ChaosStartedEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. 
:type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -72,6 +74,7 @@ class ChaosStartedEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -87,8 +90,8 @@ class ChaosStartedEvent(ClusterEvent): 'chaos_context': {'key': 'ChaosContext', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, max_concurrent_faults: int, time_to_run_in_seconds: float, max_cluster_stabilization_timeout_in_seconds: float, wait_time_between_iterations_in_seconds: float, wait_time_between_fautls_in_seconds: float, move_replica_fault_enabled: bool, included_node_type_list: str, included_application_list: str, cluster_health_policy: str, chaos_context: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosStartedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, max_concurrent_faults: int, time_to_run_in_seconds: float, max_cluster_stabilization_timeout_in_seconds: float, wait_time_between_iterations_in_seconds: float, wait_time_between_fautls_in_seconds: float, move_replica_fault_enabled: bool, included_node_type_list: str, included_application_list: str, cluster_health_policy: str, chaos_context: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosStartedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) 
self.max_concurrent_faults = max_concurrent_faults self.time_to_run_in_seconds = time_to_run_in_seconds self.max_cluster_stabilization_timeout_in_seconds = max_cluster_stabilization_timeout_in_seconds diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event.py b/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event.py index ebd6b6e07639..744662f50b58 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event.py @@ -20,6 +20,8 @@ class ChaosStoppedEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -40,6 +42,7 @@ class ChaosStoppedEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event_py3.py b/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event_py3.py index f19520ddda11..70be550d44c3 100644 --- a/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/chaos_stopped_event_py3.py @@ -20,6 +20,8 @@ class ChaosStoppedEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -40,13 +42,14 @@ class ChaosStoppedEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, 'reason': {'key': 'Reason', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, reason: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ChaosStoppedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, reason: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ChaosStoppedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.reason = reason self.kind = 'ChaosStopped' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description.py b/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description.py index e5599d7e52b4..699c387fb127 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description.py @@ -17,10 +17,14 @@ class ClusterConfigurationUpgradeDescription(Model): All required parameters must be populated in order to send to Azure. - :param cluster_config: Required. The cluster configuration. + :param cluster_config: Required. The cluster configuration as a JSON + string. 
For example, [this + file](https://github.com/Azure-Samples/service-fabric-dotnet-standalone-cluster-configuration/blob/master/Samples/ClusterConfig.Unsecure.DevCluster.json) + contains JSON describing the [nodes and other properties of the + cluster](https://docs.microsoft.com/azure/service-fabric/service-fabric-cluster-manifest). :type cluster_config: str :param health_check_retry_timeout: The length of time between attempts to - perform a health checks if the application or cluster is not healthy. + perform health checks if the application or cluster is not healthy. Default value: "PT0H0M0S" . :type health_check_retry_timeout: timedelta :param health_check_wait_duration_in_seconds: The length of time to wait @@ -28,8 +32,8 @@ class ClusterConfigurationUpgradeDescription(Model): process. Default value: "PT0H0M0S" . :type health_check_wait_duration_in_seconds: timedelta :param health_check_stable_duration_in_seconds: The length of time that - the application or cluster must remain healthy. Default value: "PT0H0M0S" - . + the application or cluster must remain healthy before the upgrade proceeds + to the next upgrade domain. Default value: "PT0H0M0S" . :type health_check_stable_duration_in_seconds: timedelta :param upgrade_domain_timeout_in_seconds: The timeout for the upgrade domain. Default value: "PT0H0M0S" . diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description_py3.py index 594219794d72..856bd470820d 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_configuration_upgrade_description_py3.py @@ -17,10 +17,14 @@ class ClusterConfigurationUpgradeDescription(Model): All required parameters must be populated in order to send to Azure. - :param cluster_config: Required. The cluster configuration. 
+ :param cluster_config: Required. The cluster configuration as a JSON + string. For example, [this + file](https://github.com/Azure-Samples/service-fabric-dotnet-standalone-cluster-configuration/blob/master/Samples/ClusterConfig.Unsecure.DevCluster.json) + contains JSON describing the [nodes and other properties of the + cluster](https://docs.microsoft.com/azure/service-fabric/service-fabric-cluster-manifest). :type cluster_config: str :param health_check_retry_timeout: The length of time between attempts to - perform a health checks if the application or cluster is not healthy. + perform health checks if the application or cluster is not healthy. Default value: "PT0H0M0S" . :type health_check_retry_timeout: timedelta :param health_check_wait_duration_in_seconds: The length of time to wait @@ -28,8 +32,8 @@ class ClusterConfigurationUpgradeDescription(Model): process. Default value: "PT0H0M0S" . :type health_check_wait_duration_in_seconds: timedelta :param health_check_stable_duration_in_seconds: The length of time that - the application or cluster must remain healthy. Default value: "PT0H0M0S" - . + the application or cluster must remain healthy before the upgrade proceeds + to the next upgrade domain. Default value: "PT0H0M0S" . :type health_check_stable_duration_in_seconds: timedelta :param upgrade_domain_timeout_in_seconds: The timeout for the upgrade domain. Default value: "PT0H0M0S" . diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_event.py index 61c4c454d794..d4822d348c34 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_event.py @@ -16,10 +16,10 @@ class ClusterEvent(FabricEvent): """Represents the base for all Cluster Events. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ClusterHealthReportCreatedEvent, - ClusterHealthReportExpiredEvent, ClusterUpgradeCompleteEvent, - ClusterUpgradeDomainCompleteEvent, ClusterUpgradeRollbackCompleteEvent, - ClusterUpgradeRollbackStartEvent, ClusterUpgradeStartEvent, + sub-classes are: ClusterNewHealthReportEvent, + ClusterHealthReportExpiredEvent, ClusterUpgradeCompletedEvent, + ClusterUpgradeDomainCompletedEvent, ClusterUpgradeRollbackCompletedEvent, + ClusterUpgradeRollbackStartedEvent, ClusterUpgradeStartedEvent, ChaosStoppedEvent, ChaosStartedEvent All required parameters must be populated in order to send to Azure. @@ -27,6 +27,8 @@ class ClusterEvent(FabricEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -44,13 +46,14 @@ class ClusterEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, } _subtype_map = { - 'kind': {'ClusterHealthReportCreated': 'ClusterHealthReportCreatedEvent', 'ClusterHealthReportExpired': 'ClusterHealthReportExpiredEvent', 'ClusterUpgradeComplete': 'ClusterUpgradeCompleteEvent', 'ClusterUpgradeDomainComplete': 'ClusterUpgradeDomainCompleteEvent', 'ClusterUpgradeRollbackComplete': 'ClusterUpgradeRollbackCompleteEvent', 'ClusterUpgradeRollbackStart': 'ClusterUpgradeRollbackStartEvent', 'ClusterUpgradeStart': 'ClusterUpgradeStartEvent', 'ChaosStopped': 'ChaosStoppedEvent', 'ChaosStarted': 'ChaosStartedEvent'} + 'kind': {'ClusterNewHealthReport': 'ClusterNewHealthReportEvent', 'ClusterHealthReportExpired': 
'ClusterHealthReportExpiredEvent', 'ClusterUpgradeCompleted': 'ClusterUpgradeCompletedEvent', 'ClusterUpgradeDomainCompleted': 'ClusterUpgradeDomainCompletedEvent', 'ClusterUpgradeRollbackCompleted': 'ClusterUpgradeRollbackCompletedEvent', 'ClusterUpgradeRollbackStarted': 'ClusterUpgradeRollbackStartedEvent', 'ClusterUpgradeStarted': 'ClusterUpgradeStartedEvent', 'ChaosStopped': 'ChaosStoppedEvent', 'ChaosStarted': 'ChaosStartedEvent'} } def __init__(self, **kwargs): diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_event_py3.py index 86a036afe493..04613f0de773 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_event_py3.py @@ -16,10 +16,10 @@ class ClusterEvent(FabricEvent): """Represents the base for all Cluster Events. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ClusterHealthReportCreatedEvent, - ClusterHealthReportExpiredEvent, ClusterUpgradeCompleteEvent, - ClusterUpgradeDomainCompleteEvent, ClusterUpgradeRollbackCompleteEvent, - ClusterUpgradeRollbackStartEvent, ClusterUpgradeStartEvent, + sub-classes are: ClusterNewHealthReportEvent, + ClusterHealthReportExpiredEvent, ClusterUpgradeCompletedEvent, + ClusterUpgradeDomainCompletedEvent, ClusterUpgradeRollbackCompletedEvent, + ClusterUpgradeRollbackStartedEvent, ClusterUpgradeStartedEvent, ChaosStoppedEvent, ChaosStartedEvent All required parameters must be populated in order to send to Azure. @@ -27,6 +27,8 @@ class ClusterEvent(FabricEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -44,15 +46,16 @@ class ClusterEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, } _subtype_map = { - 'kind': {'ClusterHealthReportCreated': 'ClusterHealthReportCreatedEvent', 'ClusterHealthReportExpired': 'ClusterHealthReportExpiredEvent', 'ClusterUpgradeComplete': 'ClusterUpgradeCompleteEvent', 'ClusterUpgradeDomainComplete': 'ClusterUpgradeDomainCompleteEvent', 'ClusterUpgradeRollbackComplete': 'ClusterUpgradeRollbackCompleteEvent', 'ClusterUpgradeRollbackStart': 'ClusterUpgradeRollbackStartEvent', 'ClusterUpgradeStart': 'ClusterUpgradeStartEvent', 'ChaosStopped': 'ChaosStoppedEvent', 'ChaosStarted': 'ChaosStartedEvent'} + 'kind': {'ClusterNewHealthReport': 'ClusterNewHealthReportEvent', 'ClusterHealthReportExpired': 'ClusterHealthReportExpiredEvent', 'ClusterUpgradeCompleted': 'ClusterUpgradeCompletedEvent', 'ClusterUpgradeDomainCompleted': 'ClusterUpgradeDomainCompletedEvent', 'ClusterUpgradeRollbackCompleted': 'ClusterUpgradeRollbackCompletedEvent', 'ClusterUpgradeRollbackStarted': 'ClusterUpgradeRollbackStartedEvent', 'ClusterUpgradeStarted': 'ClusterUpgradeStartedEvent', 'ChaosStopped': 'ChaosStoppedEvent', 'ChaosStarted': 'ChaosStartedEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterEvent, 
self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.kind = 'ClusterEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event.py index d09441c8e955..a714abca488f 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event.py @@ -20,6 +20,8 @@ class ClusterHealthReportExpiredEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ClusterHealthReportExpiredEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event_py3.py index 8f4ac4497b86..fcef06a24166 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class ClusterHealthReportExpiredEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. 
:type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ClusterHealthReportExpiredEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -75,8 +78,8 @@ class ClusterHealthReportExpiredEvent(ClusterEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.source_id = source_id self.property = property self.health_state = health_state diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_new_health_report_event.py similarity index 92% rename from 
azure-servicefabric/azure/servicefabric/models/cluster_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/cluster_new_health_report_event.py index d6893b836073..3c6b2be1c8b6 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_new_health_report_event.py @@ -12,7 +12,7 @@ from .cluster_event import ClusterEvent -class ClusterHealthReportCreatedEvent(ClusterEvent): +class ClusterNewHealthReportEvent(ClusterEvent): """Cluster Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ClusterHealthReportCreatedEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ClusterHealthReportCreatedEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -76,7 +79,7 @@ class ClusterHealthReportCreatedEvent(ClusterEvent): } def __init__(self, **kwargs): - super(ClusterHealthReportCreatedEvent, self).__init__(**kwargs) + super(ClusterNewHealthReportEvent, self).__init__(**kwargs) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) self.health_state = kwargs.get('health_state', None) @@ -85,4 +88,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) 
self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'ClusterHealthReportCreated' + self.kind = 'ClusterNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_new_health_report_event_py3.py similarity index 87% rename from azure-servicefabric/azure/servicefabric/models/cluster_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/cluster_new_health_report_event_py3.py index f6c18832dde5..4f31abb55569 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .cluster_event_py3 import ClusterEvent -class ClusterHealthReportCreatedEvent(ClusterEvent): +class ClusterNewHealthReportEvent(ClusterEvent): """Cluster Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ClusterHealthReportCreatedEvent(ClusterEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class ClusterHealthReportCreatedEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -75,8 +78,8 @@ class ClusterHealthReportCreatedEvent(ClusterEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.source_id = source_id self.property = property self.health_state = health_state @@ -85,4 +88,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, source_id: str, proper self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'ClusterHealthReportCreated' + self.kind = 'ClusterNewHealthReport' diff --git 
a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_complete_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_completed_event.py similarity index 87% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_completed_event.py index d9c02c387e1d..3b75cdd10c85 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_completed_event.py @@ -12,14 +12,16 @@ from .cluster_event import ClusterEvent -class ClusterUpgradeCompleteEvent(ClusterEvent): - """Cluster Upgrade Complete event. +class ClusterUpgradeCompletedEvent(ClusterEvent): + """Cluster Upgrade Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -44,6 +46,7 @@ class ClusterUpgradeCompleteEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -52,7 +55,7 @@ class ClusterUpgradeCompleteEvent(ClusterEvent): } def __init__(self, **kwargs): - super(ClusterUpgradeCompleteEvent, self).__init__(**kwargs) + super(ClusterUpgradeCompletedEvent, self).__init__(**kwargs) self.target_cluster_version = kwargs.get('target_cluster_version', None) self.overall_upgrade_elapsed_time_in_ms = kwargs.get('overall_upgrade_elapsed_time_in_ms', None) - self.kind = 'ClusterUpgradeComplete' + self.kind = 'ClusterUpgradeCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_completed_event_py3.py similarity index 81% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_completed_event_py3.py index 332a0f192fb5..8422f87b2c7e 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_completed_event_py3.py @@ -12,14 +12,16 @@ from .cluster_event_py3 import ClusterEvent -class ClusterUpgradeCompleteEvent(ClusterEvent): - """Cluster Upgrade Complete event. +class ClusterUpgradeCompletedEvent(ClusterEvent): + """Cluster Upgrade Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. 
:type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -44,6 +46,7 @@ class ClusterUpgradeCompleteEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -51,8 +54,8 @@ class ClusterUpgradeCompleteEvent(ClusterEvent): 'overall_upgrade_elapsed_time_in_ms': {'key': 'OverallUpgradeElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, overall_upgrade_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterUpgradeCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, overall_upgrade_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterUpgradeCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.target_cluster_version = target_cluster_version self.overall_upgrade_elapsed_time_in_ms = overall_upgrade_elapsed_time_in_ms - self.kind = 'ClusterUpgradeComplete' + self.kind = 'ClusterUpgradeCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_complete_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_completed_event.py similarity index 88% rename from 
azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_completed_event.py index ce7d4fdeb127..42a877024969 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_completed_event.py @@ -12,14 +12,16 @@ from .cluster_event import ClusterEvent -class ClusterUpgradeDomainCompleteEvent(ClusterEvent): - """Cluster Upgrade Domain Complete event. +class ClusterUpgradeDomainCompletedEvent(ClusterEvent): + """Cluster Upgrade Domain Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -50,6 +52,7 @@ class ClusterUpgradeDomainCompleteEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -60,9 +63,9 @@ class ClusterUpgradeDomainCompleteEvent(ClusterEvent): } def __init__(self, **kwargs): - super(ClusterUpgradeDomainCompleteEvent, self).__init__(**kwargs) + super(ClusterUpgradeDomainCompletedEvent, self).__init__(**kwargs) self.target_cluster_version = kwargs.get('target_cluster_version', None) self.upgrade_state = kwargs.get('upgrade_state', None) self.upgrade_domains = kwargs.get('upgrade_domains', None) self.upgrade_domain_elapsed_time_in_ms = kwargs.get('upgrade_domain_elapsed_time_in_ms', None) - self.kind = 
'ClusterUpgradeDomainComplete' + self.kind = 'ClusterUpgradeDomainCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_completed_event_py3.py similarity index 82% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_completed_event_py3.py index fa13c8bbd3e4..df7ea67d1f8e 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_domain_completed_event_py3.py @@ -12,14 +12,16 @@ from .cluster_event_py3 import ClusterEvent -class ClusterUpgradeDomainCompleteEvent(ClusterEvent): - """Cluster Upgrade Domain Complete event. +class ClusterUpgradeDomainCompletedEvent(ClusterEvent): + """Cluster Upgrade Domain Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -50,6 +52,7 @@ class ClusterUpgradeDomainCompleteEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -59,10 +62,10 @@ class ClusterUpgradeDomainCompleteEvent(ClusterEvent): 'upgrade_domain_elapsed_time_in_ms': {'key': 'UpgradeDomainElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, upgrade_state: str, upgrade_domains: str, upgrade_domain_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterUpgradeDomainCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, upgrade_state: str, upgrade_domains: str, upgrade_domain_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterUpgradeDomainCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.target_cluster_version = target_cluster_version self.upgrade_state = upgrade_state self.upgrade_domains = upgrade_domains self.upgrade_domain_elapsed_time_in_ms = upgrade_domain_elapsed_time_in_ms - self.kind = 'ClusterUpgradeDomainComplete' + self.kind = 'ClusterUpgradeDomainCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_start_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_completed_event.py similarity index 87% rename from 
azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_start_event.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_completed_event.py index 8753b3d223ee..a914773939c4 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_start_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_completed_event.py @@ -12,14 +12,16 @@ from .cluster_event import ClusterEvent -class ClusterUpgradeRollbackStartEvent(ClusterEvent): - """Cluster Upgrade Rollback Start event. +class ClusterUpgradeRollbackCompletedEvent(ClusterEvent): + """Cluster Upgrade Rollback Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -47,6 +49,7 @@ class ClusterUpgradeRollbackStartEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -56,8 +59,8 @@ class ClusterUpgradeRollbackStartEvent(ClusterEvent): } def __init__(self, **kwargs): - super(ClusterUpgradeRollbackStartEvent, self).__init__(**kwargs) + super(ClusterUpgradeRollbackCompletedEvent, self).__init__(**kwargs) self.target_cluster_version = kwargs.get('target_cluster_version', None) self.failure_reason = kwargs.get('failure_reason', None) self.overall_upgrade_elapsed_time_in_ms = kwargs.get('overall_upgrade_elapsed_time_in_ms', None) - self.kind = 'ClusterUpgradeRollbackStart' + self.kind = 
'ClusterUpgradeRollbackCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_completed_event_py3.py similarity index 81% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_completed_event_py3.py index 608701dfb776..75e456c3e866 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_completed_event_py3.py @@ -12,14 +12,16 @@ from .cluster_event_py3 import ClusterEvent -class ClusterUpgradeRollbackCompleteEvent(ClusterEvent): - """Cluster Upgrade Rollback Complete event. +class ClusterUpgradeRollbackCompletedEvent(ClusterEvent): + """Cluster Upgrade Rollback Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -47,6 +49,7 @@ class ClusterUpgradeRollbackCompleteEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -55,9 +58,9 @@ class ClusterUpgradeRollbackCompleteEvent(ClusterEvent): 'overall_upgrade_elapsed_time_in_ms': {'key': 'OverallUpgradeElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterUpgradeRollbackCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterUpgradeRollbackCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.target_cluster_version = target_cluster_version self.failure_reason = failure_reason self.overall_upgrade_elapsed_time_in_ms = overall_upgrade_elapsed_time_in_ms - self.kind = 'ClusterUpgradeRollbackComplete' + self.kind = 'ClusterUpgradeRollbackCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_complete_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_started_event.py similarity index 87% rename from 
azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_started_event.py index 968aa073f7e8..b278a7de8898 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_started_event.py @@ -12,14 +12,16 @@ from .cluster_event import ClusterEvent -class ClusterUpgradeRollbackCompleteEvent(ClusterEvent): - """Cluster Upgrade Rollback Complete event. +class ClusterUpgradeRollbackStartedEvent(ClusterEvent): + """Cluster Upgrade Rollback Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -47,6 +49,7 @@ class ClusterUpgradeRollbackCompleteEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -56,8 +59,8 @@ class ClusterUpgradeRollbackCompleteEvent(ClusterEvent): } def __init__(self, **kwargs): - super(ClusterUpgradeRollbackCompleteEvent, self).__init__(**kwargs) + super(ClusterUpgradeRollbackStartedEvent, self).__init__(**kwargs) self.target_cluster_version = kwargs.get('target_cluster_version', None) self.failure_reason = kwargs.get('failure_reason', None) self.overall_upgrade_elapsed_time_in_ms = kwargs.get('overall_upgrade_elapsed_time_in_ms', None) - self.kind = 'ClusterUpgradeRollbackComplete' + self.kind = 
'ClusterUpgradeRollbackStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_start_event_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_started_event_py3.py similarity index 81% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_start_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_started_event_py3.py index a38772063737..54e00da901ba 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_start_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_rollback_started_event_py3.py @@ -12,14 +12,16 @@ from .cluster_event_py3 import ClusterEvent -class ClusterUpgradeRollbackStartEvent(ClusterEvent): - """Cluster Upgrade Rollback Start event. +class ClusterUpgradeRollbackStartedEvent(ClusterEvent): + """Cluster Upgrade Rollback Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -47,6 +49,7 @@ class ClusterUpgradeRollbackStartEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -55,9 +58,9 @@ class ClusterUpgradeRollbackStartEvent(ClusterEvent): 'overall_upgrade_elapsed_time_in_ms': {'key': 'OverallUpgradeElapsedTimeInMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterUpgradeRollbackStartEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, target_cluster_version: str, failure_reason: str, overall_upgrade_elapsed_time_in_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterUpgradeRollbackStartedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.target_cluster_version = target_cluster_version self.failure_reason = failure_reason self.overall_upgrade_elapsed_time_in_ms = overall_upgrade_elapsed_time_in_ms - self.kind = 'ClusterUpgradeRollbackStart' + self.kind = 'ClusterUpgradeRollbackStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_start_event.py b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_started_event.py similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_start_event.py rename to 
azure-servicefabric/azure/servicefabric/models/cluster_upgrade_started_event.py index 9058c78dd787..7222ff3358ef 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_start_event.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_started_event.py @@ -12,14 +12,16 @@ from .cluster_event import ClusterEvent -class ClusterUpgradeStartEvent(ClusterEvent): - """Cluster Upgrade Start event. +class ClusterUpgradeStartedEvent(ClusterEvent): + """Cluster Upgrade Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -52,6 +54,7 @@ class ClusterUpgradeStartEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -63,10 +66,10 @@ class ClusterUpgradeStartEvent(ClusterEvent): } def __init__(self, **kwargs): - super(ClusterUpgradeStartEvent, self).__init__(**kwargs) + super(ClusterUpgradeStartedEvent, self).__init__(**kwargs) self.current_cluster_version = kwargs.get('current_cluster_version', None) self.target_cluster_version = kwargs.get('target_cluster_version', None) self.upgrade_type = kwargs.get('upgrade_type', None) self.rolling_upgrade_mode = kwargs.get('rolling_upgrade_mode', None) self.failure_action = kwargs.get('failure_action', None) - self.kind = 'ClusterUpgradeStart' + self.kind = 'ClusterUpgradeStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_start_event_py3.py 
b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_started_event_py3.py similarity index 83% rename from azure-servicefabric/azure/servicefabric/models/cluster_upgrade_start_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/cluster_upgrade_started_event_py3.py index b3c4a53469d3..e35bc36b55e6 100644 --- a/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_start_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/cluster_upgrade_started_event_py3.py @@ -12,14 +12,16 @@ from .cluster_event_py3 import ClusterEvent -class ClusterUpgradeStartEvent(ClusterEvent): - """Cluster Upgrade Start event. +class ClusterUpgradeStartedEvent(ClusterEvent): + """Cluster Upgrade Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -52,6 +54,7 @@ class ClusterUpgradeStartEvent(ClusterEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -62,11 +65,11 @@ class ClusterUpgradeStartEvent(ClusterEvent): 'failure_action': {'key': 'FailureAction', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, current_cluster_version: str, target_cluster_version: str, upgrade_type: str, rolling_upgrade_mode: str, failure_action: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ClusterUpgradeStartEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, current_cluster_version: str, target_cluster_version: str, upgrade_type: str, rolling_upgrade_mode: str, failure_action: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ClusterUpgradeStartedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.current_cluster_version = current_cluster_version self.target_cluster_version = target_cluster_version self.upgrade_type = upgrade_type self.rolling_upgrade_mode = rolling_upgrade_mode self.failure_action = failure_action - self.kind = 'ClusterUpgradeStart' + self.kind = 'ClusterUpgradeStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_version.py b/azure-servicefabric/azure/servicefabric/models/cluster_version.py new file mode 100644 index 000000000000..15b4fdbf77b6 --- /dev/null +++ 
b/azure-servicefabric/azure/servicefabric/models/cluster_version.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ClusterVersion(Model): + """The cluster version. + + :param version: The Service Fabric cluster runtime version. + :type version: str + """ + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ClusterVersion, self).__init__(**kwargs) + self.version = kwargs.get('version', None) diff --git a/azure-servicefabric/azure/servicefabric/models/cluster_version_py3.py b/azure-servicefabric/azure/servicefabric/models/cluster_version_py3.py new file mode 100644 index 000000000000..fcf80889c1de --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/cluster_version_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ClusterVersion(Model): + """The cluster version. + + :param version: The Service Fabric cluster runtime version. 
+ :type version: str + """ + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str'}, + } + + def __init__(self, *, version: str=None, **kwargs) -> None: + super(ClusterVersion, self).__init__(**kwargs) + self.version = version diff --git a/azure-servicefabric/azure/servicefabric/models/container_code_package_properties.py b/azure-servicefabric/azure/servicefabric/models/container_code_package_properties.py index 7f9982d44792..b41fecb62b10 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_code_package_properties.py +++ b/azure-servicefabric/azure/servicefabric/models/container_code_package_properties.py @@ -45,15 +45,23 @@ class ContainerCodePackageProperties(Model): :type labels: list[~azure.servicefabric.models.ContainerLabel] :param endpoints: The endpoints exposed by this container. :type endpoints: list[~azure.servicefabric.models.EndpointProperties] - :param resources: Required. This type describes the resource requirements - for a container or a service. + :param resources: Required. The resources required by this container. :type resources: ~azure.servicefabric.models.ResourceRequirements - :param volume_refs: The volumes to be attached to the container. - :type volume_refs: list[~azure.servicefabric.models.ContainerVolume] - :ivar instance_view: Runtime information of a container instance. - :vartype instance_view: ~azure.servicefabric.models.ContainerInstanceView + :param volume_refs: Volumes to be attached to the container. The lifetime + of these volumes is independent of the application's lifetime. + :type volume_refs: list[~azure.servicefabric.models.VolumeReference] + :param volumes: Volumes to be attached to the container. The lifetime of + these volumes is scoped to the application's lifetime. + :type volumes: list[~azure.servicefabric.models.ApplicationScopedVolume] :param diagnostics: Reference to sinks in DiagnosticsDescription. 
:type diagnostics: ~azure.servicefabric.models.DiagnosticsRef + :param reliable_collections_refs: A list of ReliableCollection resources + used by this particular code package. Please refer to + ReliablecollectionsRef for more details. + :type reliable_collections_refs: + list[~azure.servicefabric.models.ReliableCollectionsRef] + :ivar instance_view: Runtime information of a container instance. + :vartype instance_view: ~azure.servicefabric.models.ContainerInstanceView """ _validation = { @@ -74,9 +82,11 @@ class ContainerCodePackageProperties(Model): 'labels': {'key': 'labels', 'type': '[ContainerLabel]'}, 'endpoints': {'key': 'endpoints', 'type': '[EndpointProperties]'}, 'resources': {'key': 'resources', 'type': 'ResourceRequirements'}, - 'volume_refs': {'key': 'volumeRefs', 'type': '[ContainerVolume]'}, - 'instance_view': {'key': 'instanceView', 'type': 'ContainerInstanceView'}, + 'volume_refs': {'key': 'volumeRefs', 'type': '[VolumeReference]'}, + 'volumes': {'key': 'volumes', 'type': '[ApplicationScopedVolume]'}, 'diagnostics': {'key': 'diagnostics', 'type': 'DiagnosticsRef'}, + 'reliable_collections_refs': {'key': 'reliableCollectionsRefs', 'type': '[ReliableCollectionsRef]'}, + 'instance_view': {'key': 'instanceView', 'type': 'ContainerInstanceView'}, } def __init__(self, **kwargs): @@ -92,5 +102,7 @@ def __init__(self, **kwargs): self.endpoints = kwargs.get('endpoints', None) self.resources = kwargs.get('resources', None) self.volume_refs = kwargs.get('volume_refs', None) - self.instance_view = None + self.volumes = kwargs.get('volumes', None) self.diagnostics = kwargs.get('diagnostics', None) + self.reliable_collections_refs = kwargs.get('reliable_collections_refs', None) + self.instance_view = None diff --git a/azure-servicefabric/azure/servicefabric/models/container_code_package_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/container_code_package_properties_py3.py index 8022d73d5487..44d6e2666dc1 100644 --- 
a/azure-servicefabric/azure/servicefabric/models/container_code_package_properties_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/container_code_package_properties_py3.py @@ -45,15 +45,23 @@ class ContainerCodePackageProperties(Model): :type labels: list[~azure.servicefabric.models.ContainerLabel] :param endpoints: The endpoints exposed by this container. :type endpoints: list[~azure.servicefabric.models.EndpointProperties] - :param resources: Required. This type describes the resource requirements - for a container or a service. + :param resources: Required. The resources required by this container. :type resources: ~azure.servicefabric.models.ResourceRequirements - :param volume_refs: The volumes to be attached to the container. - :type volume_refs: list[~azure.servicefabric.models.ContainerVolume] - :ivar instance_view: Runtime information of a container instance. - :vartype instance_view: ~azure.servicefabric.models.ContainerInstanceView + :param volume_refs: Volumes to be attached to the container. The lifetime + of these volumes is independent of the application's lifetime. + :type volume_refs: list[~azure.servicefabric.models.VolumeReference] + :param volumes: Volumes to be attached to the container. The lifetime of + these volumes is scoped to the application's lifetime. + :type volumes: list[~azure.servicefabric.models.ApplicationScopedVolume] :param diagnostics: Reference to sinks in DiagnosticsDescription. :type diagnostics: ~azure.servicefabric.models.DiagnosticsRef + :param reliable_collections_refs: A list of ReliableCollection resources + used by this particular code package. Please refer to + ReliablecollectionsRef for more details. + :type reliable_collections_refs: + list[~azure.servicefabric.models.ReliableCollectionsRef] + :ivar instance_view: Runtime information of a container instance. 
+ :vartype instance_view: ~azure.servicefabric.models.ContainerInstanceView """ _validation = { @@ -74,12 +82,14 @@ class ContainerCodePackageProperties(Model): 'labels': {'key': 'labels', 'type': '[ContainerLabel]'}, 'endpoints': {'key': 'endpoints', 'type': '[EndpointProperties]'}, 'resources': {'key': 'resources', 'type': 'ResourceRequirements'}, - 'volume_refs': {'key': 'volumeRefs', 'type': '[ContainerVolume]'}, - 'instance_view': {'key': 'instanceView', 'type': 'ContainerInstanceView'}, + 'volume_refs': {'key': 'volumeRefs', 'type': '[VolumeReference]'}, + 'volumes': {'key': 'volumes', 'type': '[ApplicationScopedVolume]'}, 'diagnostics': {'key': 'diagnostics', 'type': 'DiagnosticsRef'}, + 'reliable_collections_refs': {'key': 'reliableCollectionsRefs', 'type': '[ReliableCollectionsRef]'}, + 'instance_view': {'key': 'instanceView', 'type': 'ContainerInstanceView'}, } - def __init__(self, *, name: str, image: str, resources, image_registry_credential=None, entrypoint: str=None, commands=None, environment_variables=None, settings=None, labels=None, endpoints=None, volume_refs=None, diagnostics=None, **kwargs) -> None: + def __init__(self, *, name: str, image: str, resources, image_registry_credential=None, entrypoint: str=None, commands=None, environment_variables=None, settings=None, labels=None, endpoints=None, volume_refs=None, volumes=None, diagnostics=None, reliable_collections_refs=None, **kwargs) -> None: super(ContainerCodePackageProperties, self).__init__(**kwargs) self.name = name self.image = image @@ -92,5 +102,7 @@ def __init__(self, *, name: str, image: str, resources, image_registry_credentia self.endpoints = endpoints self.resources = resources self.volume_refs = volume_refs - self.instance_view = None + self.volumes = volumes self.diagnostics = diagnostics + self.reliable_collections_refs = reliable_collections_refs + self.instance_view = None diff --git a/azure-servicefabric/azure/servicefabric/models/container_instance_event.py 
b/azure-servicefabric/azure/servicefabric/models/container_instance_event.py index 9b67613e8c40..1796cecba7ad 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_instance_event.py +++ b/azure-servicefabric/azure/servicefabric/models/container_instance_event.py @@ -20,6 +20,8 @@ class ContainerInstanceEvent(FabricEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -37,6 +39,7 @@ class ContainerInstanceEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/container_instance_event_py3.py b/azure-servicefabric/azure/servicefabric/models/container_instance_event_py3.py index 964cfb2cf087..82e227c10b6a 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_instance_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/container_instance_event_py3.py @@ -20,6 +20,8 @@ class ContainerInstanceEvent(FabricEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -37,11 +39,12 @@ class ContainerInstanceEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ContainerInstanceEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ContainerInstanceEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.kind = 'ContainerInstanceEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event.py index 49a5ba601c26..5646f2d6d6b6 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event.py @@ -20,6 +20,8 @@ class DeployedApplicationHealthReportExpiredEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -77,6 +79,7 @@ class DeployedApplicationHealthReportExpiredEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event_py3.py index 2a4e7960cea8..29489b61dac0 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class DeployedApplicationHealthReportExpiredEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -77,6 +79,7 @@ class DeployedApplicationHealthReportExpiredEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -93,8 +96,8 @@ class DeployedApplicationHealthReportExpiredEvent(ApplicationEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, node_name: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(DeployedApplicationHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, node_name: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(DeployedApplicationHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_instance_id = application_instance_id self.node_name = node_name self.source_id = source_id diff --git 
a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/deployed_application_new_health_report_event.py similarity index 93% rename from azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/deployed_application_new_health_report_event.py index 287298f3e914..297abe18034b 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_application_new_health_report_event.py @@ -12,7 +12,7 @@ from .application_event import ApplicationEvent -class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): +class DeployedApplicationNewHealthReportEvent(ApplicationEvent): """Deployed Application Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -77,6 +79,7 @@ class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -94,7 +97,7 @@ class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(DeployedApplicationHealthReportCreatedEvent, self).__init__(**kwargs) + super(DeployedApplicationNewHealthReportEvent, self).__init__(**kwargs) self.application_instance_id = kwargs.get('application_instance_id', None) self.node_name = kwargs.get('node_name', None) self.source_id = kwargs.get('source_id', None) @@ -105,4 +108,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'DeployedApplicationHealthReportCreated' + self.kind = 'DeployedApplicationNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/deployed_application_new_health_report_event_py3.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/deployed_application_new_health_report_event_py3.py index 440d679eb2e1..7f837853194f 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_application_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_application_new_health_report_event_py3.py @@ -12,7 +12,7 
@@ from .application_event_py3 import ApplicationEvent -class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): +class DeployedApplicationNewHealthReportEvent(ApplicationEvent): """Deployed Application Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -77,6 +79,7 @@ class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -93,8 +96,8 @@ class DeployedApplicationHealthReportCreatedEvent(ApplicationEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, node_name: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(DeployedApplicationHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, application_instance_id: int, node_name: str, source_id: str, property: str, 
health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(DeployedApplicationNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.application_instance_id = application_instance_id self.node_name = node_name self.source_id = source_id @@ -105,4 +108,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, a self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'DeployedApplicationHealthReportCreated' + self.kind = 'DeployedApplicationNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_health_report_expired_event.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_expired_event.py rename to azure-servicefabric/azure/servicefabric/models/deployed_service_package_health_report_expired_event.py index 8620c7d627d6..b8af144a0bcb 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_health_report_expired_event.py @@ -12,7 +12,7 @@ from .application_event import ApplicationEvent -class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): +class DeployedServicePackageHealthReportExpiredEvent(ApplicationEvent): """Deployed Service Health Report Expired event. All required parameters must be populated in order to send to Azure. 
@@ -20,6 +20,8 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -51,8 +53,8 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): :type property: str :param health_state: Required. Describes the property health state. :type health_state: str - :param ttl_timespan: Required. Time to live in milli-seconds. - :type ttl_timespan: long + :param time_to_live_ms: Required. Time to live in milli-seconds. + :type time_to_live_ms: long :param sequence_number: Required. Sequence number of report. :type sequence_number: long :param description: Required. Description of report. @@ -76,7 +78,7 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): 'source_id': {'required': True}, 'property': {'required': True}, 'health_state': {'required': True}, - 'ttl_timespan': {'required': True}, + 'time_to_live_ms': {'required': True}, 'sequence_number': {'required': True}, 'description': {'required': True}, 'remove_when_expired': {'required': True}, @@ -85,6 +87,7 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -96,7 +99,7 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): 'source_id': {'key': 'SourceId', 'type': 'str'}, 'property': {'key': 'Property', 'type': 'str'}, 'health_state': {'key': 'HealthState', 'type': 'str'}, - 'ttl_timespan': {'key': 'TTLTimespan', 'type': 'long'}, + 
'time_to_live_ms': {'key': 'TimeToLiveMs', 'type': 'long'}, 'sequence_number': {'key': 'SequenceNumber', 'type': 'long'}, 'description': {'key': 'Description', 'type': 'str'}, 'remove_when_expired': {'key': 'RemoveWhenExpired', 'type': 'bool'}, @@ -104,7 +107,7 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(DeployedServiceHealthReportExpiredEvent, self).__init__(**kwargs) + super(DeployedServicePackageHealthReportExpiredEvent, self).__init__(**kwargs) self.service_manifest = kwargs.get('service_manifest', None) self.service_package_instance_id = kwargs.get('service_package_instance_id', None) self.service_package_activation_id = kwargs.get('service_package_activation_id', None) @@ -112,9 +115,9 @@ def __init__(self, **kwargs): self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) self.health_state = kwargs.get('health_state', None) - self.ttl_timespan = kwargs.get('ttl_timespan', None) + self.time_to_live_ms = kwargs.get('time_to_live_ms', None) self.sequence_number = kwargs.get('sequence_number', None) self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'DeployedServiceHealthReportExpired' + self.kind = 'DeployedServicePackageHealthReportExpired' diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_health_report_expired_event_py3.py similarity index 84% rename from azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_expired_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/deployed_service_package_health_report_expired_event_py3.py index 1180d24139e2..28e8f9d8c564 100644 --- 
a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_health_report_expired_event_py3.py @@ -12,7 +12,7 @@ from .application_event_py3 import ApplicationEvent -class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): +class DeployedServicePackageHealthReportExpiredEvent(ApplicationEvent): """Deployed Service Health Report Expired event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -51,8 +53,8 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): :type property: str :param health_state: Required. Describes the property health state. :type health_state: str - :param ttl_timespan: Required. Time to live in milli-seconds. - :type ttl_timespan: long + :param time_to_live_ms: Required. Time to live in milli-seconds. + :type time_to_live_ms: long :param sequence_number: Required. Sequence number of report. :type sequence_number: long :param description: Required. Description of report. 
@@ -76,7 +78,7 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): 'source_id': {'required': True}, 'property': {'required': True}, 'health_state': {'required': True}, - 'ttl_timespan': {'required': True}, + 'time_to_live_ms': {'required': True}, 'sequence_number': {'required': True}, 'description': {'required': True}, 'remove_when_expired': {'required': True}, @@ -85,6 +87,7 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -96,15 +99,15 @@ class DeployedServiceHealthReportExpiredEvent(ApplicationEvent): 'source_id': {'key': 'SourceId', 'type': 'str'}, 'property': {'key': 'Property', 'type': 'str'}, 'health_state': {'key': 'HealthState', 'type': 'str'}, - 'ttl_timespan': {'key': 'TTLTimespan', 'type': 'long'}, + 'time_to_live_ms': {'key': 'TimeToLiveMs', 'type': 'long'}, 'sequence_number': {'key': 'SequenceNumber', 'type': 'long'}, 'description': {'key': 'Description', 'type': 'str'}, 'remove_when_expired': {'key': 'RemoveWhenExpired', 'type': 'bool'}, 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_manifest: str, service_package_instance_id: int, service_package_activation_id: str, node_name: str, source_id: str, property: str, health_state: str, ttl_timespan: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(DeployedServiceHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + 
def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_manifest: str, service_package_instance_id: int, service_package_activation_id: str, node_name: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(DeployedServicePackageHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.service_manifest = service_manifest self.service_package_instance_id = service_package_instance_id self.service_package_activation_id = service_package_activation_id @@ -112,9 +115,9 @@ def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, s self.source_id = source_id self.property = property self.health_state = health_state - self.ttl_timespan = ttl_timespan + self.time_to_live_ms = time_to_live_ms self.sequence_number = sequence_number self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'DeployedServiceHealthReportExpired' + self.kind = 'DeployedServicePackageHealthReportExpired' diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_new_health_report_event.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/deployed_service_package_new_health_report_event.py index 7b333aa48fbb..744978800f73 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_created_event.py +++ 
b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_new_health_report_event.py @@ -12,7 +12,7 @@ from .application_event import ApplicationEvent -class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): +class DeployedServicePackageNewHealthReportEvent(ApplicationEvent): """Deployed Service Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -51,8 +53,8 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): :type property: str :param health_state: Required. Describes the property health state. :type health_state: str - :param ttl_timespan: Required. Time to live in milli-seconds. - :type ttl_timespan: long + :param time_to_live_ms: Required. Time to live in milli-seconds. + :type time_to_live_ms: long :param sequence_number: Required. Sequence number of report. :type sequence_number: long :param description: Required. Description of report. 
@@ -76,7 +78,7 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): 'source_id': {'required': True}, 'property': {'required': True}, 'health_state': {'required': True}, - 'ttl_timespan': {'required': True}, + 'time_to_live_ms': {'required': True}, 'sequence_number': {'required': True}, 'description': {'required': True}, 'remove_when_expired': {'required': True}, @@ -85,6 +87,7 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -96,7 +99,7 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): 'source_id': {'key': 'SourceId', 'type': 'str'}, 'property': {'key': 'Property', 'type': 'str'}, 'health_state': {'key': 'HealthState', 'type': 'str'}, - 'ttl_timespan': {'key': 'TTLTimespan', 'type': 'long'}, + 'time_to_live_ms': {'key': 'TimeToLiveMs', 'type': 'long'}, 'sequence_number': {'key': 'SequenceNumber', 'type': 'long'}, 'description': {'key': 'Description', 'type': 'str'}, 'remove_when_expired': {'key': 'RemoveWhenExpired', 'type': 'bool'}, @@ -104,7 +107,7 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): } def __init__(self, **kwargs): - super(DeployedServiceHealthReportCreatedEvent, self).__init__(**kwargs) + super(DeployedServicePackageNewHealthReportEvent, self).__init__(**kwargs) self.service_manifest_name = kwargs.get('service_manifest_name', None) self.service_package_instance_id = kwargs.get('service_package_instance_id', None) self.service_package_activation_id = kwargs.get('service_package_activation_id', None) @@ -112,9 +115,9 @@ def __init__(self, **kwargs): self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) self.health_state = 
kwargs.get('health_state', None) - self.ttl_timespan = kwargs.get('ttl_timespan', None) + self.time_to_live_ms = kwargs.get('time_to_live_ms', None) self.sequence_number = kwargs.get('sequence_number', None) self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'DeployedServiceHealthReportCreated' + self.kind = 'DeployedServicePackageNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_new_health_report_event_py3.py similarity index 84% rename from azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/deployed_service_package_new_health_report_event_py3.py index be41c0c0c6c0..487a29575491 100644 --- a/azure-servicefabric/azure/servicefabric/models/deployed_service_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/deployed_service_package_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .application_event_py3 import ApplicationEvent -class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): +class DeployedServicePackageNewHealthReportEvent(ApplicationEvent): """Deployed Service Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -51,8 +53,8 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): :type property: str :param health_state: Required. Describes the property health state. :type health_state: str - :param ttl_timespan: Required. Time to live in milli-seconds. - :type ttl_timespan: long + :param time_to_live_ms: Required. Time to live in milli-seconds. + :type time_to_live_ms: long :param sequence_number: Required. Sequence number of report. :type sequence_number: long :param description: Required. Description of report. @@ -76,7 +78,7 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): 'source_id': {'required': True}, 'property': {'required': True}, 'health_state': {'required': True}, - 'ttl_timespan': {'required': True}, + 'time_to_live_ms': {'required': True}, 'sequence_number': {'required': True}, 'description': {'required': True}, 'remove_when_expired': {'required': True}, @@ -85,6 +87,7 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -96,15 +99,15 @@ class DeployedServiceHealthReportCreatedEvent(ApplicationEvent): 'source_id': {'key': 'SourceId', 'type': 'str'}, 'property': {'key': 'Property', 'type': 'str'}, 'health_state': {'key': 'HealthState', 'type': 'str'}, - 'ttl_timespan': {'key': 'TTLTimespan', 'type': 'long'}, + 'time_to_live_ms': {'key': 'TimeToLiveMs', 'type': 'long'}, 'sequence_number': {'key': 'SequenceNumber', 'type': 'long'}, 'description': {'key': 'Description', 'type': 'str'}, 'remove_when_expired': {'key': 'RemoveWhenExpired', 'type': 'bool'}, 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 
'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_manifest_name: str, service_package_instance_id: int, service_package_activation_id: str, node_name: str, source_id: str, property: str, health_state: str, ttl_timespan: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(DeployedServiceHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, service_manifest_name: str, service_package_instance_id: int, service_package_activation_id: str, node_name: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(DeployedServicePackageNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, application_id=application_id, **kwargs) self.service_manifest_name = service_manifest_name self.service_package_instance_id = service_package_instance_id self.service_package_activation_id = service_package_activation_id @@ -112,9 +115,9 @@ def __init__(self, *, event_instance_id: str, time_stamp, application_id: str, s self.source_id = source_id self.property = property self.health_state = health_state - self.ttl_timespan = ttl_timespan + self.time_to_live_ms = time_to_live_ms self.sequence_number = sequence_number self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'DeployedServiceHealthReportCreated' + self.kind = 
'DeployedServicePackageNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties.py b/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties.py index 0f92568c3b43..63da00fe00b8 100644 --- a/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties.py +++ b/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties.py @@ -15,6 +15,9 @@ class DiagnosticsSinkProperties(Model): """Properties of a DiagnosticsSink. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureInternalMonitoringPipelineSinkDescription + All required parameters must be populated in order to send to Azure. :param name: Name of the sink. This value is referenced by @@ -36,6 +39,10 @@ class DiagnosticsSinkProperties(Model): 'kind': {'key': 'kind', 'type': 'str'}, } + _subtype_map = { + 'kind': {'AzureInternalMonitoringPipeline': 'AzureInternalMonitoringPipelineSinkDescription'} + } + def __init__(self, **kwargs): super(DiagnosticsSinkProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None) diff --git a/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties_py3.py index a127b1a8af5c..123512a45456 100644 --- a/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/diagnostics_sink_properties_py3.py @@ -15,6 +15,9 @@ class DiagnosticsSinkProperties(Model): """Properties of a DiagnosticsSink. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureInternalMonitoringPipelineSinkDescription + All required parameters must be populated in order to send to Azure. :param name: Name of the sink. 
This value is referenced by @@ -36,6 +39,10 @@ class DiagnosticsSinkProperties(Model): 'kind': {'key': 'kind', 'type': 'str'}, } + _subtype_map = { + 'kind': {'AzureInternalMonitoringPipeline': 'AzureInternalMonitoringPipelineSinkDescription'} + } + def __init__(self, *, name: str=None, description: str=None, **kwargs) -> None: super(DiagnosticsSinkProperties, self).__init__(**kwargs) self.name = name diff --git a/azure-servicefabric/azure/servicefabric/models/disable_backup_description.py b/azure-servicefabric/azure/servicefabric/models/disable_backup_description.py new file mode 100644 index 000000000000..3834793e3065 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/disable_backup_description.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DisableBackupDescription(Model): + """It describes the body parameters while disabling backup of a backup + entity(Application/Service/Partition). + + All required parameters must be populated in order to send to Azure. + + :param clean_backup: Required. Boolean flag to delete backups. It can be + set to true for deleting all the backups which were created for the backup + entity that is getting disabled for backup. 
+ :type clean_backup: bool + """ + + _validation = { + 'clean_backup': {'required': True}, + } + + _attribute_map = { + 'clean_backup': {'key': 'CleanBackup', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(DisableBackupDescription, self).__init__(**kwargs) + self.clean_backup = kwargs.get('clean_backup', None) diff --git a/azure-servicefabric/azure/servicefabric/models/disable_backup_description_py3.py b/azure-servicefabric/azure/servicefabric/models/disable_backup_description_py3.py new file mode 100644 index 000000000000..ad03ecd784e2 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/disable_backup_description_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DisableBackupDescription(Model): + """It describes the body parameters while disabling backup of a backup + entity(Application/Service/Partition). + + All required parameters must be populated in order to send to Azure. + + :param clean_backup: Required. Boolean flag to delete backups. It can be + set to true for deleting all the backups which were created for the backup + entity that is getting disabled for backup. 
+ :type clean_backup: bool + """ + + _validation = { + 'clean_backup': {'required': True}, + } + + _attribute_map = { + 'clean_backup': {'key': 'CleanBackup', 'type': 'bool'}, + } + + def __init__(self, *, clean_backup: bool, **kwargs) -> None: + super(DisableBackupDescription, self).__init__(**kwargs) + self.clean_backup = clean_backup diff --git a/azure-servicefabric/azure/servicefabric/models/endpoint_ref.py b/azure-servicefabric/azure/servicefabric/models/endpoint_ref.py new file mode 100644 index 000000000000..3740b8c79fa7 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/endpoint_ref.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EndpointRef(Model): + """Describes a reference to a service endpoint. + + :param name: Name of the endpoint. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EndpointRef, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/azure-servicefabric/azure/servicefabric/models/endpoint_ref_py3.py b/azure-servicefabric/azure/servicefabric/models/endpoint_ref_py3.py new file mode 100644 index 000000000000..28999f87ccaa --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/endpoint_ref_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EndpointRef(Model): + """Describes a reference to a service endpoint. + + :param name: Name of the endpoint. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(EndpointRef, self).__init__(**kwargs) + self.name = name diff --git a/azure-servicefabric/azure/servicefabric/models/fabric_error_error.py b/azure-servicefabric/azure/servicefabric/models/fabric_error_error.py index 62007b111bb4..411a5a13797e 100644 --- a/azure-servicefabric/azure/servicefabric/models/fabric_error_error.py +++ b/azure-servicefabric/azure/servicefabric/models/fabric_error_error.py @@ -65,6 +65,7 @@ class FabricErrorError(Model): - "FABRIC_E_BACKUP_NOT_ENABLED" - "FABRIC_E_BACKUP_POLICY_NOT_EXISTING" - "FABRIC_E_FAULT_ANALYSIS_SERVICE_NOT_EXISTING" + - "FABRIC_E_IMAGEBUILDER_RESERVED_DIRECTORY_ERROR" - Possible values of the error code for HTTP status code 409 (Conflict) - "FABRIC_E_APPLICATION_TYPE_ALREADY_EXISTS" - "FABRIC_E_APPLICATION_ALREADY_EXISTS" @@ -171,7 +172,7 @@ class FabricErrorError(Model): 'FABRIC_E_SINGLE_INSTANCE_APPLICATION_ALREADY_EXISTS', 'FABRIC_E_SINGLE_INSTANCE_APPLICATION_NOT_FOUND', 'FABRIC_E_VOLUME_ALREADY_EXISTS', 'FABRIC_E_VOLUME_NOT_FOUND', - 'SerializationError' + 'SerializationError', 'FABRIC_E_IMAGEBUILDER_RESERVED_DIRECTORY_ERROR' :type code: str or ~azure.servicefabric.models.FabricErrorCodes :param message: Error message. 
:type message: str diff --git a/azure-servicefabric/azure/servicefabric/models/fabric_error_error_py3.py b/azure-servicefabric/azure/servicefabric/models/fabric_error_error_py3.py index 981f5bc7d10e..b7e9b89d1dcb 100644 --- a/azure-servicefabric/azure/servicefabric/models/fabric_error_error_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/fabric_error_error_py3.py @@ -65,6 +65,7 @@ class FabricErrorError(Model): - "FABRIC_E_BACKUP_NOT_ENABLED" - "FABRIC_E_BACKUP_POLICY_NOT_EXISTING" - "FABRIC_E_FAULT_ANALYSIS_SERVICE_NOT_EXISTING" + - "FABRIC_E_IMAGEBUILDER_RESERVED_DIRECTORY_ERROR" - Possible values of the error code for HTTP status code 409 (Conflict) - "FABRIC_E_APPLICATION_TYPE_ALREADY_EXISTS" - "FABRIC_E_APPLICATION_ALREADY_EXISTS" @@ -171,7 +172,7 @@ class FabricErrorError(Model): 'FABRIC_E_SINGLE_INSTANCE_APPLICATION_ALREADY_EXISTS', 'FABRIC_E_SINGLE_INSTANCE_APPLICATION_NOT_FOUND', 'FABRIC_E_VOLUME_ALREADY_EXISTS', 'FABRIC_E_VOLUME_NOT_FOUND', - 'SerializationError' + 'SerializationError', 'FABRIC_E_IMAGEBUILDER_RESERVED_DIRECTORY_ERROR' :type code: str or ~azure.servicefabric.models.FabricErrorCodes :param message: Error message. :type message: str diff --git a/azure-servicefabric/azure/servicefabric/models/fabric_event.py b/azure-servicefabric/azure/servicefabric/models/fabric_event.py index 02c065a4e892..6c15194c1f9b 100644 --- a/azure-servicefabric/azure/servicefabric/models/fabric_event.py +++ b/azure-servicefabric/azure/servicefabric/models/fabric_event.py @@ -24,6 +24,8 @@ class FabricEvent(Model): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -41,6 +43,7 @@ class FabricEvent(Model): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -53,6 +56,7 @@ class FabricEvent(Model): def __init__(self, **kwargs): super(FabricEvent, self).__init__(**kwargs) self.event_instance_id = kwargs.get('event_instance_id', None) + self.category = kwargs.get('category', None) self.time_stamp = kwargs.get('time_stamp', None) self.has_correlated_events = kwargs.get('has_correlated_events', None) self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/fabric_event_py3.py b/azure-servicefabric/azure/servicefabric/models/fabric_event_py3.py index 3d69ee6a26b2..118bce4243fc 100644 --- a/azure-servicefabric/azure/servicefabric/models/fabric_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/fabric_event_py3.py @@ -24,6 +24,8 @@ class FabricEvent(Model): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -41,6 +43,7 @@ class FabricEvent(Model): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -50,9 +53,10 @@ class FabricEvent(Model): 'kind': {'ApplicationEvent': 'ApplicationEvent', 'ClusterEvent': 'ClusterEvent', 'ContainerInstanceEvent': 'ContainerInstanceEvent', 'NodeEvent': 'NodeEvent', 'PartitionEvent': 'PartitionEvent', 'ReplicaEvent': 'ReplicaEvent', 'ServiceEvent': 'ServiceEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, has_correlated_events: bool=None, **kwargs) -> None: + def __init__(self, *, event_instance_id: str, time_stamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: super(FabricEvent, self).__init__(**kwargs) self.event_instance_id = event_instance_id + self.category = category self.time_stamp = time_stamp self.has_correlated_events = has_correlated_events self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/gateway_destination.py b/azure-servicefabric/azure/servicefabric/models/gateway_destination.py new file mode 100644 index 000000000000..5c2162747085 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/gateway_destination.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GatewayDestination(Model): + """Describes destination endpoint for routing traffic. + + All required parameters must be populated in order to send to Azure. + + :param application_name: Required. Name of the service fabric Mesh + application. + :type application_name: str + :param service_name: Required. service that contains the endpoint. + :type service_name: str + :param endpoint_name: Required. name of the endpoint in the service. + :type endpoint_name: str + """ + + _validation = { + 'application_name': {'required': True}, + 'service_name': {'required': True}, + 'endpoint_name': {'required': True}, + } + + _attribute_map = { + 'application_name': {'key': 'applicationName', 'type': 'str'}, + 'service_name': {'key': 'serviceName', 'type': 'str'}, + 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GatewayDestination, self).__init__(**kwargs) + self.application_name = kwargs.get('application_name', None) + self.service_name = kwargs.get('service_name', None) + self.endpoint_name = kwargs.get('endpoint_name', None) diff --git a/azure-servicefabric/azure/servicefabric/models/gateway_destination_py3.py b/azure-servicefabric/azure/servicefabric/models/gateway_destination_py3.py new file mode 100644 index 000000000000..7819e3ecd02f --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/gateway_destination_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GatewayDestination(Model): + """Describes destination endpoint for routing traffic. + + All required parameters must be populated in order to send to Azure. + + :param application_name: Required. Name of the service fabric Mesh + application. + :type application_name: str + :param service_name: Required. service that contains the endpoint. + :type service_name: str + :param endpoint_name: Required. name of the endpoint in the service. + :type endpoint_name: str + """ + + _validation = { + 'application_name': {'required': True}, + 'service_name': {'required': True}, + 'endpoint_name': {'required': True}, + } + + _attribute_map = { + 'application_name': {'key': 'applicationName', 'type': 'str'}, + 'service_name': {'key': 'serviceName', 'type': 'str'}, + 'endpoint_name': {'key': 'endpointName', 'type': 'str'}, + } + + def __init__(self, *, application_name: str, service_name: str, endpoint_name: str, **kwargs) -> None: + super(GatewayDestination, self).__init__(**kwargs) + self.application_name = application_name + self.service_name = service_name + self.endpoint_name = endpoint_name diff --git a/azure-servicefabric/azure/servicefabric/models/gateway_resource_description.py b/azure-servicefabric/azure/servicefabric/models/gateway_resource_description.py new file mode 100644 index 000000000000..ad9d4e3f6b7d --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/gateway_resource_description.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
from msrest.serialization import Model


class GatewayResourceDescription(Model):
    """Describes a gateway resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Name of the Gateway resource.
    :type name: str
    :param description: User readable description of the gateway.
    :type description: str
    :param source_network: Required. Network the gateway should listen on for
     requests.
    :type source_network: ~azure.servicefabric.models.NetworkRef
    :param destination_network: Required. Network that the Application is
     using.
    :type destination_network: ~azure.servicefabric.models.NetworkRef
    :param tcp: Configuration for tcp connectivity for this gateway.
    :type tcp: list[~azure.servicefabric.models.TcpConfig]
    :param http: Configuration for http connectivity for this gateway.
    :type http: list[~azure.servicefabric.models.HttpConfig]
    :ivar status: Status of the resource. Possible values include: 'Unknown',
     'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed'
    :vartype status: str or ~azure.servicefabric.models.ResourceStatus
    :ivar status_details: Gives additional information about the current
     status of the gateway.
    :vartype status_details: str
    :ivar ip_address: IP address of the gateway. This is populated in the
     response and is ignored for incoming requests.
    :vartype ip_address: str
    """

    _validation = {
        'name': {'required': True},
        'source_network': {'required': True},
        'destination_network': {'required': True},
        'status': {'readonly': True},
        'status_details': {'readonly': True},
        'ip_address': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'source_network': {'key': 'properties.sourceNetwork', 'type': 'NetworkRef'},
        'destination_network': {'key': 'properties.destinationNetwork', 'type': 'NetworkRef'},
        'tcp': {'key': 'properties.tcp', 'type': '[TcpConfig]'},
        'http': {'key': 'properties.http', 'type': '[HttpConfig]'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'status_details': {'key': 'properties.statusDetails', 'type': 'str'},
        'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(GatewayResourceDescription, self).__init__(**kwargs)
        # Client-settable fields are taken from kwargs (defaulting to None).
        for attr in ('name', 'description', 'source_network',
                     'destination_network', 'tcp', 'http'):
            setattr(self, attr, kwargs.get(attr))
        # Server-populated, read-only fields are always reset locally.
        self.status = None
        self.status_details = None
        self.ip_address = None
from msrest.serialization import Model


class GatewayResourceDescription(Model):
    """Describes a gateway resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Name of the Gateway resource.
    :type name: str
    :param description: User readable description of the gateway.
    :type description: str
    :param source_network: Required. Network the gateway should listen on for
     requests.
    :type source_network: ~azure.servicefabric.models.NetworkRef
    :param destination_network: Required. Network that the Application is
     using.
    :type destination_network: ~azure.servicefabric.models.NetworkRef
    :param tcp: Configuration for tcp connectivity for this gateway.
    :type tcp: list[~azure.servicefabric.models.TcpConfig]
    :param http: Configuration for http connectivity for this gateway.
    :type http: list[~azure.servicefabric.models.HttpConfig]
    :ivar status: Status of the resource. Possible values include: 'Unknown',
     'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed'
    :vartype status: str or ~azure.servicefabric.models.ResourceStatus
    :ivar status_details: Gives additional information about the current
     status of the gateway.
    :vartype status_details: str
    :ivar ip_address: IP address of the gateway. This is populated in the
     response and is ignored for incoming requests.
    :vartype ip_address: str
    """

    _validation = {
        'name': {'required': True},
        'source_network': {'required': True},
        'destination_network': {'required': True},
        'status': {'readonly': True},
        'status_details': {'readonly': True},
        'ip_address': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'source_network': {'key': 'properties.sourceNetwork', 'type': 'NetworkRef'},
        'destination_network': {'key': 'properties.destinationNetwork', 'type': 'NetworkRef'},
        'tcp': {'key': 'properties.tcp', 'type': '[TcpConfig]'},
        'http': {'key': 'properties.http', 'type': '[HttpConfig]'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'status_details': {'key': 'properties.statusDetails', 'type': 'str'},
        'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
    }

    def __init__(self, *, name: str, source_network, destination_network, description: str=None, tcp=None, http=None, **kwargs) -> None:
        super(GatewayResourceDescription, self).__init__(**kwargs)
        # Caller-supplied configuration.
        self.name = name
        self.source_network = source_network
        self.destination_network = destination_network
        self.description = description
        self.tcp = tcp
        self.http = http
        # Read-only, server-populated fields start unset.
        self.status = None
        self.status_details = None
        self.ip_address = None
deadlock) and can't report, the entity is evaluated at error when the health report expires. This flags the entity as being in Error health state. :type remove_when_expired: bool diff --git a/azure-servicefabric/azure/servicefabric/models/health_event_py3.py b/azure-servicefabric/azure/servicefabric/models/health_event_py3.py index 5ea857d34c74..675a74a98563 100644 --- a/azure-servicefabric/azure/servicefabric/models/health_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/health_event_py3.py @@ -78,7 +78,7 @@ class HealthEvent(HealthInformation): of this property is false by default. When clients report periodically, they should set RemoveWhenExpired false (default). - This way, is the reporter has issues (eg. deadlock) and can't report, the + This way, if the reporter has issues (e.g. deadlock) and can't report, the entity is evaluated at error when the health report expires. This flags the entity as being in Error health state. :type remove_when_expired: bool diff --git a/azure-servicefabric/azure/servicefabric/models/health_information.py b/azure-servicefabric/azure/servicefabric/models/health_information.py index 86501bb5b65c..133086a1b639 100644 --- a/azure-servicefabric/azure/servicefabric/models/health_information.py +++ b/azure-servicefabric/azure/servicefabric/models/health_information.py @@ -79,7 +79,7 @@ class HealthInformation(Model): of this property is false by default. When clients report periodically, they should set RemoveWhenExpired false (default). - This way, is the reporter has issues (eg. deadlock) and can't report, the + This way, if the reporter has issues (e.g. deadlock) and can't report, the entity is evaluated at error when the health report expires. This flags the entity as being in Error health state. 
from msrest.serialization import Model


class HttpConfig(Model):
    """Describes the http configuration for external connectivity for this
    network.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. http gateway config name.
    :type name: str
    :param port: Required. Specifies the port at which the service endpoint
     below needs to be exposed.
    :type port: int
    :param hosts: Required. description for routing.
    :type hosts: list[~azure.servicefabric.models.HttpHostConfig]
    """

    _validation = {
        'name': {'required': True},
        'port': {'required': True},
        'hosts': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'hosts': {'key': 'hosts', 'type': '[HttpHostConfig]'},
    }

    def __init__(self, **kwargs):
        super(HttpConfig, self).__init__(**kwargs)
        # All three fields are required by validation but, per the generated
        # model convention, default to None when omitted from kwargs.
        for attr in ('name', 'port', 'hosts'):
            setattr(self, attr, kwargs.get(attr))
from msrest.serialization import Model


class HttpHostConfig(Model):
    """Describes the hostname properties for http routing.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. http hostname config name.
    :type name: str
    :param routes: Required. Route information to use for routing. Routes are
     processed in the order they are specified. Specify routes that are more
     specific before routes that can handle general cases.
    :type routes: list[~azure.servicefabric.models.HttpRouteConfig]
    """

    _validation = {
        'name': {'required': True},
        'routes': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'routes': {'key': 'routes', 'type': '[HttpRouteConfig]'},
    }

    def __init__(self, **kwargs):
        super(HttpHostConfig, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.routes = kwargs.get('routes', None)
from msrest.serialization import Model


class HttpRouteConfig(Model):
    """Describes an http route: a matching rule and the destination endpoint
    that matching traffic is forwarded to.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. http route name.
    :type name: str
    :param match: Required. Describes a rule for http route matching.
    :type match: ~azure.servicefabric.models.HttpRouteMatchRule
    :param destination: Required. Describes destination endpoint for routing
     traffic.
    :type destination: ~azure.servicefabric.models.GatewayDestination
    """

    _validation = {
        'name': {'required': True},
        'match': {'required': True},
        'destination': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'match': {'key': 'match', 'type': 'HttpRouteMatchRule'},
        'destination': {'key': 'destination', 'type': 'GatewayDestination'},
    }

    def __init__(self, **kwargs):
        super(HttpRouteConfig, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.match = kwargs.get('match', None)
        self.destination = kwargs.get('destination', None)
from msrest.serialization import Model


class HttpRouteMatchHeader(Model):
    """Describes header information for http route matching.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Name of header to match in request.
    :type name: str
    :param value: Value of header to match in request.
    :type value: str
    :param type: how to match header value. Possible values include: 'exact'
    :type type: str or ~azure.servicefabric.models.HeaderMatchType
    """

    _validation = {
        'name': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HttpRouteMatchHeader, self).__init__(**kwargs)
        # Only 'name' is required; 'value' and 'type' are optional matchers.
        for attr in ('name', 'value', 'type'):
            setattr(self, attr, kwargs.get(attr))
from msrest.serialization import Model


class HttpRouteMatchPath(Model):
    """Path to match for routing.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. Uri path to match for request.
    :type value: str
    :param rewrite: replacement string for matched part of the Uri.
    :type rewrite: str
    :ivar type: Required. how to match value in the Uri. Default value:
     "prefix" .
    :vartype type: str
    """

    _validation = {
        'value': {'required': True},
        'type': {'required': True, 'constant': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'rewrite': {'key': 'rewrite', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    # 'type' is a constant: only prefix matching is supported, so it is a
    # class attribute rather than an instance field set in __init__.
    type = "prefix"

    def __init__(self, **kwargs):
        super(HttpRouteMatchPath, self).__init__(**kwargs)
        for attr in ('value', 'rewrite'):
            setattr(self, attr, kwargs.get(attr))
from msrest.serialization import Model


class HttpRouteMatchRule(Model):
    """Describes a rule for http route matching.

    All required parameters must be populated in order to send to Azure.

    :param path: Required. Path to match for routing.
    :type path: ~azure.servicefabric.models.HttpRouteMatchPath
    :param headers: headers and their values to match in request.
    :type headers: list[~azure.servicefabric.models.HttpRouteMatchHeader]
    """

    _validation = {
        'path': {'required': True},
    }

    _attribute_map = {
        'path': {'key': 'path', 'type': 'HttpRouteMatchPath'},
        'headers': {'key': 'headers', 'type': '[HttpRouteMatchHeader]'},
    }

    def __init__(self, **kwargs):
        super(HttpRouteMatchRule, self).__init__(**kwargs)
        # A path matcher is required; header matchers are optional extras.
        for attr in ('path', 'headers'):
            setattr(self, attr, kwargs.get(attr))
+ :type headers: list[~azure.servicefabric.models.HttpRouteMatchHeader] + """ + + _validation = { + 'path': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'HttpRouteMatchPath'}, + 'headers': {'key': 'headers', 'type': '[HttpRouteMatchHeader]'}, + } + + def __init__(self, *, path, headers=None, **kwargs) -> None: + super(HttpRouteMatchRule, self).__init__(**kwargs) + self.path = path + self.headers = headers diff --git a/azure-servicefabric/azure/servicefabric/models/image_registry_credential.py b/azure-servicefabric/azure/servicefabric/models/image_registry_credential.py index 4a31577dad55..95aac323f274 100644 --- a/azure-servicefabric/azure/servicefabric/models/image_registry_credential.py +++ b/azure-servicefabric/azure/servicefabric/models/image_registry_credential.py @@ -22,7 +22,9 @@ class ImageRegistryCredential(Model): :type server: str :param username: Required. The username for the private registry. :type username: str - :param password: The password for the private registry. + :param password: The password for the private registry. The password is + required for create or update operations, however it is not returned in + the get or list operations. :type password: str """ diff --git a/azure-servicefabric/azure/servicefabric/models/image_registry_credential_py3.py b/azure-servicefabric/azure/servicefabric/models/image_registry_credential_py3.py index d189db0f0041..8d60c62a43ee 100644 --- a/azure-servicefabric/azure/servicefabric/models/image_registry_credential_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/image_registry_credential_py3.py @@ -22,7 +22,9 @@ class ImageRegistryCredential(Model): :type server: str :param username: Required. The username for the private registry. :type username: str - :param password: The password for the private registry. + :param password: The password for the private registry. 
from .secret_resource_properties import SecretResourceProperties


class InlinedValueSecretResourceProperties(SecretResourceProperties):
    """Describes the properties of a secret resource whose value is provided
    explicitly as plaintext. The secret resource may have multiple values, each
    being uniquely versioned. The secret value of each version is stored
    encrypted, and delivered as plaintext into the context of applications
    referencing it.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param kind: Required. Constant filled by server.
    :type kind: str
    :param description: User readable description of the secret.
    :type description: str
    :ivar status: Status of the resource. Possible values include: 'Unknown',
     'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed'
    :vartype status: str or ~azure.servicefabric.models.ResourceStatus
    :ivar status_details: Gives additional information about the current
     status of the secret.
    :vartype status_details: str
    :param content_type: The type of the content stored in the secret value.
     The value of this property is opaque to Service Fabric. Once set, the
     value of this property cannot be changed.
    :type content_type: str
    """

    _validation = {
        'kind': {'required': True},
        'status': {'readonly': True},
        'status_details': {'readonly': True},
    }

    _attribute_map = {
        'kind': {'key': 'kind', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'status_details': {'key': 'statusDetails', 'type': 'str'},
        'content_type': {'key': 'contentType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # The base class populates description/content_type and the
        # server-only fields; this subclass only pins the discriminator.
        super(InlinedValueSecretResourceProperties, self).__init__(**kwargs)
        self.kind = 'inlinedValue'
+# -------------------------------------------------------------------------- + +from .secret_resource_properties_py3 import SecretResourceProperties + + +class InlinedValueSecretResourceProperties(SecretResourceProperties): + """Describes the properties of a secret resource whose value is provided + explicitly as plaintext. The secret resource may have multiple values, each + being uniquely versioned. The secret value of each version is stored + encrypted, and delivered as plaintext into the context of applications + referencing it. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param description: User readable description of the secret. + :type description: str + :ivar status: Status of the resource. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the secret. + :vartype status_details: str + :param content_type: The type of the content stored in the secret value. + The value of this property is opaque to Service Fabric. Once set, the + value of this property cannot be changed. 
+ :type content_type: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'content_type': {'key': 'contentType', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, content_type: str=None, **kwargs) -> None: + super(InlinedValueSecretResourceProperties, self).__init__(description=description, content_type=content_type, **kwargs) + self.kind = 'inlinedValue' diff --git a/azure-servicefabric/azure/servicefabric/models/load_metric_report.py b/azure-servicefabric/azure/servicefabric/models/load_metric_report.py index 2f061fd5cc8b..2e7e321ce3bb 100644 --- a/azure-servicefabric/azure/servicefabric/models/load_metric_report.py +++ b/azure-servicefabric/azure/servicefabric/models/load_metric_report.py @@ -20,14 +20,18 @@ class LoadMetricReport(Model): :type last_reported_utc: datetime :param name: The name of the load metric. :type name: str - :param value: The value of the load metric. + :param value: The value of the load metric. In future releases of Service + Fabric this parameter will be deprecated in favor of CurrentValue. :type value: str + :param current_value: The value of the load metric. 
+ :type current_value: str """ _attribute_map = { 'last_reported_utc': {'key': 'LastReportedUtc', 'type': 'iso-8601'}, 'name': {'key': 'Name', 'type': 'str'}, 'value': {'key': 'Value', 'type': 'str'}, + 'current_value': {'key': 'CurrentValue', 'type': 'str'}, } def __init__(self, **kwargs): @@ -35,3 +39,4 @@ def __init__(self, **kwargs): self.last_reported_utc = kwargs.get('last_reported_utc', None) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) + self.current_value = kwargs.get('current_value', None) diff --git a/azure-servicefabric/azure/servicefabric/models/load_metric_report_info.py b/azure-servicefabric/azure/servicefabric/models/load_metric_report_info.py index a7c9c56764e5..93fd4a457be7 100644 --- a/azure-servicefabric/azure/servicefabric/models/load_metric_report_info.py +++ b/azure-servicefabric/azure/servicefabric/models/load_metric_report_info.py @@ -17,8 +17,11 @@ class LoadMetricReportInfo(Model): :param name: The name of the metric. :type name: str - :param value: The value of the load for the metric.. + :param value: The value of the load for the metric. In future releases of + Service Fabric this parameter will be deprecated in favor of CurrentValue. :type value: int + :param current_value: The double value of the load for the metric. + :type current_value: str :param last_reported_utc: The UTC time when the load is reported. 
:type last_reported_utc: datetime """ @@ -26,6 +29,7 @@ class LoadMetricReportInfo(Model): _attribute_map = { 'name': {'key': 'Name', 'type': 'str'}, 'value': {'key': 'Value', 'type': 'int'}, + 'current_value': {'key': 'CurrentValue', 'type': 'str'}, 'last_reported_utc': {'key': 'LastReportedUtc', 'type': 'iso-8601'}, } @@ -33,4 +37,5 @@ def __init__(self, **kwargs): super(LoadMetricReportInfo, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.value = kwargs.get('value', None) + self.current_value = kwargs.get('current_value', None) self.last_reported_utc = kwargs.get('last_reported_utc', None) diff --git a/azure-servicefabric/azure/servicefabric/models/load_metric_report_info_py3.py b/azure-servicefabric/azure/servicefabric/models/load_metric_report_info_py3.py index 965f30c28231..2401392951ac 100644 --- a/azure-servicefabric/azure/servicefabric/models/load_metric_report_info_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/load_metric_report_info_py3.py @@ -17,8 +17,11 @@ class LoadMetricReportInfo(Model): :param name: The name of the metric. :type name: str - :param value: The value of the load for the metric.. + :param value: The value of the load for the metric. In future releases of + Service Fabric this parameter will be deprecated in favor of CurrentValue. :type value: int + :param current_value: The double value of the load for the metric. + :type current_value: str :param last_reported_utc: The UTC time when the load is reported. 
:type last_reported_utc: datetime """ @@ -26,11 +29,13 @@ class LoadMetricReportInfo(Model): _attribute_map = { 'name': {'key': 'Name', 'type': 'str'}, 'value': {'key': 'Value', 'type': 'int'}, + 'current_value': {'key': 'CurrentValue', 'type': 'str'}, 'last_reported_utc': {'key': 'LastReportedUtc', 'type': 'iso-8601'}, } - def __init__(self, *, name: str=None, value: int=None, last_reported_utc=None, **kwargs) -> None: + def __init__(self, *, name: str=None, value: int=None, current_value: str=None, last_reported_utc=None, **kwargs) -> None: super(LoadMetricReportInfo, self).__init__(**kwargs) self.name = name self.value = value + self.current_value = current_value self.last_reported_utc = last_reported_utc diff --git a/azure-servicefabric/azure/servicefabric/models/load_metric_report_py3.py b/azure-servicefabric/azure/servicefabric/models/load_metric_report_py3.py index c1b5ef538451..e8aec5b59bcc 100644 --- a/azure-servicefabric/azure/servicefabric/models/load_metric_report_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/load_metric_report_py3.py @@ -20,18 +20,23 @@ class LoadMetricReport(Model): :type last_reported_utc: datetime :param name: The name of the load metric. :type name: str - :param value: The value of the load metric. + :param value: The value of the load metric. In future releases of Service + Fabric this parameter will be deprecated in favor of CurrentValue. :type value: str + :param current_value: The value of the load metric. 
+ :type current_value: str """ _attribute_map = { 'last_reported_utc': {'key': 'LastReportedUtc', 'type': 'iso-8601'}, 'name': {'key': 'Name', 'type': 'str'}, 'value': {'key': 'Value', 'type': 'str'}, + 'current_value': {'key': 'CurrentValue', 'type': 'str'}, } - def __init__(self, *, last_reported_utc=None, name: str=None, value: str=None, **kwargs) -> None: + def __init__(self, *, last_reported_utc=None, name: str=None, value: str=None, current_value: str=None, **kwargs) -> None: super(LoadMetricReport, self).__init__(**kwargs) self.last_reported_utc = last_reported_utc self.name = name self.value = value + self.current_value = current_value diff --git a/azure-servicefabric/azure/servicefabric/models/local_network_resource_properties.py b/azure-servicefabric/azure/servicefabric/models/local_network_resource_properties.py new file mode 100644 index 000000000000..98eddf5dbeed --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/local_network_resource_properties.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .network_resource_properties import NetworkResourceProperties + + +class LocalNetworkResourceProperties(NetworkResourceProperties): + """Information about a Service Fabric container network local to a single + Service Fabric cluster. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. 
+ :type kind: str + :param description: User readable description of the network. + :type description: str + :ivar status: Status of the network. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the network. + :vartype status_details: str + :param network_address_prefix: Address space for the local container + network. + :type network_address_prefix: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'network_address_prefix': {'key': 'networkAddressPrefix', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LocalNetworkResourceProperties, self).__init__(**kwargs) + self.network_address_prefix = kwargs.get('network_address_prefix', None) + self.kind = 'Local' diff --git a/azure-servicefabric/azure/servicefabric/models/local_network_resource_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/local_network_resource_properties_py3.py new file mode 100644 index 000000000000..6578e59b2c3f --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/local_network_resource_properties_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .network_resource_properties_py3 import NetworkResourceProperties + + +class LocalNetworkResourceProperties(NetworkResourceProperties): + """Information about a Service Fabric container network local to a single + Service Fabric cluster. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param description: User readable description of the network. + :type description: str + :ivar status: Status of the network. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the network. + :vartype status_details: str + :param network_address_prefix: Address space for the local container + network. 
+ :type network_address_prefix: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'network_address_prefix': {'key': 'networkAddressPrefix', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, network_address_prefix: str=None, **kwargs) -> None: + super(LocalNetworkResourceProperties, self).__init__(description=description, **kwargs) + self.network_address_prefix = network_address_prefix + self.kind = 'Local' diff --git a/azure-servicefabric/azure/servicefabric/models/network_ref.py b/azure-servicefabric/azure/servicefabric/models/network_ref.py index b86c10253c0f..7c811e73629e 100644 --- a/azure-servicefabric/azure/servicefabric/models/network_ref.py +++ b/azure-servicefabric/azure/servicefabric/models/network_ref.py @@ -15,14 +15,19 @@ class NetworkRef(Model): """Describes a network reference in a service. - :param name: Name of the network. + :param name: Name of the network :type name: str + :param endpoint_refs: A list of endpoints that are exposed on this + network. 
+ :type endpoint_refs: list[~azure.servicefabric.models.EndpointRef] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, + 'endpoint_refs': {'key': 'endpointRefs', 'type': '[EndpointRef]'}, } def __init__(self, **kwargs): super(NetworkRef, self).__init__(**kwargs) self.name = kwargs.get('name', None) + self.endpoint_refs = kwargs.get('endpoint_refs', None) diff --git a/azure-servicefabric/azure/servicefabric/models/network_ref_py3.py b/azure-servicefabric/azure/servicefabric/models/network_ref_py3.py index 45e5d333f747..9cb21573bccc 100644 --- a/azure-servicefabric/azure/servicefabric/models/network_ref_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/network_ref_py3.py @@ -15,14 +15,19 @@ class NetworkRef(Model): """Describes a network reference in a service. - :param name: Name of the network. + :param name: Name of the network :type name: str + :param endpoint_refs: A list of endpoints that are exposed on this + network. + :type endpoint_refs: list[~azure.servicefabric.models.EndpointRef] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, + 'endpoint_refs': {'key': 'endpointRefs', 'type': '[EndpointRef]'}, } - def __init__(self, *, name: str=None, **kwargs) -> None: + def __init__(self, *, name: str=None, endpoint_refs=None, **kwargs) -> None: super(NetworkRef, self).__init__(**kwargs) self.name = name + self.endpoint_refs = endpoint_refs diff --git a/azure-servicefabric/azure/servicefabric/models/network_resource_description.py b/azure-servicefabric/azure/servicefabric/models/network_resource_description.py new file mode 100644 index 000000000000..56a03e76e1ae --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/network_resource_description.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetworkResourceDescription(Model): + """This type describes a network resource. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the Network resource. + :type name: str + :param properties: Required. Describes properties of a network resource. + :type properties: ~azure.servicefabric.models.NetworkResourceProperties + """ + + _validation = { + 'name': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'NetworkResourceProperties'}, + } + + def __init__(self, **kwargs): + super(NetworkResourceDescription, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.properties = kwargs.get('properties', None) diff --git a/azure-servicefabric/azure/servicefabric/models/network_resource_description_py3.py b/azure-servicefabric/azure/servicefabric/models/network_resource_description_py3.py new file mode 100644 index 000000000000..c812b39c47aa --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/network_resource_description_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetworkResourceDescription(Model): + """This type describes a network resource. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the Network resource. + :type name: str + :param properties: Required. Describes properties of a network resource. + :type properties: ~azure.servicefabric.models.NetworkResourceProperties + """ + + _validation = { + 'name': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'NetworkResourceProperties'}, + } + + def __init__(self, *, name: str, properties, **kwargs) -> None: + super(NetworkResourceDescription, self).__init__(**kwargs) + self.name = name + self.properties = properties diff --git a/azure-servicefabric/azure/servicefabric/models/network_resource_properties.py b/azure-servicefabric/azure/servicefabric/models/network_resource_properties.py new file mode 100644 index 000000000000..bca9a90f94fc --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/network_resource_properties.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .network_resource_properties_base import NetworkResourcePropertiesBase + + +class NetworkResourceProperties(NetworkResourcePropertiesBase): + """Describes properties of a network resource. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LocalNetworkResourceProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param description: User readable description of the network. + :type description: str + :ivar status: Status of the network. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the network. + :vartype status_details: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'Local': 'LocalNetworkResourceProperties'} + } + + def __init__(self, **kwargs): + super(NetworkResourceProperties, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.status = None + self.status_details = None + self.kind = 'NetworkResourceProperties' diff --git a/azure-servicefabric/azure/servicefabric/models/network_resource_properties_base.py b/azure-servicefabric/azure/servicefabric/models/network_resource_properties_base.py new file mode 100644 index 000000000000..d0ecd3ae3f00 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/network_resource_properties_base.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetworkResourcePropertiesBase(Model): + """This type describes the properties of a network resource, including its + kind. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: NetworkResourceProperties + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'NetworkResourceProperties': 'NetworkResourceProperties'} + } + + def __init__(self, **kwargs): + super(NetworkResourcePropertiesBase, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/network_resource_properties_base_py3.py b/azure-servicefabric/azure/servicefabric/models/network_resource_properties_base_py3.py new file mode 100644 index 000000000000..6410aae6145c --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/network_resource_properties_base_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class NetworkResourcePropertiesBase(Model): + """This type describes the properties of a network resource, including its + kind. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: NetworkResourceProperties + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'NetworkResourceProperties': 'NetworkResourceProperties'} + } + + def __init__(self, **kwargs) -> None: + super(NetworkResourcePropertiesBase, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/network_resource_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/network_resource_properties_py3.py new file mode 100644 index 000000000000..0fce38298c8a --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/network_resource_properties_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .network_resource_properties_base_py3 import NetworkResourcePropertiesBase + + +class NetworkResourceProperties(NetworkResourcePropertiesBase): + """Describes properties of a network resource. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: LocalNetworkResourceProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param description: User readable description of the network. + :type description: str + :ivar status: Status of the network. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the network. + :vartype status_details: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'Local': 'LocalNetworkResourceProperties'} + } + + def __init__(self, *, description: str=None, **kwargs) -> None: + super(NetworkResourceProperties, self).__init__(**kwargs) + self.description = description + self.status = None + self.status_details = None + self.kind = 'NetworkResourceProperties' diff --git a/azure-servicefabric/azure/servicefabric/models/node_aborted_event.py b/azure-servicefabric/azure/servicefabric/models/node_aborted_event.py index e55742ce2c28..744a8feee1de 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_aborted_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_aborted_event.py @@ -20,6 +20,8 @@ class NodeAbortedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. 
+ :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -64,6 +66,7 @@ class NodeAbortedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/node_aborted_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_aborted_event_py3.py index f38a98f53035..9b51fbd20c5e 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_aborted_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_aborted_event_py3.py @@ -20,6 +20,8 @@ class NodeAbortedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -64,6 +66,7 @@ class NodeAbortedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -78,8 +81,8 @@ class NodeAbortedEvent(NodeEvent): 'node_version': {'key': 'NodeVersion', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeAbortedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeAbortedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.node_id = node_id self.upgrade_domain = upgrade_domain diff --git a/azure-servicefabric/azure/servicefabric/models/node_aborting_event.py b/azure-servicefabric/azure/servicefabric/models/node_aborting_event.py deleted file mode 100644 index 6e39ee04d8ff..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/node_aborting_event.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event import NodeEvent - - -class NodeAbortingEvent(NodeEvent): - """Node Aborting event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance: Required. Id of Node instance. - :type node_instance: long - :param node_id: Required. Id of Node. - :type node_id: str - :param upgrade_domain: Required. Upgrade domain of Node. - :type upgrade_domain: str - :param fault_domain: Required. Fault domain of Node. - :type fault_domain: str - :param ip_address_or_fqdn: Required. IP address or FQDN. - :type ip_address_or_fqdn: str - :param hostname: Required. Name of Host. - :type hostname: str - :param is_seed_node: Required. Indicates if it is seed node. - :type is_seed_node: bool - :param node_version: Required. Version of Node. 
- :type node_version: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance': {'required': True}, - 'node_id': {'required': True}, - 'upgrade_domain': {'required': True}, - 'fault_domain': {'required': True}, - 'ip_address_or_fqdn': {'required': True}, - 'hostname': {'required': True}, - 'is_seed_node': {'required': True}, - 'node_version': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, - 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'upgrade_domain': {'key': 'UpgradeDomain', 'type': 'str'}, - 'fault_domain': {'key': 'FaultDomain', 'type': 'str'}, - 'ip_address_or_fqdn': {'key': 'IpAddressOrFQDN', 'type': 'str'}, - 'hostname': {'key': 'Hostname', 'type': 'str'}, - 'is_seed_node': {'key': 'IsSeedNode', 'type': 'bool'}, - 'node_version': {'key': 'NodeVersion', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(NodeAbortingEvent, self).__init__(**kwargs) - self.node_instance = kwargs.get('node_instance', None) - self.node_id = kwargs.get('node_id', None) - self.upgrade_domain = kwargs.get('upgrade_domain', None) - self.fault_domain = kwargs.get('fault_domain', None) - self.ip_address_or_fqdn = kwargs.get('ip_address_or_fqdn', None) - self.hostname = kwargs.get('hostname', None) - self.is_seed_node = kwargs.get('is_seed_node', None) - self.node_version = kwargs.get('node_version', None) - self.kind = 'NodeAborting' diff --git a/azure-servicefabric/azure/servicefabric/models/node_aborting_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_aborting_event_py3.py 
deleted file mode 100644 index 6fe81b78e2d6..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/node_aborting_event_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event_py3 import NodeEvent - - -class NodeAbortingEvent(NodeEvent): - """Node Aborting event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance: Required. Id of Node instance. - :type node_instance: long - :param node_id: Required. Id of Node. - :type node_id: str - :param upgrade_domain: Required. Upgrade domain of Node. - :type upgrade_domain: str - :param fault_domain: Required. Fault domain of Node. - :type fault_domain: str - :param ip_address_or_fqdn: Required. IP address or FQDN. - :type ip_address_or_fqdn: str - :param hostname: Required. Name of Host. - :type hostname: str - :param is_seed_node: Required. Indicates if it is seed node. - :type is_seed_node: bool - :param node_version: Required. Version of Node. 
- :type node_version: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance': {'required': True}, - 'node_id': {'required': True}, - 'upgrade_domain': {'required': True}, - 'fault_domain': {'required': True}, - 'ip_address_or_fqdn': {'required': True}, - 'hostname': {'required': True}, - 'is_seed_node': {'required': True}, - 'node_version': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, - 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'upgrade_domain': {'key': 'UpgradeDomain', 'type': 'str'}, - 'fault_domain': {'key': 'FaultDomain', 'type': 'str'}, - 'ip_address_or_fqdn': {'key': 'IpAddressOrFQDN', 'type': 'str'}, - 'hostname': {'key': 'Hostname', 'type': 'str'}, - 'is_seed_node': {'key': 'IsSeedNode', 'type': 'bool'}, - 'node_version': {'key': 'NodeVersion', 'type': 'str'}, - } - - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeAbortingEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) - self.node_instance = node_instance - self.node_id = node_id - self.upgrade_domain = upgrade_domain - self.fault_domain = fault_domain - self.ip_address_or_fqdn = ip_address_or_fqdn - self.hostname = hostname - self.is_seed_node = is_seed_node - 
self.node_version = node_version - self.kind = 'NodeAborting' diff --git a/azure-servicefabric/azure/servicefabric/models/node_added_event.py b/azure-servicefabric/azure/servicefabric/models/node_added_to_cluster_event.py similarity index 92% rename from azure-servicefabric/azure/servicefabric/models/node_added_event.py rename to azure-servicefabric/azure/servicefabric/models/node_added_to_cluster_event.py index be138ba7b7e7..3c55e7b217d4 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_added_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_added_to_cluster_event.py @@ -12,7 +12,7 @@ from .node_event import NodeEvent -class NodeAddedEvent(NodeEvent): +class NodeAddedToClusterEvent(NodeEvent): """Node Added event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class NodeAddedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -58,6 +60,7 @@ class NodeAddedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -71,11 +74,11 @@ class NodeAddedEvent(NodeEvent): } def __init__(self, **kwargs): - super(NodeAddedEvent, self).__init__(**kwargs) + super(NodeAddedToClusterEvent, self).__init__(**kwargs) self.node_id = kwargs.get('node_id', None) self.node_instance = kwargs.get('node_instance', None) self.node_type = kwargs.get('node_type', None) self.fabric_version = kwargs.get('fabric_version', None) self.ip_address_or_fqdn = kwargs.get('ip_address_or_fqdn', None) self.node_capacities = kwargs.get('node_capacities', None) - self.kind = 'NodeAdded' + self.kind = 'NodeAddedToCluster' diff --git a/azure-servicefabric/azure/servicefabric/models/node_added_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_added_to_cluster_event_py3.py similarity index 86% rename from azure-servicefabric/azure/servicefabric/models/node_added_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_added_to_cluster_event_py3.py index 8813b0cca696..617803762beb 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_added_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_added_to_cluster_event_py3.py @@ -12,7 +12,7 @@ from .node_event_py3 import NodeEvent -class NodeAddedEvent(NodeEvent): +class NodeAddedToClusterEvent(NodeEvent): """Node Added event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class NodeAddedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. 
:type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -58,6 +60,7 @@ class NodeAddedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -70,12 +73,12 @@ class NodeAddedEvent(NodeEvent): 'node_capacities': {'key': 'NodeCapacities', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_id: str, node_instance: int, node_type: str, fabric_version: str, ip_address_or_fqdn: str, node_capacities: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeAddedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_id: str, node_instance: int, node_type: str, fabric_version: str, ip_address_or_fqdn: str, node_capacities: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeAddedToClusterEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_id = node_id self.node_instance = node_instance self.node_type = node_type self.fabric_version = fabric_version self.ip_address_or_fqdn = ip_address_or_fqdn self.node_capacities = node_capacities - self.kind = 'NodeAdded' + self.kind = 'NodeAddedToCluster' diff --git a/azure-servicefabric/azure/servicefabric/models/node_close_event.py 
b/azure-servicefabric/azure/servicefabric/models/node_closed_event.py similarity index 85% rename from azure-servicefabric/azure/servicefabric/models/node_close_event.py rename to azure-servicefabric/azure/servicefabric/models/node_closed_event.py index 11c4d88bcc67..d9abbee9e812 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_close_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_closed_event.py @@ -12,14 +12,16 @@ from .node_event import NodeEvent -class NodeCloseEvent(NodeEvent): - """Node Close event. +class NodeClosedEvent(NodeEvent): + """Node Closed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -32,7 +34,7 @@ class NodeCloseEvent(NodeEvent): :param node_id: Required. Id of Node. :type node_id: str :param node_instance: Required. Id of Node instance. - :type node_instance: str + :type node_instance: long :param error: Required. Describes error. 
:type error: str """ @@ -49,18 +51,19 @@ class NodeCloseEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, 'node_name': {'key': 'NodeName', 'type': 'str'}, 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'str'}, + 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, 'error': {'key': 'Error', 'type': 'str'}, } def __init__(self, **kwargs): - super(NodeCloseEvent, self).__init__(**kwargs) + super(NodeClosedEvent, self).__init__(**kwargs) self.node_id = kwargs.get('node_id', None) self.node_instance = kwargs.get('node_instance', None) self.error = kwargs.get('error', None) - self.kind = 'NodeClose' + self.kind = 'NodeClosed' diff --git a/azure-servicefabric/azure/servicefabric/models/node_close_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_closed_event_py3.py similarity index 77% rename from azure-servicefabric/azure/servicefabric/models/node_close_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_closed_event_py3.py index f6725025866f..e655d3d2dfc8 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_close_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_closed_event_py3.py @@ -12,14 +12,16 @@ from .node_event_py3 import NodeEvent -class NodeCloseEvent(NodeEvent): - """Node Close event. +class NodeClosedEvent(NodeEvent): + """Node Closed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -32,7 +34,7 @@ class NodeCloseEvent(NodeEvent): :param node_id: Required. Id of Node. :type node_id: str :param node_instance: Required. Id of Node instance. - :type node_instance: str + :type node_instance: long :param error: Required. Describes error. :type error: str """ @@ -49,18 +51,19 @@ class NodeCloseEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, 'node_name': {'key': 'NodeName', 'type': 'str'}, 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'str'}, + 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, 'error': {'key': 'Error', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_id: str, node_instance: str, error: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeCloseEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_id: str, node_instance: int, error: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeClosedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_id = node_id self.node_instance = node_instance self.error = error - self.kind = 'NodeClose' + self.kind = 'NodeClosed' diff --git a/azure-servicefabric/azure/servicefabric/models/node_closing_event.py 
b/azure-servicefabric/azure/servicefabric/models/node_closing_event.py deleted file mode 100644 index 6a3c5af744fa..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/node_closing_event.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event import NodeEvent - - -class NodeClosingEvent(NodeEvent): - """Node Closing event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance: Required. Id of Node instance. - :type node_instance: long - :param node_id: Required. Id of Node. - :type node_id: str - :param upgrade_domain: Required. Upgrade domain of Node. - :type upgrade_domain: str - :param fault_domain: Required. Fault domain of Node. - :type fault_domain: str - :param ip_address_or_fqdn: Required. IP address or FQDN. - :type ip_address_or_fqdn: str - :param hostname: Required. Name of Host. - :type hostname: str - :param is_seed_node: Required. Indicates if it is seed node. - :type is_seed_node: bool - :param node_version: Required. Version of Node. 
- :type node_version: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance': {'required': True}, - 'node_id': {'required': True}, - 'upgrade_domain': {'required': True}, - 'fault_domain': {'required': True}, - 'ip_address_or_fqdn': {'required': True}, - 'hostname': {'required': True}, - 'is_seed_node': {'required': True}, - 'node_version': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, - 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'upgrade_domain': {'key': 'UpgradeDomain', 'type': 'str'}, - 'fault_domain': {'key': 'FaultDomain', 'type': 'str'}, - 'ip_address_or_fqdn': {'key': 'IpAddressOrFQDN', 'type': 'str'}, - 'hostname': {'key': 'Hostname', 'type': 'str'}, - 'is_seed_node': {'key': 'IsSeedNode', 'type': 'bool'}, - 'node_version': {'key': 'NodeVersion', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(NodeClosingEvent, self).__init__(**kwargs) - self.node_instance = kwargs.get('node_instance', None) - self.node_id = kwargs.get('node_id', None) - self.upgrade_domain = kwargs.get('upgrade_domain', None) - self.fault_domain = kwargs.get('fault_domain', None) - self.ip_address_or_fqdn = kwargs.get('ip_address_or_fqdn', None) - self.hostname = kwargs.get('hostname', None) - self.is_seed_node = kwargs.get('is_seed_node', None) - self.node_version = kwargs.get('node_version', None) - self.kind = 'NodeClosing' diff --git a/azure-servicefabric/azure/servicefabric/models/node_closing_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_closing_event_py3.py 
deleted file mode 100644 index 217865660754..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/node_closing_event_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event_py3 import NodeEvent - - -class NodeClosingEvent(NodeEvent): - """Node Closing event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. - :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance: Required. Id of Node instance. - :type node_instance: long - :param node_id: Required. Id of Node. - :type node_id: str - :param upgrade_domain: Required. Upgrade domain of Node. - :type upgrade_domain: str - :param fault_domain: Required. Fault domain of Node. - :type fault_domain: str - :param ip_address_or_fqdn: Required. IP address or FQDN. - :type ip_address_or_fqdn: str - :param hostname: Required. Name of Host. - :type hostname: str - :param is_seed_node: Required. Indicates if it is seed node. - :type is_seed_node: bool - :param node_version: Required. Version of Node. 
- :type node_version: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance': {'required': True}, - 'node_id': {'required': True}, - 'upgrade_domain': {'required': True}, - 'fault_domain': {'required': True}, - 'ip_address_or_fqdn': {'required': True}, - 'hostname': {'required': True}, - 'is_seed_node': {'required': True}, - 'node_version': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, - 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'upgrade_domain': {'key': 'UpgradeDomain', 'type': 'str'}, - 'fault_domain': {'key': 'FaultDomain', 'type': 'str'}, - 'ip_address_or_fqdn': {'key': 'IpAddressOrFQDN', 'type': 'str'}, - 'hostname': {'key': 'Hostname', 'type': 'str'}, - 'is_seed_node': {'key': 'IsSeedNode', 'type': 'bool'}, - 'node_version': {'key': 'NodeVersion', 'type': 'str'}, - } - - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeClosingEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) - self.node_instance = node_instance - self.node_id = node_id - self.upgrade_domain = upgrade_domain - self.fault_domain = fault_domain - self.ip_address_or_fqdn = ip_address_or_fqdn - self.hostname = hostname - self.is_seed_node = is_seed_node - 
self.node_version = node_version - self.kind = 'NodeClosing' diff --git a/azure-servicefabric/azure/servicefabric/models/node_deactivate_complete_event.py b/azure-servicefabric/azure/servicefabric/models/node_deactivate_completed_event.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/node_deactivate_complete_event.py rename to azure-servicefabric/azure/servicefabric/models/node_deactivate_completed_event.py index 4856dc7873e4..9e20d52b544e 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_deactivate_complete_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_deactivate_completed_event.py @@ -12,14 +12,16 @@ from .node_event import NodeEvent -class NodeDeactivateCompleteEvent(NodeEvent): - """Node Deactivate Complete event. +class NodeDeactivateCompletedEvent(NodeEvent): + """Node Deactivate Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -52,6 +54,7 @@ class NodeDeactivateCompleteEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -63,9 +66,9 @@ class NodeDeactivateCompleteEvent(NodeEvent): } def __init__(self, **kwargs): - super(NodeDeactivateCompleteEvent, self).__init__(**kwargs) + super(NodeDeactivateCompletedEvent, self).__init__(**kwargs) self.node_instance = kwargs.get('node_instance', None) self.effective_deactivate_intent = kwargs.get('effective_deactivate_intent', None) self.batch_ids_with_deactivate_intent = kwargs.get('batch_ids_with_deactivate_intent', None) self.start_time = kwargs.get('start_time', None) - self.kind = 'NodeDeactivateComplete' + self.kind = 'NodeDeactivateCompleted' diff --git a/azure-servicefabric/azure/servicefabric/models/node_deactivate_complete_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_deactivate_completed_event_py3.py similarity index 83% rename from azure-servicefabric/azure/servicefabric/models/node_deactivate_complete_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_deactivate_completed_event_py3.py index 1e077364a6cd..d78dbcef6921 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_deactivate_complete_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_deactivate_completed_event_py3.py @@ -12,14 +12,16 @@ from .node_event_py3 import NodeEvent -class NodeDeactivateCompleteEvent(NodeEvent): - """Node Deactivate Complete event. +class NodeDeactivateCompletedEvent(NodeEvent): + """Node Deactivate Completed event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. 
The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -52,6 +54,7 @@ class NodeDeactivateCompleteEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -62,10 +65,10 @@ class NodeDeactivateCompleteEvent(NodeEvent): 'start_time': {'key': 'StartTime', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, effective_deactivate_intent: str, batch_ids_with_deactivate_intent: str, start_time, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeDeactivateCompleteEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, effective_deactivate_intent: str, batch_ids_with_deactivate_intent: str, start_time, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeDeactivateCompletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.effective_deactivate_intent = effective_deactivate_intent self.batch_ids_with_deactivate_intent = batch_ids_with_deactivate_intent self.start_time = start_time - self.kind = 'NodeDeactivateComplete' + self.kind = 'NodeDeactivateCompleted' diff --git 
a/azure-servicefabric/azure/servicefabric/models/node_deactivate_start_event.py b/azure-servicefabric/azure/servicefabric/models/node_deactivate_started_event.py similarity index 88% rename from azure-servicefabric/azure/servicefabric/models/node_deactivate_start_event.py rename to azure-servicefabric/azure/servicefabric/models/node_deactivate_started_event.py index 9c0a25df7294..7cf057fc516c 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_deactivate_start_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_deactivate_started_event.py @@ -12,14 +12,16 @@ from .node_event import NodeEvent -class NodeDeactivateStartEvent(NodeEvent): - """Node Deactivate Start event. +class NodeDeactivateStartedEvent(NodeEvent): + """Node Deactivate Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -49,6 +51,7 @@ class NodeDeactivateStartEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -59,8 +62,8 @@ class NodeDeactivateStartEvent(NodeEvent): } def __init__(self, **kwargs): - super(NodeDeactivateStartEvent, self).__init__(**kwargs) + super(NodeDeactivateStartedEvent, self).__init__(**kwargs) self.node_instance = kwargs.get('node_instance', None) self.batch_id = kwargs.get('batch_id', None) self.deactivate_intent = kwargs.get('deactivate_intent', None) - self.kind = 'NodeDeactivateStart' + self.kind = 'NodeDeactivateStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/node_deactivate_start_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_deactivate_started_event_py3.py similarity index 82% rename from azure-servicefabric/azure/servicefabric/models/node_deactivate_start_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_deactivate_started_event_py3.py index 8722fdd9ef0b..30c39037040c 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_deactivate_start_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_deactivate_started_event_py3.py @@ -12,14 +12,16 @@ from .node_event_py3 import NodeEvent -class NodeDeactivateStartEvent(NodeEvent): - """Node Deactivate Start event. +class NodeDeactivateStartedEvent(NodeEvent): + """Node Deactivate Started event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. 
+ :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -49,6 +51,7 @@ class NodeDeactivateStartEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -58,9 +61,9 @@ class NodeDeactivateStartEvent(NodeEvent): 'deactivate_intent': {'key': 'DeactivateIntent', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, batch_id: str, deactivate_intent: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeDeactivateStartEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, batch_id: str, deactivate_intent: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeDeactivateStartedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.batch_id = batch_id self.deactivate_intent = deactivate_intent - self.kind = 'NodeDeactivateStart' + self.kind = 'NodeDeactivateStarted' diff --git a/azure-servicefabric/azure/servicefabric/models/node_down_event.py b/azure-servicefabric/azure/servicefabric/models/node_down_event.py index 0807efd1966f..b20b3d4baada 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_down_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_down_event.py @@ -20,6 +20,8 @@ class 
NodeDownEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -46,6 +48,7 @@ class NodeDownEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/node_down_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_down_event_py3.py index 0400d49454c1..7702aee91966 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_down_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_down_event_py3.py @@ -20,6 +20,8 @@ class NodeDownEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -46,6 +48,7 @@ class NodeDownEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -54,8 +57,8 @@ class NodeDownEvent(NodeEvent): 'last_node_up_at': {'key': 'LastNodeUpAt', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, last_node_up_at, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeDownEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, last_node_up_at, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeDownEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.last_node_up_at = last_node_up_at self.kind = 'NodeDown' diff --git a/azure-servicefabric/azure/servicefabric/models/node_event.py b/azure-servicefabric/azure/servicefabric/models/node_event.py index b383f75cfbb2..b10fa82f0057 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_event.py @@ -16,18 +16,19 @@ class NodeEvent(FabricEvent): """Represents the base for all Node Events. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: NodeAbortedEvent, NodeAbortingEvent, NodeAddedEvent, - NodeCloseEvent, NodeClosingEvent, NodeDeactivateCompleteEvent, - NodeDeactivateStartEvent, NodeDownEvent, NodeHealthReportCreatedEvent, - NodeHealthReportExpiredEvent, NodeOpenedSuccessEvent, NodeOpenFailedEvent, - NodeOpeningEvent, NodeRemovedEvent, NodeUpEvent, - ChaosRestartNodeFaultCompletedEvent, ChaosRestartNodeFaultScheduledEvent + sub-classes are: NodeAbortedEvent, NodeAddedToClusterEvent, + NodeClosedEvent, NodeDeactivateCompletedEvent, NodeDeactivateStartedEvent, + NodeDownEvent, NodeNewHealthReportEvent, NodeHealthReportExpiredEvent, + NodeOpenSucceededEvent, NodeOpenFailedEvent, NodeRemovedFromClusterEvent, + NodeUpEvent, ChaosNodeRestartScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -48,6 +49,7 @@ class NodeEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -55,7 +57,7 @@ class NodeEvent(FabricEvent): } _subtype_map = { - 'kind': {'NodeAborted': 'NodeAbortedEvent', 'NodeAborting': 'NodeAbortingEvent', 'NodeAdded': 'NodeAddedEvent', 'NodeClose': 'NodeCloseEvent', 'NodeClosing': 'NodeClosingEvent', 'NodeDeactivateComplete': 'NodeDeactivateCompleteEvent', 'NodeDeactivateStart': 'NodeDeactivateStartEvent', 'NodeDown': 'NodeDownEvent', 'NodeHealthReportCreated': 'NodeHealthReportCreatedEvent', 'NodeHealthReportExpired': 'NodeHealthReportExpiredEvent', 'NodeOpenedSuccess': 'NodeOpenedSuccessEvent', 'NodeOpenFailed': 'NodeOpenFailedEvent', 'NodeOpening': 'NodeOpeningEvent', 'NodeRemoved': 'NodeRemovedEvent', 'NodeUp': 'NodeUpEvent', 'ChaosRestartNodeFaultCompleted': 'ChaosRestartNodeFaultCompletedEvent', 'ChaosRestartNodeFaultScheduled': 'ChaosRestartNodeFaultScheduledEvent'} + 'kind': {'NodeAborted': 'NodeAbortedEvent', 'NodeAddedToCluster': 'NodeAddedToClusterEvent', 'NodeClosed': 'NodeClosedEvent', 'NodeDeactivateCompleted': 'NodeDeactivateCompletedEvent', 'NodeDeactivateStarted': 'NodeDeactivateStartedEvent', 'NodeDown': 'NodeDownEvent', 'NodeNewHealthReport': 'NodeNewHealthReportEvent', 'NodeHealthReportExpired': 'NodeHealthReportExpiredEvent', 'NodeOpenSucceeded': 'NodeOpenSucceededEvent', 'NodeOpenFailed': 'NodeOpenFailedEvent', 'NodeRemovedFromCluster': 'NodeRemovedFromClusterEvent', 'NodeUp': 'NodeUpEvent', 'ChaosNodeRestartScheduled': 'ChaosNodeRestartScheduledEvent'} } def __init__(self, **kwargs): diff --git 
a/azure-servicefabric/azure/servicefabric/models/node_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_event_py3.py index 09d9d378c6b5..1fd27413a593 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_event_py3.py @@ -16,18 +16,19 @@ class NodeEvent(FabricEvent): """Represents the base for all Node Events. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: NodeAbortedEvent, NodeAbortingEvent, NodeAddedEvent, - NodeCloseEvent, NodeClosingEvent, NodeDeactivateCompleteEvent, - NodeDeactivateStartEvent, NodeDownEvent, NodeHealthReportCreatedEvent, - NodeHealthReportExpiredEvent, NodeOpenedSuccessEvent, NodeOpenFailedEvent, - NodeOpeningEvent, NodeRemovedEvent, NodeUpEvent, - ChaosRestartNodeFaultCompletedEvent, ChaosRestartNodeFaultScheduledEvent + sub-classes are: NodeAbortedEvent, NodeAddedToClusterEvent, + NodeClosedEvent, NodeDeactivateCompletedEvent, NodeDeactivateStartedEvent, + NodeDownEvent, NodeNewHealthReportEvent, NodeHealthReportExpiredEvent, + NodeOpenSucceededEvent, NodeOpenFailedEvent, NodeRemovedFromClusterEvent, + NodeUpEvent, ChaosNodeRestartScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -48,6 +49,7 @@ class NodeEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -55,10 +57,10 @@ class NodeEvent(FabricEvent): } _subtype_map = { - 'kind': {'NodeAborted': 'NodeAbortedEvent', 'NodeAborting': 'NodeAbortingEvent', 'NodeAdded': 'NodeAddedEvent', 'NodeClose': 'NodeCloseEvent', 'NodeClosing': 'NodeClosingEvent', 'NodeDeactivateComplete': 'NodeDeactivateCompleteEvent', 'NodeDeactivateStart': 'NodeDeactivateStartEvent', 'NodeDown': 'NodeDownEvent', 'NodeHealthReportCreated': 'NodeHealthReportCreatedEvent', 'NodeHealthReportExpired': 'NodeHealthReportExpiredEvent', 'NodeOpenedSuccess': 'NodeOpenedSuccessEvent', 'NodeOpenFailed': 'NodeOpenFailedEvent', 'NodeOpening': 'NodeOpeningEvent', 'NodeRemoved': 'NodeRemovedEvent', 'NodeUp': 'NodeUpEvent', 'ChaosRestartNodeFaultCompleted': 'ChaosRestartNodeFaultCompletedEvent', 'ChaosRestartNodeFaultScheduled': 'ChaosRestartNodeFaultScheduledEvent'} + 'kind': {'NodeAborted': 'NodeAbortedEvent', 'NodeAddedToCluster': 'NodeAddedToClusterEvent', 'NodeClosed': 'NodeClosedEvent', 'NodeDeactivateCompleted': 'NodeDeactivateCompletedEvent', 'NodeDeactivateStarted': 'NodeDeactivateStartedEvent', 'NodeDown': 'NodeDownEvent', 'NodeNewHealthReport': 'NodeNewHealthReportEvent', 'NodeHealthReportExpired': 'NodeHealthReportExpiredEvent', 'NodeOpenSucceeded': 'NodeOpenSucceededEvent', 'NodeOpenFailed': 'NodeOpenFailedEvent', 'NodeRemovedFromCluster': 'NodeRemovedFromClusterEvent', 'NodeUp': 'NodeUpEvent', 'ChaosNodeRestartScheduled': 'ChaosNodeRestartScheduledEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, has_correlated_events: 
bool=None, **kwargs) -> None: - super(NodeEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.node_name = node_name self.kind = 'NodeEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event.py index 6aee39026383..961324f6fe0e 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event.py @@ -20,6 +20,8 @@ class NodeHealthReportExpiredEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -68,6 +70,7 @@ class NodeHealthReportExpiredEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event_py3.py index 6e186c2f62fe..8012e4bc284c 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class NodeHealthReportExpiredEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -68,6 +70,7 @@ class NodeHealthReportExpiredEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -83,8 +86,8 @@ class NodeHealthReportExpiredEvent(NodeEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance_id = node_instance_id self.source_id = source_id self.property = property diff --git a/azure-servicefabric/azure/servicefabric/models/node_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/node_new_health_report_event.py similarity index 93% rename from 
azure-servicefabric/azure/servicefabric/models/node_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/node_new_health_report_event.py index d8ff5a432b67..d8f835a0bbf1 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_new_health_report_event.py @@ -12,7 +12,7 @@ from .node_event import NodeEvent -class NodeHealthReportCreatedEvent(NodeEvent): +class NodeNewHealthReportEvent(NodeEvent): """Node Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class NodeHealthReportCreatedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -68,6 +70,7 @@ class NodeHealthReportCreatedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -84,7 +87,7 @@ class NodeHealthReportCreatedEvent(NodeEvent): } def __init__(self, **kwargs): - super(NodeHealthReportCreatedEvent, self).__init__(**kwargs) + super(NodeNewHealthReportEvent, self).__init__(**kwargs) self.node_instance_id = kwargs.get('node_instance_id', None) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) @@ -94,4 +97,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', 
None) - self.kind = 'NodeHealthReportCreated' + self.kind = 'NodeNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/node_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_new_health_report_event_py3.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/node_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_new_health_report_event_py3.py index 09fbe55018fa..bf7a0effaecb 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .node_event_py3 import NodeEvent -class NodeHealthReportCreatedEvent(NodeEvent): +class NodeNewHealthReportEvent(NodeEvent): """Node Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class NodeHealthReportCreatedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -68,6 +70,7 @@ class NodeHealthReportCreatedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -83,8 +86,8 @@ class NodeHealthReportCreatedEvent(NodeEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance_id = node_instance_id self.source_id = source_id self.property = property @@ -94,4 +97,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_i self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 
'NodeHealthReportCreated' + self.kind = 'NodeNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/node_open_failed_event.py b/azure-servicefabric/azure/servicefabric/models/node_open_failed_event.py index 66a9802b7351..e7fb8fbefa3f 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_open_failed_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_open_failed_event.py @@ -20,6 +20,8 @@ class NodeOpenFailedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -67,6 +69,7 @@ class NodeOpenFailedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/node_open_failed_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_open_failed_event_py3.py index 9bfb09d9f0e2..8f9e240fccaa 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_open_failed_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_open_failed_event_py3.py @@ -20,6 +20,8 @@ class NodeOpenFailedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -67,6 +69,7 @@ class NodeOpenFailedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -82,8 +85,8 @@ class NodeOpenFailedEvent(NodeEvent): 'error': {'key': 'Error', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, error: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeOpenFailedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, error: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeOpenFailedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.node_id = node_id self.upgrade_domain = upgrade_domain diff --git a/azure-servicefabric/azure/servicefabric/models/node_opening_event.py b/azure-servicefabric/azure/servicefabric/models/node_open_succeeded_event.py similarity index 92% rename from azure-servicefabric/azure/servicefabric/models/node_opening_event.py rename to azure-servicefabric/azure/servicefabric/models/node_open_succeeded_event.py index 
1ea9b262df07..a0ab8edcb288 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_opening_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_open_succeeded_event.py @@ -12,14 +12,16 @@ from .node_event import NodeEvent -class NodeOpeningEvent(NodeEvent): - """Node Opening event. +class NodeOpenSucceededEvent(NodeEvent): + """Node Opened Succeeded event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -64,6 +66,7 @@ class NodeOpeningEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -79,7 +82,7 @@ class NodeOpeningEvent(NodeEvent): } def __init__(self, **kwargs): - super(NodeOpeningEvent, self).__init__(**kwargs) + super(NodeOpenSucceededEvent, self).__init__(**kwargs) self.node_instance = kwargs.get('node_instance', None) self.node_id = kwargs.get('node_id', None) self.upgrade_domain = kwargs.get('upgrade_domain', None) @@ -88,4 +91,4 @@ def __init__(self, **kwargs): self.hostname = kwargs.get('hostname', None) self.is_seed_node = kwargs.get('is_seed_node', None) self.node_version = kwargs.get('node_version', None) - self.kind = 'NodeOpening' + self.kind = 'NodeOpenSucceeded' diff --git a/azure-servicefabric/azure/servicefabric/models/node_opened_success_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_open_succeeded_event_py3.py similarity index 86% rename from 
azure-servicefabric/azure/servicefabric/models/node_opened_success_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_open_succeeded_event_py3.py index 5e297bfad8b5..e29ba22fa256 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_opened_success_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_open_succeeded_event_py3.py @@ -12,14 +12,16 @@ from .node_event_py3 import NodeEvent -class NodeOpenedSuccessEvent(NodeEvent): - """Node Opened Success event. +class NodeOpenSucceededEvent(NodeEvent): + """Node Opened Succeeded event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -64,6 +66,7 @@ class NodeOpenedSuccessEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -78,8 +81,8 @@ class NodeOpenedSuccessEvent(NodeEvent): 'node_version': {'key': 'NodeVersion', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeOpenedSuccessEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: 
str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeOpenSucceededEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.node_id = node_id self.upgrade_domain = upgrade_domain @@ -88,4 +91,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_i self.hostname = hostname self.is_seed_node = is_seed_node self.node_version = node_version - self.kind = 'NodeOpenedSuccess' + self.kind = 'NodeOpenSucceeded' diff --git a/azure-servicefabric/azure/servicefabric/models/node_opened_success_event.py b/azure-servicefabric/azure/servicefabric/models/node_opened_success_event.py deleted file mode 100644 index 3ed453c7fec0..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/node_opened_success_event.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event import NodeEvent - - -class NodeOpenedSuccessEvent(NodeEvent): - """Node Opened Success event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. 
- :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance: Required. Id of Node instance. - :type node_instance: long - :param node_id: Required. Id of Node. - :type node_id: str - :param upgrade_domain: Required. Upgrade domain of Node. - :type upgrade_domain: str - :param fault_domain: Required. Fault domain of Node. - :type fault_domain: str - :param ip_address_or_fqdn: Required. IP address or FQDN. - :type ip_address_or_fqdn: str - :param hostname: Required. Name of Host. - :type hostname: str - :param is_seed_node: Required. Indicates if it is seed node. - :type is_seed_node: bool - :param node_version: Required. Version of Node. - :type node_version: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance': {'required': True}, - 'node_id': {'required': True}, - 'upgrade_domain': {'required': True}, - 'fault_domain': {'required': True}, - 'ip_address_or_fqdn': {'required': True}, - 'hostname': {'required': True}, - 'is_seed_node': {'required': True}, - 'node_version': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, - 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'upgrade_domain': {'key': 'UpgradeDomain', 'type': 'str'}, - 'fault_domain': {'key': 'FaultDomain', 'type': 'str'}, - 'ip_address_or_fqdn': {'key': 
'IpAddressOrFQDN', 'type': 'str'}, - 'hostname': {'key': 'Hostname', 'type': 'str'}, - 'is_seed_node': {'key': 'IsSeedNode', 'type': 'bool'}, - 'node_version': {'key': 'NodeVersion', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(NodeOpenedSuccessEvent, self).__init__(**kwargs) - self.node_instance = kwargs.get('node_instance', None) - self.node_id = kwargs.get('node_id', None) - self.upgrade_domain = kwargs.get('upgrade_domain', None) - self.fault_domain = kwargs.get('fault_domain', None) - self.ip_address_or_fqdn = kwargs.get('ip_address_or_fqdn', None) - self.hostname = kwargs.get('hostname', None) - self.is_seed_node = kwargs.get('is_seed_node', None) - self.node_version = kwargs.get('node_version', None) - self.kind = 'NodeOpenedSuccess' diff --git a/azure-servicefabric/azure/servicefabric/models/node_opening_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_opening_event_py3.py deleted file mode 100644 index c16e8c965a44..000000000000 --- a/azure-servicefabric/azure/servicefabric/models/node_opening_event_py3.py +++ /dev/null @@ -1,91 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .node_event_py3 import NodeEvent - - -class NodeOpeningEvent(NodeEvent): - """Node Opening event. - - All required parameters must be populated in order to send to Azure. - - :param event_instance_id: Required. The identifier for the FabricEvent - instance. - :type event_instance_id: str - :param time_stamp: Required. The time event was logged. 
- :type time_stamp: datetime - :param has_correlated_events: Shows there is existing related events - available. - :type has_correlated_events: bool - :param kind: Required. Constant filled by server. - :type kind: str - :param node_name: Required. The name of a Service Fabric node. - :type node_name: str - :param node_instance: Required. Id of Node instance. - :type node_instance: long - :param node_id: Required. Id of Node. - :type node_id: str - :param upgrade_domain: Required. Upgrade domain of Node. - :type upgrade_domain: str - :param fault_domain: Required. Fault domain of Node. - :type fault_domain: str - :param ip_address_or_fqdn: Required. IP address or FQDN. - :type ip_address_or_fqdn: str - :param hostname: Required. Name of Host. - :type hostname: str - :param is_seed_node: Required. Indicates if it is seed node. - :type is_seed_node: bool - :param node_version: Required. Version of Node. - :type node_version: str - """ - - _validation = { - 'event_instance_id': {'required': True}, - 'time_stamp': {'required': True}, - 'kind': {'required': True}, - 'node_name': {'required': True}, - 'node_instance': {'required': True}, - 'node_id': {'required': True}, - 'upgrade_domain': {'required': True}, - 'fault_domain': {'required': True}, - 'ip_address_or_fqdn': {'required': True}, - 'hostname': {'required': True}, - 'is_seed_node': {'required': True}, - 'node_version': {'required': True}, - } - - _attribute_map = { - 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, - 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, - 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, - 'kind': {'key': 'Kind', 'type': 'str'}, - 'node_name': {'key': 'NodeName', 'type': 'str'}, - 'node_instance': {'key': 'NodeInstance', 'type': 'long'}, - 'node_id': {'key': 'NodeId', 'type': 'str'}, - 'upgrade_domain': {'key': 'UpgradeDomain', 'type': 'str'}, - 'fault_domain': {'key': 'FaultDomain', 'type': 'str'}, - 'ip_address_or_fqdn': {'key': 
'IpAddressOrFQDN', 'type': 'str'}, - 'hostname': {'key': 'Hostname', 'type': 'str'}, - 'is_seed_node': {'key': 'IsSeedNode', 'type': 'bool'}, - 'node_version': {'key': 'NodeVersion', 'type': 'str'}, - } - - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, node_id: str, upgrade_domain: str, fault_domain: str, ip_address_or_fqdn: str, hostname: str, is_seed_node: bool, node_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeOpeningEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) - self.node_instance = node_instance - self.node_id = node_id - self.upgrade_domain = upgrade_domain - self.fault_domain = fault_domain - self.ip_address_or_fqdn = ip_address_or_fqdn - self.hostname = hostname - self.is_seed_node = is_seed_node - self.node_version = node_version - self.kind = 'NodeOpening' diff --git a/azure-servicefabric/azure/servicefabric/models/node_removed_event.py b/azure-servicefabric/azure/servicefabric/models/node_removed_from_cluster_event.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/node_removed_event.py rename to azure-servicefabric/azure/servicefabric/models/node_removed_from_cluster_event.py index f37cab9d905d..651954f94078 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_removed_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_removed_from_cluster_event.py @@ -12,7 +12,7 @@ from .node_event import NodeEvent -class NodeRemovedEvent(NodeEvent): +class NodeRemovedFromClusterEvent(NodeEvent): """Node Removed event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class NodeRemovedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. 
+ :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -58,6 +60,7 @@ class NodeRemovedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -71,11 +74,11 @@ class NodeRemovedEvent(NodeEvent): } def __init__(self, **kwargs): - super(NodeRemovedEvent, self).__init__(**kwargs) + super(NodeRemovedFromClusterEvent, self).__init__(**kwargs) self.node_id = kwargs.get('node_id', None) self.node_instance = kwargs.get('node_instance', None) self.node_type = kwargs.get('node_type', None) self.fabric_version = kwargs.get('fabric_version', None) self.ip_address_or_fqdn = kwargs.get('ip_address_or_fqdn', None) self.node_capacities = kwargs.get('node_capacities', None) - self.kind = 'NodeRemoved' + self.kind = 'NodeRemovedFromCluster' diff --git a/azure-servicefabric/azure/servicefabric/models/node_removed_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_removed_from_cluster_event_py3.py similarity index 86% rename from azure-servicefabric/azure/servicefabric/models/node_removed_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/node_removed_from_cluster_event_py3.py index ff3dbcb6fb7e..dfbf977f2496 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_removed_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_removed_from_cluster_event_py3.py @@ -12,7 +12,7 @@ from .node_event_py3 import NodeEvent -class NodeRemovedEvent(NodeEvent): +class NodeRemovedFromClusterEvent(NodeEvent): """Node Removed event. All required parameters must be populated in order to send to Azure. 
@@ -20,6 +20,8 @@ class NodeRemovedEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -58,6 +60,7 @@ class NodeRemovedEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -70,12 +73,12 @@ class NodeRemovedEvent(NodeEvent): 'node_capacities': {'key': 'NodeCapacities', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_id: str, node_instance: int, node_type: str, fabric_version: str, ip_address_or_fqdn: str, node_capacities: str, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeRemovedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_id: str, node_instance: int, node_type: str, fabric_version: str, ip_address_or_fqdn: str, node_capacities: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeRemovedFromClusterEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_id = node_id self.node_instance = node_instance self.node_type = node_type self.fabric_version = fabric_version self.ip_address_or_fqdn = ip_address_or_fqdn self.node_capacities = node_capacities - self.kind = 'NodeRemoved' + self.kind = 
'NodeRemovedFromCluster' diff --git a/azure-servicefabric/azure/servicefabric/models/node_up_event.py b/azure-servicefabric/azure/servicefabric/models/node_up_event.py index bbe4ec65f7cd..bcacf73a8285 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_up_event.py +++ b/azure-servicefabric/azure/servicefabric/models/node_up_event.py @@ -20,6 +20,8 @@ class NodeUpEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -46,6 +48,7 @@ class NodeUpEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/node_up_event_py3.py b/azure-servicefabric/azure/servicefabric/models/node_up_event_py3.py index c8d920f069a9..4b02f97cdca6 100644 --- a/azure-servicefabric/azure/servicefabric/models/node_up_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/node_up_event_py3.py @@ -20,6 +20,8 @@ class NodeUpEvent(NodeEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -46,6 +48,7 @@ class NodeUpEvent(NodeEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -54,8 +57,8 @@ class NodeUpEvent(NodeEvent): 'last_node_down_at': {'key': 'LastNodeDownAt', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, last_node_down_at, has_correlated_events: bool=None, **kwargs) -> None: - super(NodeUpEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, node_name: str, node_instance: int, last_node_down_at, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(NodeUpEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, node_name=node_name, **kwargs) self.node_instance = node_instance self.last_node_down_at = last_node_down_at self.kind = 'NodeUp' diff --git a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_replica_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_application_resource_description_list.py similarity index 72% rename from azure-servicefabric/azure/servicefabric/models/paged_service_resource_replica_description_list.py rename to azure-servicefabric/azure/servicefabric/models/paged_application_resource_description_list.py index 59965287ec2a..bda03402c2bc 100644 --- a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_replica_description_list.py +++ 
b/azure-servicefabric/azure/servicefabric/models/paged_application_resource_description_list.py @@ -12,9 +12,9 @@ from msrest.serialization import Model -class PagedServiceResourceReplicaDescriptionList(Model): - """The list of service resources in the cluster. The list is paged when all of - the results cannot fit in a single message. The next set of results can be +class PagedApplicationResourceDescriptionList(Model): + """The list of application resources. The list is paged when all of the + results cannot fit in a single message. The next set of results can be obtained by executing the same query with the continuation token provided in this list. @@ -25,17 +25,17 @@ class PagedServiceResourceReplicaDescriptionList(Model): returns next set of results. If there are no further results, then the continuation token is not included in the response. :type continuation_token: str - :param items: List of service resource description. + :param items: One page of the list. :type items: - list[~azure.servicefabric.models.ServiceResourceReplicaDescription] + list[~azure.servicefabric.models.ApplicationResourceDescription] """ _attribute_map = { 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, - 'items': {'key': 'Items', 'type': '[ServiceResourceReplicaDescription]'}, + 'items': {'key': 'Items', 'type': '[ApplicationResourceDescription]'}, } def __init__(self, **kwargs): - super(PagedServiceResourceReplicaDescriptionList, self).__init__(**kwargs) + super(PagedApplicationResourceDescriptionList, self).__init__(**kwargs) self.continuation_token = kwargs.get('continuation_token', None) self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_replica_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_application_resource_description_list_py3.py similarity index 72% rename from 
azure-servicefabric/azure/servicefabric/models/paged_service_resource_replica_description_list_py3.py rename to azure-servicefabric/azure/servicefabric/models/paged_application_resource_description_list_py3.py index 3336e5559b8e..bc5bd4690f1c 100644 --- a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_replica_description_list_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/paged_application_resource_description_list_py3.py @@ -12,9 +12,9 @@ from msrest.serialization import Model -class PagedServiceResourceReplicaDescriptionList(Model): - """The list of service resources in the cluster. The list is paged when all of - the results cannot fit in a single message. The next set of results can be +class PagedApplicationResourceDescriptionList(Model): + """The list of application resources. The list is paged when all of the + results cannot fit in a single message. The next set of results can be obtained by executing the same query with the continuation token provided in this list. @@ -25,17 +25,17 @@ class PagedServiceResourceReplicaDescriptionList(Model): returns next set of results. If there are no further results, then the continuation token is not included in the response. :type continuation_token: str - :param items: List of service resource description. + :param items: One page of the list. 
:type items: - list[~azure.servicefabric.models.ServiceResourceReplicaDescription] + list[~azure.servicefabric.models.ApplicationResourceDescription] """ _attribute_map = { 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, - 'items': {'key': 'Items', 'type': '[ServiceResourceReplicaDescription]'}, + 'items': {'key': 'Items', 'type': '[ApplicationResourceDescription]'}, } def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: - super(PagedServiceResourceReplicaDescriptionList, self).__init__(**kwargs) + super(PagedApplicationResourceDescriptionList, self).__init__(**kwargs) self.continuation_token = continuation_token self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/paged_gateway_resource_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_gateway_resource_description_list.py new file mode 100644 index 000000000000..4c415664302a --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_gateway_resource_description_list.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedGatewayResourceDescriptionList(Model): + """The list of gateway resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. 
The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: list[~azure.servicefabric.models.GatewayResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[GatewayResourceDescription]'}, + } + + def __init__(self, **kwargs): + super(PagedGatewayResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_gateway_resource_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_gateway_resource_description_list_py3.py new file mode 100644 index 000000000000..b4a5cbe08f41 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_gateway_resource_description_list_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedGatewayResourceDescriptionList(Model): + """The list of gateway resources. The list is paged when all of the results + cannot fit in a single message. 
The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: list[~azure.servicefabric.models.GatewayResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[GatewayResourceDescription]'}, + } + + def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: + super(PagedGatewayResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/paged_network_resource_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_network_resource_description_list.py new file mode 100644 index 000000000000..9391a3aa8d55 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_network_resource_description_list.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedNetworkResourceDescriptionList(Model): + """The list of network resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: list[~azure.servicefabric.models.NetworkResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[NetworkResourceDescription]'}, + } + + def __init__(self, **kwargs): + super(PagedNetworkResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_network_resource_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_network_resource_description_list_py3.py new file mode 100644 index 000000000000..729aeed28bde --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_network_resource_description_list_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedNetworkResourceDescriptionList(Model): + """The list of network resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. 
+ :type items: list[~azure.servicefabric.models.NetworkResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[NetworkResourceDescription]'}, + } + + def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: + super(PagedNetworkResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/paged_secret_resource_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_secret_resource_description_list.py new file mode 100644 index 000000000000..8ec32f9fc767 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_secret_resource_description_list.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedSecretResourceDescriptionList(Model): + """The list of secret resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. 
If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: list[~azure.servicefabric.models.SecretResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[SecretResourceDescription]'}, + } + + def __init__(self, **kwargs): + super(PagedSecretResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_secret_resource_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_secret_resource_description_list_py3.py new file mode 100644 index 000000000000..6c02124ff12a --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_secret_resource_description_list_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedSecretResourceDescriptionList(Model): + """The list of secret resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. 
The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: list[~azure.servicefabric.models.SecretResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[SecretResourceDescription]'}, + } + + def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: + super(PagedSecretResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/paged_secret_value_resource_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_secret_value_resource_description_list.py new file mode 100644 index 000000000000..927c0fd85d7d --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_secret_value_resource_description_list.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedSecretValueResourceDescriptionList(Model): + """The list of values of a secret resource, paged if the number of results + exceeds the limits of a single message. 
The next set of results can be + obtained by executing the same query with the continuation token provided + in the previous page. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: + list[~azure.servicefabric.models.SecretValueResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[SecretValueResourceDescription]'}, + } + + def __init__(self, **kwargs): + super(PagedSecretValueResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_secret_value_resource_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_secret_value_resource_description_list_py3.py new file mode 100644 index 000000000000..683d5a382d58 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_secret_value_resource_description_list_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedSecretValueResourceDescriptionList(Model): + """The list of values of a secret resource, paged if the number of results + exceeds the limits of a single message. The next set of results can be + obtained by executing the same query with the continuation token provided + in the previous page. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: + list[~azure.servicefabric.models.SecretValueResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[SecretValueResourceDescription]'}, + } + + def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: + super(PagedSecretValueResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/paged_service_replica_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_service_replica_description_list.py new file mode 100644 index 000000000000..94956f063de4 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_service_replica_description_list.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedServiceReplicaDescriptionList(Model): + """The list of service resource replicas in the cluster. The list is paged + when all of the results cannot fit in a single message. The next set of + results can be obtained by executing the same query with the continuation + token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: List of service resource replica description. 
+ :type items: list[~azure.servicefabric.models.ServiceReplicaDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[ServiceReplicaDescription]'}, + } + + def __init__(self, **kwargs): + super(PagedServiceReplicaDescriptionList, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_service_replica_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_service_replica_description_list_py3.py new file mode 100644 index 000000000000..ad77f539351b --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_service_replica_description_list_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedServiceReplicaDescriptionList(Model): + """The list of service resource replicas in the cluster. The list is paged + when all of the results cannot fit in a single message. The next set of + results can be obtained by executing the same query with the continuation + token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. 
When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: List of service resource replica description. + :type items: list[~azure.servicefabric.models.ServiceReplicaDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[ServiceReplicaDescription]'}, + } + + def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: + super(PagedServiceReplicaDescriptionList, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list.py index 004ad553d81e..76591cdc5e7d 100644 --- a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list.py +++ b/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list.py @@ -13,10 +13,9 @@ class PagedServiceResourceDescriptionList(Model): - """The list of service resources in the cluster. The list is paged when all of - the results cannot fit in a single message. The next set of results can be - obtained by executing the same query with the continuation token provided - in this list. + """The list of service resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. :param continuation_token: The continuation token parameter is used to obtain next set of results. The continuation token is included in the @@ -25,7 +24,7 @@ class PagedServiceResourceDescriptionList(Model): returns next set of results. 
If there are no further results, then the continuation token is not included in the response. :type continuation_token: str - :param items: List of service resource description. + :param items: One page of the list. :type items: list[~azure.servicefabric.models.ServiceResourceDescription] """ diff --git a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list_py3.py index 03c2ef9a5404..0d17ddbdc884 100644 --- a/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/paged_service_resource_description_list_py3.py @@ -13,10 +13,9 @@ class PagedServiceResourceDescriptionList(Model): - """The list of service resources in the cluster. The list is paged when all of - the results cannot fit in a single message. The next set of results can be - obtained by executing the same query with the continuation token provided - in this list. + """The list of service resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. :param continuation_token: The continuation token parameter is used to obtain next set of results. The continuation token is included in the @@ -25,7 +24,7 @@ class PagedServiceResourceDescriptionList(Model): returns next set of results. If there are no further results, then the continuation token is not included in the response. :type continuation_token: str - :param items: List of service resource description. + :param items: One page of the list. 
:type items: list[~azure.servicefabric.models.ServiceResourceDescription] """ diff --git a/azure-servicefabric/azure/servicefabric/models/paged_volume_resource_description_list.py b/azure-servicefabric/azure/servicefabric/models/paged_volume_resource_description_list.py new file mode 100644 index 000000000000..96f1ac5597bd --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_volume_resource_description_list.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedVolumeResourceDescriptionList(Model): + """The list of volume resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. 
+ :type items: list[~azure.servicefabric.models.VolumeResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[VolumeResourceDescription]'}, + } + + def __init__(self, **kwargs): + super(PagedVolumeResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.items = kwargs.get('items', None) diff --git a/azure-servicefabric/azure/servicefabric/models/paged_volume_resource_description_list_py3.py b/azure-servicefabric/azure/servicefabric/models/paged_volume_resource_description_list_py3.py new file mode 100644 index 000000000000..a72a818e5ef7 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/paged_volume_resource_description_list_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PagedVolumeResourceDescriptionList(Model): + """The list of volume resources. The list is paged when all of the results + cannot fit in a single message. The next set of results can be obtained by + executing the same query with the continuation token provided in this list. + + :param continuation_token: The continuation token parameter is used to + obtain next set of results. The continuation token is included in the + response of the API when the results from the system do not fit in a + single response. When this value is passed to the next API call, the API + returns next set of results. 
If there are no further results, then the + continuation token is not included in the response. + :type continuation_token: str + :param items: One page of the list. + :type items: list[~azure.servicefabric.models.VolumeResourceDescription] + """ + + _attribute_map = { + 'continuation_token': {'key': 'ContinuationToken', 'type': 'str'}, + 'items': {'key': 'Items', 'type': '[VolumeResourceDescription]'}, + } + + def __init__(self, *, continuation_token: str=None, items=None, **kwargs) -> None: + super(PagedVolumeResourceDescriptionList, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.items = items diff --git a/azure-servicefabric/azure/servicefabric/models/partition_analysis_event.py b/azure-servicefabric/azure/servicefabric/models/partition_analysis_event.py index 5cfd8a5ec0b0..1bd12aebebea 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_analysis_event.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_analysis_event.py @@ -23,6 +23,8 @@ class PartitionAnalysisEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -50,6 +52,7 @@ class PartitionAnalysisEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/partition_analysis_event_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_analysis_event_py3.py index 91bdb627b784..b864bde3a847 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_analysis_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_analysis_event_py3.py @@ -23,6 +23,8 @@ class PartitionAnalysisEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -50,6 +52,7 @@ class PartitionAnalysisEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -61,7 +64,7 @@ class PartitionAnalysisEvent(PartitionEvent): 'kind': {'PartitionPrimaryMoveAnalysis': 'PartitionPrimaryMoveAnalysisEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, metadata, has_correlated_events: bool=None, **kwargs) -> None: - super(PartitionAnalysisEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, metadata, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(PartitionAnalysisEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) self.metadata = metadata self.kind = 'PartitionAnalysisEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info.py b/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info.py index 803db32008bd..a78e8f8e094c 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info.py @@ -33,11 +33,7 @@ class PartitionBackupConfigurationInfo(BackupConfigurationInfo): :param service_name: The full name of the service with 'fabric:' URI scheme. 
:type service_name: str - :param partition_id: An internal ID used by Service Fabric to uniquely - identify a partition. This is a randomly generated GUID when the service - was created. The partition ID is unique and does not change for the - lifetime of the service. If the same service was deleted and recreated the - IDs of its partitions would be different. + :param partition_id: The partition ID identifying the partition. :type partition_id: str """ diff --git a/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info_py3.py index b23c637916a6..f0f73352ad29 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_backup_configuration_info_py3.py @@ -33,11 +33,7 @@ class PartitionBackupConfigurationInfo(BackupConfigurationInfo): :param service_name: The full name of the service with 'fabric:' URI scheme. :type service_name: str - :param partition_id: An internal ID used by Service Fabric to uniquely - identify a partition. This is a randomly generated GUID when the service - was created. The partition ID is unique and does not change for the - lifetime of the service. If the same service was deleted and recreated the - IDs of its partitions would be different. + :param partition_id: The partition ID identifying the partition. 
:type partition_id: str """ diff --git a/azure-servicefabric/azure/servicefabric/models/partition_backup_entity.py b/azure-servicefabric/azure/servicefabric/models/partition_backup_entity.py index ba25e0443e45..b53c33d02871 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_backup_entity.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_backup_entity.py @@ -22,11 +22,7 @@ class PartitionBackupEntity(BackupEntity): :param service_name: The full name of the service with 'fabric:' URI scheme. :type service_name: str - :param partition_id: An internal ID used by Service Fabric to uniquely - identify a partition. This is a randomly generated GUID when the service - was created. The partition ID is unique and does not change for the - lifetime of the service. If the same service was deleted and recreated the - IDs of its partitions would be different. + :param partition_id: The partition ID identifying the partition. :type partition_id: str """ diff --git a/azure-servicefabric/azure/servicefabric/models/partition_backup_entity_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_backup_entity_py3.py index 806668396429..7d5658732337 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_backup_entity_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_backup_entity_py3.py @@ -22,11 +22,7 @@ class PartitionBackupEntity(BackupEntity): :param service_name: The full name of the service with 'fabric:' URI scheme. :type service_name: str - :param partition_id: An internal ID used by Service Fabric to uniquely - identify a partition. This is a randomly generated GUID when the service - was created. The partition ID is unique and does not change for the - lifetime of the service. If the same service was deleted and recreated the - IDs of its partitions would be different. + :param partition_id: The partition ID identifying the partition. 
:type partition_id: str """ diff --git a/azure-servicefabric/azure/servicefabric/models/partition_event.py b/azure-servicefabric/azure/servicefabric/models/partition_event.py index 9bdc6dbce7ce..9ab8f5e8173a 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_event.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_event.py @@ -16,15 +16,18 @@ class PartitionEvent(FabricEvent): """Represents the base for all Partition Events. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: PartitionAnalysisEvent, PartitionHealthReportCreatedEvent, - PartitionHealthReportExpiredEvent, PartitionReconfigurationCompletedEvent, - ChaosMoveSecondaryFaultScheduledEvent, ChaosMovePrimaryFaultScheduledEvent + sub-classes are: PartitionAnalysisEvent, PartitionNewHealthReportEvent, + PartitionHealthReportExpiredEvent, PartitionReconfiguredEvent, + ChaosPartitionSecondaryMoveScheduledEvent, + ChaosPartitionPrimaryMoveScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -49,6 +52,7 @@ class PartitionEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -56,7 +60,7 @@ class PartitionEvent(FabricEvent): } _subtype_map = { - 'kind': {'PartitionAnalysisEvent': 'PartitionAnalysisEvent', 'PartitionHealthReportCreated': 'PartitionHealthReportCreatedEvent', 'PartitionHealthReportExpired': 'PartitionHealthReportExpiredEvent', 'PartitionReconfigurationCompleted': 'PartitionReconfigurationCompletedEvent', 'ChaosMoveSecondaryFaultScheduled': 'ChaosMoveSecondaryFaultScheduledEvent', 'ChaosMovePrimaryFaultScheduled': 'ChaosMovePrimaryFaultScheduledEvent'} + 'kind': {'PartitionAnalysisEvent': 'PartitionAnalysisEvent', 'PartitionNewHealthReport': 'PartitionNewHealthReportEvent', 'PartitionHealthReportExpired': 'PartitionHealthReportExpiredEvent', 'PartitionReconfigured': 'PartitionReconfiguredEvent', 'ChaosPartitionSecondaryMoveScheduled': 'ChaosPartitionSecondaryMoveScheduledEvent', 'ChaosPartitionPrimaryMoveScheduled': 'ChaosPartitionPrimaryMoveScheduledEvent'} } def __init__(self, **kwargs): diff --git a/azure-servicefabric/azure/servicefabric/models/partition_event_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_event_py3.py index c4f66b909bcd..dc3cdd9fc4b5 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_event_py3.py @@ -16,15 +16,18 @@ class PartitionEvent(FabricEvent): """Represents the base for all Partition Events. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: PartitionAnalysisEvent, PartitionHealthReportCreatedEvent, - PartitionHealthReportExpiredEvent, PartitionReconfigurationCompletedEvent, - ChaosMoveSecondaryFaultScheduledEvent, ChaosMovePrimaryFaultScheduledEvent + sub-classes are: PartitionAnalysisEvent, PartitionNewHealthReportEvent, + PartitionHealthReportExpiredEvent, PartitionReconfiguredEvent, + ChaosPartitionSecondaryMoveScheduledEvent, + ChaosPartitionPrimaryMoveScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -49,6 +52,7 @@ class PartitionEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -56,10 +60,10 @@ class PartitionEvent(FabricEvent): } _subtype_map = { - 'kind': {'PartitionAnalysisEvent': 'PartitionAnalysisEvent', 'PartitionHealthReportCreated': 'PartitionHealthReportCreatedEvent', 'PartitionHealthReportExpired': 'PartitionHealthReportExpiredEvent', 'PartitionReconfigurationCompleted': 'PartitionReconfigurationCompletedEvent', 'ChaosMoveSecondaryFaultScheduled': 'ChaosMoveSecondaryFaultScheduledEvent', 'ChaosMovePrimaryFaultScheduled': 'ChaosMovePrimaryFaultScheduledEvent'} + 'kind': {'PartitionAnalysisEvent': 'PartitionAnalysisEvent', 'PartitionNewHealthReport': 'PartitionNewHealthReportEvent', 'PartitionHealthReportExpired': 'PartitionHealthReportExpiredEvent', 'PartitionReconfigured': 'PartitionReconfiguredEvent', 
'ChaosPartitionSecondaryMoveScheduled': 'ChaosPartitionSecondaryMoveScheduledEvent', 'ChaosPartitionPrimaryMoveScheduled': 'ChaosPartitionPrimaryMoveScheduledEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(PartitionEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(PartitionEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.partition_id = partition_id self.kind = 'PartitionEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event.py index a7129a1f0fd4..3f6144ef284a 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event.py @@ -20,6 +20,8 @@ class PartitionHealthReportExpiredEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -69,6 +71,7 @@ class PartitionHealthReportExpiredEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event_py3.py index 175c3fe16f12..69430a961d1d 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class PartitionHealthReportExpiredEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -69,6 +71,7 @@ class PartitionHealthReportExpiredEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -83,8 +86,8 @@ class PartitionHealthReportExpiredEvent(PartitionEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(PartitionHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(PartitionHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) self.source_id = source_id self.property = property self.health_state = health_state diff --git a/azure-servicefabric/azure/servicefabric/models/partition_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/partition_new_health_report_event.py similarity index 93% rename from 
azure-servicefabric/azure/servicefabric/models/partition_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/partition_new_health_report_event.py index 5a33ab9e0a06..b51931d36f4a 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_new_health_report_event.py @@ -12,7 +12,7 @@ from .partition_event import PartitionEvent -class PartitionHealthReportCreatedEvent(PartitionEvent): +class PartitionNewHealthReportEvent(PartitionEvent): """Partition Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class PartitionHealthReportCreatedEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -69,6 +71,7 @@ class PartitionHealthReportCreatedEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -84,7 +87,7 @@ class PartitionHealthReportCreatedEvent(PartitionEvent): } def __init__(self, **kwargs): - super(PartitionHealthReportCreatedEvent, self).__init__(**kwargs) + super(PartitionNewHealthReportEvent, self).__init__(**kwargs) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) self.health_state = kwargs.get('health_state', None) @@ -93,4 +96,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = 
kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'PartitionHealthReportCreated' + self.kind = 'PartitionNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/partition_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_new_health_report_event_py3.py similarity index 88% rename from azure-servicefabric/azure/servicefabric/models/partition_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/partition_new_health_report_event_py3.py index 0327534671c9..e03c92aad9b1 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .partition_event_py3 import PartitionEvent -class PartitionHealthReportCreatedEvent(PartitionEvent): +class PartitionNewHealthReportEvent(PartitionEvent): """Partition Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class PartitionHealthReportCreatedEvent(PartitionEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -69,6 +71,7 @@ class PartitionHealthReportCreatedEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -83,8 +86,8 @@ class PartitionHealthReportCreatedEvent(PartitionEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(PartitionHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(PartitionNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) self.source_id = source_id self.property = property self.health_state = health_state @@ -93,4 +96,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, sou self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 
'PartitionHealthReportCreated' + self.kind = 'PartitionNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event.py b/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event.py index 414e39f34ebe..e6aefebd260d 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event.py @@ -20,6 +20,8 @@ class PartitionPrimaryMoveAnalysisEvent(PartitionAnalysisEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class PartitionPrimaryMoveAnalysisEvent(PartitionAnalysisEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event_py3.py index 673562923e5b..a537c9501904 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_primary_move_analysis_event_py3.py @@ -20,6 +20,8 @@ class PartitionPrimaryMoveAnalysisEvent(PartitionAnalysisEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. 
+ :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +64,7 @@ class PartitionPrimaryMoveAnalysisEvent(PartitionAnalysisEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -74,8 +77,8 @@ class PartitionPrimaryMoveAnalysisEvent(PartitionAnalysisEvent): 'relevant_traces': {'key': 'RelevantTraces', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, metadata, when_move_completed, previous_node: str, current_node: str, move_reason: str, relevant_traces: str, has_correlated_events: bool=None, **kwargs) -> None: - super(PartitionPrimaryMoveAnalysisEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, metadata=metadata, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, metadata, when_move_completed, previous_node: str, current_node: str, move_reason: str, relevant_traces: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(PartitionPrimaryMoveAnalysisEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, metadata=metadata, **kwargs) self.when_move_completed = when_move_completed self.previous_node = previous_node self.current_node = current_node diff --git a/azure-servicefabric/azure/servicefabric/models/partition_reconfiguration_completed_event.py b/azure-servicefabric/azure/servicefabric/models/partition_reconfigured_event.py similarity index 94% rename from 
azure-servicefabric/azure/servicefabric/models/partition_reconfiguration_completed_event.py rename to azure-servicefabric/azure/servicefabric/models/partition_reconfigured_event.py index 36b890b365e4..94dd7abdabb0 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_reconfiguration_completed_event.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_reconfigured_event.py @@ -12,14 +12,16 @@ from .partition_event import PartitionEvent -class PartitionReconfigurationCompletedEvent(PartitionEvent): - """Partition Reconfiguration Completed event. +class PartitionReconfiguredEvent(PartitionEvent): + """Partition Reconfiguration event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -83,6 +85,7 @@ class PartitionReconfigurationCompletedEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -103,7 +106,7 @@ class PartitionReconfigurationCompletedEvent(PartitionEvent): } def __init__(self, **kwargs): - super(PartitionReconfigurationCompletedEvent, self).__init__(**kwargs) + super(PartitionReconfiguredEvent, self).__init__(**kwargs) self.node_name = kwargs.get('node_name', None) self.node_instance_id = kwargs.get('node_instance_id', None) self.service_type = kwargs.get('service_type', None) @@ -117,4 +120,4 @@ def __init__(self, **kwargs): self.phase3_duration_ms = kwargs.get('phase3_duration_ms', None) self.phase4_duration_ms = 
kwargs.get('phase4_duration_ms', None) self.total_duration_ms = kwargs.get('total_duration_ms', None) - self.kind = 'PartitionReconfigurationCompleted' + self.kind = 'PartitionReconfigured' diff --git a/azure-servicefabric/azure/servicefabric/models/partition_reconfiguration_completed_event_py3.py b/azure-servicefabric/azure/servicefabric/models/partition_reconfigured_event_py3.py similarity index 91% rename from azure-servicefabric/azure/servicefabric/models/partition_reconfiguration_completed_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/partition_reconfigured_event_py3.py index 3a72ca7a93aa..d414c34438a4 100644 --- a/azure-servicefabric/azure/servicefabric/models/partition_reconfiguration_completed_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/partition_reconfigured_event_py3.py @@ -12,14 +12,16 @@ from .partition_event_py3 import PartitionEvent -class PartitionReconfigurationCompletedEvent(PartitionEvent): - """Partition Reconfiguration Completed event. +class PartitionReconfiguredEvent(PartitionEvent): + """Partition Reconfiguration event. All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -83,6 +85,7 @@ class PartitionReconfigurationCompletedEvent(PartitionEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -102,8 +105,8 @@ class PartitionReconfigurationCompletedEvent(PartitionEvent): 'total_duration_ms': {'key': 'TotalDurationMs', 'type': 'float'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, node_name: str, node_instance_id: str, service_type: str, cc_epoch_data_loss_version: int, cc_epoch_config_version: int, reconfig_type: str, result: str, phase0_duration_ms: float, phase1_duration_ms: float, phase2_duration_ms: float, phase3_duration_ms: float, phase4_duration_ms: float, total_duration_ms: float, has_correlated_events: bool=None, **kwargs) -> None: - super(PartitionReconfigurationCompletedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, node_name: str, node_instance_id: str, service_type: str, cc_epoch_data_loss_version: int, cc_epoch_config_version: int, reconfig_type: str, result: str, phase0_duration_ms: float, phase1_duration_ms: float, phase2_duration_ms: float, phase3_duration_ms: float, phase4_duration_ms: float, total_duration_ms: float, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(PartitionReconfiguredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, **kwargs) self.node_name = node_name 
self.node_instance_id = node_instance_id self.service_type = service_type @@ -117,4 +120,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, nod self.phase3_duration_ms = phase3_duration_ms self.phase4_duration_ms = phase4_duration_ms self.total_duration_ms = total_duration_ms - self.kind = 'PartitionReconfigurationCompleted' + self.kind = 'PartitionReconfigured' diff --git a/azure-servicefabric/azure/servicefabric/models/reliable_collections_ref.py b/azure-servicefabric/azure/servicefabric/models/reliable_collections_ref.py new file mode 100644 index 000000000000..0bdbaa8515d5 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/reliable_collections_ref.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ReliableCollectionsRef(Model): + """Specifying this parameter adds support for reliable collections. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of ReliableCollection resource. Right now it's + not used and you can use any string. + :type name: str + :param do_not_persist_state: False (the default) if ReliableCollections + state is persisted to disk as usual. True if you do not want to persist + state, in which case replication is still enabled and you can use + ReliableCollections as distributed cache. 
+ :type do_not_persist_state: bool + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'do_not_persist_state': {'key': 'doNotPersistState', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(ReliableCollectionsRef, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.do_not_persist_state = kwargs.get('do_not_persist_state', None) diff --git a/azure-servicefabric/azure/servicefabric/models/reliable_collections_ref_py3.py b/azure-servicefabric/azure/servicefabric/models/reliable_collections_ref_py3.py new file mode 100644 index 000000000000..c517f34c0b8a --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/reliable_collections_ref_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ReliableCollectionsRef(Model): + """Specifying this parameter adds support for reliable collections. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of ReliableCollection resource. Right now it's + not used and you can use any string. + :type name: str + :param do_not_persist_state: False (the default) if ReliableCollections + state is persisted to disk as usual. True if you do not want to persist + state, in which case replication is still enabled and you can use + ReliableCollections as distributed cache. 
+ :type do_not_persist_state: bool + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'do_not_persist_state': {'key': 'doNotPersistState', 'type': 'bool'}, + } + + def __init__(self, *, name: str, do_not_persist_state: bool=None, **kwargs) -> None: + super(ReliableCollectionsRef, self).__init__(**kwargs) + self.name = name + self.do_not_persist_state = do_not_persist_state diff --git a/azure-servicefabric/azure/servicefabric/models/replica_event.py b/azure-servicefabric/azure/servicefabric/models/replica_event.py index b05166ea6630..62dbd958df29 100644 --- a/azure-servicefabric/azure/servicefabric/models/replica_event.py +++ b/azure-servicefabric/azure/servicefabric/models/replica_event.py @@ -16,19 +16,19 @@ class ReplicaEvent(FabricEvent): """Represents the base for all Replica Events. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: StatefulReplicaHealthReportCreatedEvent, + sub-classes are: StatefulReplicaNewHealthReportEvent, StatefulReplicaHealthReportExpiredEvent, - StatelessReplicaHealthReportCreatedEvent, + StatelessReplicaNewHealthReportEvent, StatelessReplicaHealthReportExpiredEvent, - ChaosRemoveReplicaFaultScheduledEvent, - ChaosRemoveReplicaFaultCompletedEvent, - ChaosRestartReplicaFaultScheduledEvent + ChaosReplicaRemovalScheduledEvent, ChaosReplicaRestartScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +62,7 @@ class ReplicaEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -70,7 +71,7 @@ class ReplicaEvent(FabricEvent): } _subtype_map = { - 'kind': {'StatefulReplicaHealthReportCreated': 'StatefulReplicaHealthReportCreatedEvent', 'StatefulReplicaHealthReportExpired': 'StatefulReplicaHealthReportExpiredEvent', 'StatelessReplicaHealthReportCreated': 'StatelessReplicaHealthReportCreatedEvent', 'StatelessReplicaHealthReportExpired': 'StatelessReplicaHealthReportExpiredEvent', 'ChaosRemoveReplicaFaultScheduled': 'ChaosRemoveReplicaFaultScheduledEvent', 'ChaosRemoveReplicaFaultCompleted': 'ChaosRemoveReplicaFaultCompletedEvent', 'ChaosRestartReplicaFaultScheduled': 'ChaosRestartReplicaFaultScheduledEvent'} + 'kind': {'StatefulReplicaNewHealthReport': 'StatefulReplicaNewHealthReportEvent', 'StatefulReplicaHealthReportExpired': 'StatefulReplicaHealthReportExpiredEvent', 'StatelessReplicaNewHealthReport': 'StatelessReplicaNewHealthReportEvent', 'StatelessReplicaHealthReportExpired': 'StatelessReplicaHealthReportExpiredEvent', 'ChaosReplicaRemovalScheduled': 'ChaosReplicaRemovalScheduledEvent', 'ChaosReplicaRestartScheduled': 'ChaosReplicaRestartScheduledEvent'} } def __init__(self, **kwargs): diff --git a/azure-servicefabric/azure/servicefabric/models/replica_event_py3.py b/azure-servicefabric/azure/servicefabric/models/replica_event_py3.py index d5fc9a5fe2f1..e9f534258750 100644 --- a/azure-servicefabric/azure/servicefabric/models/replica_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/replica_event_py3.py @@ -16,19 +16,19 @@ class ReplicaEvent(FabricEvent): """Represents the base 
for all Replica Events. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: StatefulReplicaHealthReportCreatedEvent, + sub-classes are: StatefulReplicaNewHealthReportEvent, StatefulReplicaHealthReportExpiredEvent, - StatelessReplicaHealthReportCreatedEvent, + StatelessReplicaNewHealthReportEvent, StatelessReplicaHealthReportExpiredEvent, - ChaosRemoveReplicaFaultScheduledEvent, - ChaosRemoveReplicaFaultCompletedEvent, - ChaosRestartReplicaFaultScheduledEvent + ChaosReplicaRemovalScheduledEvent, ChaosReplicaRestartScheduledEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -62,6 +62,7 @@ class ReplicaEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -70,11 +71,11 @@ class ReplicaEvent(FabricEvent): } _subtype_map = { - 'kind': {'StatefulReplicaHealthReportCreated': 'StatefulReplicaHealthReportCreatedEvent', 'StatefulReplicaHealthReportExpired': 'StatefulReplicaHealthReportExpiredEvent', 'StatelessReplicaHealthReportCreated': 'StatelessReplicaHealthReportCreatedEvent', 'StatelessReplicaHealthReportExpired': 'StatelessReplicaHealthReportExpiredEvent', 'ChaosRemoveReplicaFaultScheduled': 'ChaosRemoveReplicaFaultScheduledEvent', 'ChaosRemoveReplicaFaultCompleted': 'ChaosRemoveReplicaFaultCompletedEvent', 'ChaosRestartReplicaFaultScheduled': 'ChaosRestartReplicaFaultScheduledEvent'} + 'kind': 
{'StatefulReplicaNewHealthReport': 'StatefulReplicaNewHealthReportEvent', 'StatefulReplicaHealthReportExpired': 'StatefulReplicaHealthReportExpiredEvent', 'StatelessReplicaNewHealthReport': 'StatelessReplicaNewHealthReportEvent', 'StatelessReplicaHealthReportExpired': 'StatelessReplicaHealthReportExpiredEvent', 'ChaosReplicaRemovalScheduled': 'ChaosReplicaRemovalScheduledEvent', 'ChaosReplicaRestartScheduled': 'ChaosReplicaRestartScheduledEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, has_correlated_events: bool=None, **kwargs) -> None: - super(ReplicaEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ReplicaEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.partition_id = partition_id self.replica_id = replica_id self.kind = 'ReplicaEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/restore_progress_info.py b/azure-servicefabric/azure/servicefabric/models/restore_progress_info.py index 8f893348ff67..9d6319252f5e 100644 --- a/azure-servicefabric/azure/servicefabric/models/restore_progress_info.py +++ b/azure-servicefabric/azure/servicefabric/models/restore_progress_info.py @@ -23,7 +23,7 @@ class RestoreProgressInfo(Model): :type time_stamp_utc: datetime :param restored_epoch: Describes the epoch at which the partition is restored. - :type restored_epoch: ~azure.servicefabric.models.BackupEpoch + :type restored_epoch: ~azure.servicefabric.models.Epoch :param restored_lsn: Restored LSN. 
:type restored_lsn: str :param failure_error: Denotes the failure encountered in performing @@ -34,7 +34,7 @@ class RestoreProgressInfo(Model): _attribute_map = { 'restore_state': {'key': 'RestoreState', 'type': 'str'}, 'time_stamp_utc': {'key': 'TimeStampUtc', 'type': 'iso-8601'}, - 'restored_epoch': {'key': 'RestoredEpoch', 'type': 'BackupEpoch'}, + 'restored_epoch': {'key': 'RestoredEpoch', 'type': 'Epoch'}, 'restored_lsn': {'key': 'RestoredLsn', 'type': 'str'}, 'failure_error': {'key': 'FailureError', 'type': 'FabricErrorError'}, } diff --git a/azure-servicefabric/azure/servicefabric/models/restore_progress_info_py3.py b/azure-servicefabric/azure/servicefabric/models/restore_progress_info_py3.py index ec761d523099..2274e6bd3450 100644 --- a/azure-servicefabric/azure/servicefabric/models/restore_progress_info_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/restore_progress_info_py3.py @@ -23,7 +23,7 @@ class RestoreProgressInfo(Model): :type time_stamp_utc: datetime :param restored_epoch: Describes the epoch at which the partition is restored. - :type restored_epoch: ~azure.servicefabric.models.BackupEpoch + :type restored_epoch: ~azure.servicefabric.models.Epoch :param restored_lsn: Restored LSN. 
:type restored_lsn: str :param failure_error: Denotes the failure encountered in performing @@ -34,7 +34,7 @@ class RestoreProgressInfo(Model): _attribute_map = { 'restore_state': {'key': 'RestoreState', 'type': 'str'}, 'time_stamp_utc': {'key': 'TimeStampUtc', 'type': 'iso-8601'}, - 'restored_epoch': {'key': 'RestoredEpoch', 'type': 'BackupEpoch'}, + 'restored_epoch': {'key': 'RestoredEpoch', 'type': 'Epoch'}, 'restored_lsn': {'key': 'RestoredLsn', 'type': 'str'}, 'failure_error': {'key': 'FailureError', 'type': 'FabricErrorError'}, } diff --git a/azure-servicefabric/azure/servicefabric/models/retention_policy_description.py b/azure-servicefabric/azure/servicefabric/models/retention_policy_description.py new file mode 100644 index 000000000000..b1a6a7dfa5b2 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/retention_policy_description.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RetentionPolicyDescription(Model): + """Describes the retention policy configured. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BasicRetentionPolicyDescription + + All required parameters must be populated in order to send to Azure. + + :param retention_policy_type: Required. Constant filled by server. 
+ :type retention_policy_type: str + """ + + _validation = { + 'retention_policy_type': {'required': True}, + } + + _attribute_map = { + 'retention_policy_type': {'key': 'RetentionPolicyType', 'type': 'str'}, + } + + _subtype_map = { + 'retention_policy_type': {'Basic': 'BasicRetentionPolicyDescription'} + } + + def __init__(self, **kwargs): + super(RetentionPolicyDescription, self).__init__(**kwargs) + self.retention_policy_type = None diff --git a/azure-servicefabric/azure/servicefabric/models/retention_policy_description_py3.py b/azure-servicefabric/azure/servicefabric/models/retention_policy_description_py3.py new file mode 100644 index 000000000000..6f775cf7d94a --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/retention_policy_description_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RetentionPolicyDescription(Model): + """Describes the retention policy configured. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BasicRetentionPolicyDescription + + All required parameters must be populated in order to send to Azure. + + :param retention_policy_type: Required. Constant filled by server. 
+ :type retention_policy_type: str + """ + + _validation = { + 'retention_policy_type': {'required': True}, + } + + _attribute_map = { + 'retention_policy_type': {'key': 'RetentionPolicyType', 'type': 'str'}, + } + + _subtype_map = { + 'retention_policy_type': {'Basic': 'BasicRetentionPolicyDescription'} + } + + def __init__(self, **kwargs) -> None: + super(RetentionPolicyDescription, self).__init__(**kwargs) + self.retention_policy_type = None diff --git a/azure-servicefabric/azure/servicefabric/models/secret_resource_description.py b/azure-servicefabric/azure/servicefabric/models/secret_resource_description.py new file mode 100644 index 000000000000..a4e4101293fd --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_resource_description.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretResourceDescription(Model): + """This type describes a secret resource. + + All required parameters must be populated in order to send to Azure. + + :param properties: Required. Describes the properties of a secret + resource. + :type properties: ~azure.servicefabric.models.SecretResourceProperties + :param name: Required. Name of the Secret resource. 
+ :type name: str + """ + + _validation = { + 'properties': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'SecretResourceProperties'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecretResourceDescription, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.name = kwargs.get('name', None) diff --git a/azure-servicefabric/azure/servicefabric/models/secret_resource_description_py3.py b/azure-servicefabric/azure/servicefabric/models/secret_resource_description_py3.py new file mode 100644 index 000000000000..c31454aa2e21 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_resource_description_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretResourceDescription(Model): + """This type describes a secret resource. + + All required parameters must be populated in order to send to Azure. + + :param properties: Required. Describes the properties of a secret + resource. + :type properties: ~azure.servicefabric.models.SecretResourceProperties + :param name: Required. Name of the Secret resource. 
+ :type name: str + """ + + _validation = { + 'properties': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'SecretResourceProperties'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, properties, name: str, **kwargs) -> None: + super(SecretResourceDescription, self).__init__(**kwargs) + self.properties = properties + self.name = name diff --git a/azure-servicefabric/azure/servicefabric/models/secret_resource_properties.py b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties.py new file mode 100644 index 000000000000..84868c395086 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_resource_properties_base import SecretResourcePropertiesBase + + +class SecretResourceProperties(SecretResourcePropertiesBase): + """Describes the properties of a secret resource. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: InlinedValueSecretResourceProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + :param description: User readable description of the secret. + :type description: str + :ivar status: Status of the resource. 
Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the secret. + :vartype status_details: str + :param content_type: The type of the content stored in the secret value. + The value of this property is opaque to Service Fabric. Once set, the + value of this property cannot be changed. + :type content_type: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'content_type': {'key': 'contentType', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'inlinedValue': 'InlinedValueSecretResourceProperties'} + } + + def __init__(self, **kwargs): + super(SecretResourceProperties, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.status = None + self.status_details = None + self.content_type = kwargs.get('content_type', None) + self.kind = 'SecretResourceProperties' diff --git a/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_base.py b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_base.py new file mode 100644 index 000000000000..ac72b82dbc82 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_base.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretResourcePropertiesBase(Model): + """This type describes the properties of a secret resource, including its + kind. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecretResourceProperties + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'SecretResourceProperties': 'SecretResourceProperties'} + } + + def __init__(self, **kwargs): + super(SecretResourcePropertiesBase, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_base_py3.py b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_base_py3.py new file mode 100644 index 000000000000..d0044f2ebcf4 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_base_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretResourcePropertiesBase(Model): + """This type describes the properties of a secret resource, including its + kind. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecretResourceProperties + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. Constant filled by server. + :type kind: str + """ + + _validation = { + 'kind': {'required': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'SecretResourceProperties': 'SecretResourceProperties'} + } + + def __init__(self, **kwargs) -> None: + super(SecretResourcePropertiesBase, self).__init__(**kwargs) + self.kind = None diff --git a/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_py3.py new file mode 100644 index 000000000000..f49f34420981 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_resource_properties_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_resource_properties_base_py3 import SecretResourcePropertiesBase + + +class SecretResourceProperties(SecretResourcePropertiesBase): + """Describes the properties of a secret resource. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: InlinedValueSecretResourceProperties + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param kind: Required. 
Constant filled by server. + :type kind: str + :param description: User readable description of the secret. + :type description: str + :ivar status: Status of the resource. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the secret. + :vartype status_details: str + :param content_type: The type of the content stored in the secret value. + The value of this property is opaque to Service Fabric. Once set, the + value of this property cannot be changed. + :type content_type: str + """ + + _validation = { + 'kind': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + } + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'content_type': {'key': 'contentType', 'type': 'str'}, + } + + _subtype_map = { + 'kind': {'inlinedValue': 'InlinedValueSecretResourceProperties'} + } + + def __init__(self, *, description: str=None, content_type: str=None, **kwargs) -> None: + super(SecretResourceProperties, self).__init__(**kwargs) + self.description = description + self.status = None + self.status_details = None + self.content_type = content_type + self.kind = 'SecretResourceProperties' diff --git a/azure-servicefabric/azure/servicefabric/models/secret_value.py b/azure-servicefabric/azure/servicefabric/models/secret_value.py new file mode 100644 index 000000000000..d7f632582e37 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_value.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretValue(Model): + """This type represents the unencrypted value of the secret. + + :param value: The actual value of the secret. + :type value: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecretValue, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/azure-servicefabric/azure/servicefabric/models/secret_value_properties.py b/azure-servicefabric/azure/servicefabric/models/secret_value_properties.py new file mode 100644 index 000000000000..2d26acf9d5db --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_value_properties.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretValueProperties(Model): + """This type describes properties of secret value resource. + + :param value: The actual value of the secret. 
+ :type value: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecretValueProperties, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/azure-servicefabric/azure/servicefabric/models/secret_value_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/secret_value_properties_py3.py new file mode 100644 index 000000000000..4b422eb9a5a7 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_value_properties_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretValueProperties(Model): + """This type describes properties of secret value resource. + + :param value: The actual value of the secret. + :type value: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, *, value: str=None, **kwargs) -> None: + super(SecretValueProperties, self).__init__(**kwargs) + self.value = value diff --git a/azure-servicefabric/azure/servicefabric/models/secret_value_py3.py b/azure-servicefabric/azure/servicefabric/models/secret_value_py3.py new file mode 100644 index 000000000000..d196402e5fb7 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_value_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretValue(Model): + """This type represents the unencrypted value of the secret. + + :param value: The actual value of the secret. + :type value: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, *, value: str=None, **kwargs) -> None: + super(SecretValue, self).__init__(**kwargs) + self.value = value diff --git a/azure-servicefabric/azure/servicefabric/models/secret_value_resource_description.py b/azure-servicefabric/azure/servicefabric/models/secret_value_resource_description.py new file mode 100644 index 000000000000..35809565f0ca --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_value_resource_description.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretValueResourceDescription(Model): + """This type describes a value of a secret resource. The name of this resource + is the version identifier corresponding to this secret value. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Version identifier of the secret value. + :type name: str + :param value: The actual value of the secret. 
+ :type value: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecretValueResourceDescription, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) diff --git a/azure-servicefabric/azure/servicefabric/models/secret_value_resource_description_py3.py b/azure-servicefabric/azure/servicefabric/models/secret_value_resource_description_py3.py new file mode 100644 index 000000000000..70866a9369ac --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/secret_value_resource_description_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretValueResourceDescription(Model): + """This type describes a value of a secret resource. The name of this resource + is the version identifier corresponding to this secret value. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Version identifier of the secret value. + :type name: str + :param value: The actual value of the secret. 
+ :type value: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + } + + def __init__(self, *, name: str, value: str=None, **kwargs) -> None: + super(SecretValueResourceDescription, self).__init__(**kwargs) + self.name = name + self.value = value diff --git a/azure-servicefabric/azure/servicefabric/models/service_created_event.py b/azure-servicefabric/azure/servicefabric/models/service_created_event.py index 41507f2da246..33d46d28d090 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/service_created_event.py @@ -20,6 +20,8 @@ class ServiceCreatedEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -80,6 +82,7 @@ class ServiceCreatedEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py index 85da62411112..1843c8595026 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_created_event_py3.py @@ -20,6 +20,8 @@ class ServiceCreatedEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. 
:type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -80,6 +82,7 @@ class ServiceCreatedEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -96,8 +99,8 @@ class ServiceCreatedEvent(ServiceEvent): 'partition_id': {'key': 'PartitionId', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, service_type_name: str, application_name: str, application_type_name: str, service_instance: int, is_stateful: bool, partition_count: int, target_replica_set_size: int, min_replica_set_size: int, service_package_version: str, partition_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ServiceCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, service_type_name: str, application_name: str, application_type_name: str, service_instance: int, is_stateful: bool, partition_count: int, target_replica_set_size: int, min_replica_set_size: int, service_package_version: str, partition_id: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ServiceCreatedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) self.service_type_name = service_type_name self.application_name = application_name self.application_type_name = 
application_type_name diff --git a/azure-servicefabric/azure/servicefabric/models/service_deleted_event.py b/azure-servicefabric/azure/servicefabric/models/service_deleted_event.py index fa14df02ecee..7b7642f8fbf0 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_deleted_event.py +++ b/azure-servicefabric/azure/servicefabric/models/service_deleted_event.py @@ -20,6 +20,8 @@ class ServiceDeletedEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -73,6 +75,7 @@ class ServiceDeletedEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/service_deleted_event_py3.py b/azure-servicefabric/azure/servicefabric/models/service_deleted_event_py3.py index 4bada4904170..58357e388118 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_deleted_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_deleted_event_py3.py @@ -20,6 +20,8 @@ class ServiceDeletedEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -73,6 +75,7 @@ class ServiceDeletedEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -88,8 +91,8 @@ class ServiceDeletedEvent(ServiceEvent): 'service_package_version': {'key': 'ServicePackageVersion', 'type': 'str'}, } - def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, service_type_name: str, application_name: str, application_type_name: str, service_instance: int, is_stateful: bool, partition_count: int, target_replica_set_size: int, min_replica_set_size: int, service_package_version: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ServiceDeletedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, service_type_name: str, application_name: str, application_type_name: str, service_instance: int, is_stateful: bool, partition_count: int, target_replica_set_size: int, min_replica_set_size: int, service_package_version: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ServiceDeletedEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) self.service_type_name = service_type_name self.application_name = application_name self.application_type_name = application_type_name diff --git a/azure-servicefabric/azure/servicefabric/models/service_event.py b/azure-servicefabric/azure/servicefabric/models/service_event.py index 
8c7937b204c5..8babe820978f 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_event.py +++ b/azure-servicefabric/azure/servicefabric/models/service_event.py @@ -17,13 +17,15 @@ class ServiceEvent(FabricEvent): You probably want to use the sub-classes and not this class directly. Known sub-classes are: ServiceCreatedEvent, ServiceDeletedEvent, - ServiceHealthReportCreatedEvent, ServiceHealthReportExpiredEvent + ServiceNewHealthReportEvent, ServiceHealthReportExpiredEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -50,6 +52,7 @@ class ServiceEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -57,7 +60,7 @@ class ServiceEvent(FabricEvent): } _subtype_map = { - 'kind': {'ServiceCreated': 'ServiceCreatedEvent', 'ServiceDeleted': 'ServiceDeletedEvent', 'ServiceHealthReportCreated': 'ServiceHealthReportCreatedEvent', 'ServiceHealthReportExpired': 'ServiceHealthReportExpiredEvent'} + 'kind': {'ServiceCreated': 'ServiceCreatedEvent', 'ServiceDeleted': 'ServiceDeletedEvent', 'ServiceNewHealthReport': 'ServiceNewHealthReportEvent', 'ServiceHealthReportExpired': 'ServiceHealthReportExpiredEvent'} } def __init__(self, **kwargs): diff --git a/azure-servicefabric/azure/servicefabric/models/service_event_py3.py b/azure-servicefabric/azure/servicefabric/models/service_event_py3.py index 1be7606566c1..da126aba2326 100644 --- 
a/azure-servicefabric/azure/servicefabric/models/service_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_event_py3.py @@ -17,13 +17,15 @@ class ServiceEvent(FabricEvent): You probably want to use the sub-classes and not this class directly. Known sub-classes are: ServiceCreatedEvent, ServiceDeletedEvent, - ServiceHealthReportCreatedEvent, ServiceHealthReportExpiredEvent + ServiceNewHealthReportEvent, ServiceHealthReportExpiredEvent All required parameters must be populated in order to send to Azure. :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -50,6 +52,7 @@ class ServiceEvent(FabricEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -57,10 +60,10 @@ class ServiceEvent(FabricEvent): } _subtype_map = { - 'kind': {'ServiceCreated': 'ServiceCreatedEvent', 'ServiceDeleted': 'ServiceDeletedEvent', 'ServiceHealthReportCreated': 'ServiceHealthReportCreatedEvent', 'ServiceHealthReportExpired': 'ServiceHealthReportExpiredEvent'} + 'kind': {'ServiceCreated': 'ServiceCreatedEvent', 'ServiceDeleted': 'ServiceDeletedEvent', 'ServiceNewHealthReport': 'ServiceNewHealthReportEvent', 'ServiceHealthReportExpired': 'ServiceHealthReportExpiredEvent'} } - def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, has_correlated_events: bool=None, **kwargs) -> None: - super(ServiceEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, 
**kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ServiceEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, **kwargs) self.service_id = service_id self.kind = 'ServiceEvent' diff --git a/azure-servicefabric/azure/servicefabric/models/service_fabric_client_ap_is_enums.py b/azure-servicefabric/azure/servicefabric/models/service_fabric_client_ap_is_enums.py index a3e57c5884ec..cbbe829e16e8 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_fabric_client_ap_is_enums.py +++ b/azure-servicefabric/azure/servicefabric/models/service_fabric_client_ap_is_enums.py @@ -12,13 +12,6 @@ from enum import Enum -class ApplicationDefinitionKind(str, Enum): - - invalid = "Invalid" #: Indicates the application definition kind is invalid. All Service Fabric enumerations have the invalid type. The value is 65535. - service_fabric_application_description = "ServiceFabricApplicationDescription" #: Indicates the application is defined by a Service Fabric application description. The value is 0. - compose = "Compose" #: Indicates the application is defined by compose file(s). The value is 1. - - class HealthState(str, Enum): invalid = "Invalid" #: Indicates an invalid health state. All Service Fabric enumerations have the invalid type. The value is zero. @@ -28,6 +21,101 @@ class HealthState(str, Enum): unknown = "Unknown" #: Indicates an unknown health status. The value is 65535. 
+class FabricErrorCodes(str, Enum): + + fabric_e_invalid_partition_key = "FABRIC_E_INVALID_PARTITION_KEY" + fabric_e_imagebuilder_validation_error = "FABRIC_E_IMAGEBUILDER_VALIDATION_ERROR" + fabric_e_invalid_address = "FABRIC_E_INVALID_ADDRESS" + fabric_e_application_not_upgrading = "FABRIC_E_APPLICATION_NOT_UPGRADING" + fabric_e_application_upgrade_validation_error = "FABRIC_E_APPLICATION_UPGRADE_VALIDATION_ERROR" + fabric_e_fabric_not_upgrading = "FABRIC_E_FABRIC_NOT_UPGRADING" + fabric_e_fabric_upgrade_validation_error = "FABRIC_E_FABRIC_UPGRADE_VALIDATION_ERROR" + fabric_e_invalid_configuration = "FABRIC_E_INVALID_CONFIGURATION" + fabric_e_invalid_name_uri = "FABRIC_E_INVALID_NAME_URI" + fabric_e_path_too_long = "FABRIC_E_PATH_TOO_LONG" + fabric_e_key_too_large = "FABRIC_E_KEY_TOO_LARGE" + fabric_e_service_affinity_chain_not_supported = "FABRIC_E_SERVICE_AFFINITY_CHAIN_NOT_SUPPORTED" + fabric_e_invalid_atomic_group = "FABRIC_E_INVALID_ATOMIC_GROUP" + fabric_e_value_empty = "FABRIC_E_VALUE_EMPTY" + fabric_e_node_not_found = "FABRIC_E_NODE_NOT_FOUND" + fabric_e_application_type_not_found = "FABRIC_E_APPLICATION_TYPE_NOT_FOUND" + fabric_e_application_not_found = "FABRIC_E_APPLICATION_NOT_FOUND" + fabric_e_service_type_not_found = "FABRIC_E_SERVICE_TYPE_NOT_FOUND" + fabric_e_service_does_not_exist = "FABRIC_E_SERVICE_DOES_NOT_EXIST" + fabric_e_service_type_template_not_found = "FABRIC_E_SERVICE_TYPE_TEMPLATE_NOT_FOUND" + fabric_e_configuration_section_not_found = "FABRIC_E_CONFIGURATION_SECTION_NOT_FOUND" + fabric_e_partition_not_found = "FABRIC_E_PARTITION_NOT_FOUND" + fabric_e_replica_does_not_exist = "FABRIC_E_REPLICA_DOES_NOT_EXIST" + fabric_e_service_group_does_not_exist = "FABRIC_E_SERVICE_GROUP_DOES_NOT_EXIST" + fabric_e_configuration_parameter_not_found = "FABRIC_E_CONFIGURATION_PARAMETER_NOT_FOUND" + fabric_e_directory_not_found = "FABRIC_E_DIRECTORY_NOT_FOUND" + fabric_e_fabric_version_not_found = "FABRIC_E_FABRIC_VERSION_NOT_FOUND" + 
fabric_e_file_not_found = "FABRIC_E_FILE_NOT_FOUND" + fabric_e_name_does_not_exist = "FABRIC_E_NAME_DOES_NOT_EXIST" + fabric_e_property_does_not_exist = "FABRIC_E_PROPERTY_DOES_NOT_EXIST" + fabric_e_enumeration_completed = "FABRIC_E_ENUMERATION_COMPLETED" + fabric_e_service_manifest_not_found = "FABRIC_E_SERVICE_MANIFEST_NOT_FOUND" + fabric_e_key_not_found = "FABRIC_E_KEY_NOT_FOUND" + fabric_e_health_entity_not_found = "FABRIC_E_HEALTH_ENTITY_NOT_FOUND" + fabric_e_application_type_already_exists = "FABRIC_E_APPLICATION_TYPE_ALREADY_EXISTS" + fabric_e_application_already_exists = "FABRIC_E_APPLICATION_ALREADY_EXISTS" + fabric_e_application_already_in_target_version = "FABRIC_E_APPLICATION_ALREADY_IN_TARGET_VERSION" + fabric_e_application_type_provision_in_progress = "FABRIC_E_APPLICATION_TYPE_PROVISION_IN_PROGRESS" + fabric_e_application_upgrade_in_progress = "FABRIC_E_APPLICATION_UPGRADE_IN_PROGRESS" + fabric_e_service_already_exists = "FABRIC_E_SERVICE_ALREADY_EXISTS" + fabric_e_service_group_already_exists = "FABRIC_E_SERVICE_GROUP_ALREADY_EXISTS" + fabric_e_application_type_in_use = "FABRIC_E_APPLICATION_TYPE_IN_USE" + fabric_e_fabric_already_in_target_version = "FABRIC_E_FABRIC_ALREADY_IN_TARGET_VERSION" + fabric_e_fabric_version_already_exists = "FABRIC_E_FABRIC_VERSION_ALREADY_EXISTS" + fabric_e_fabric_version_in_use = "FABRIC_E_FABRIC_VERSION_IN_USE" + fabric_e_fabric_upgrade_in_progress = "FABRIC_E_FABRIC_UPGRADE_IN_PROGRESS" + fabric_e_name_already_exists = "FABRIC_E_NAME_ALREADY_EXISTS" + fabric_e_name_not_empty = "FABRIC_E_NAME_NOT_EMPTY" + fabric_e_property_check_failed = "FABRIC_E_PROPERTY_CHECK_FAILED" + fabric_e_service_metadata_mismatch = "FABRIC_E_SERVICE_METADATA_MISMATCH" + fabric_e_service_type_mismatch = "FABRIC_E_SERVICE_TYPE_MISMATCH" + fabric_e_health_stale_report = "FABRIC_E_HEALTH_STALE_REPORT" + fabric_e_sequence_number_check_failed = "FABRIC_E_SEQUENCE_NUMBER_CHECK_FAILED" + fabric_e_node_has_not_stopped_yet = 
"FABRIC_E_NODE_HAS_NOT_STOPPED_YET" + fabric_e_instance_id_mismatch = "FABRIC_E_INSTANCE_ID_MISMATCH" + fabric_e_value_too_large = "FABRIC_E_VALUE_TOO_LARGE" + fabric_e_no_write_quorum = "FABRIC_E_NO_WRITE_QUORUM" + fabric_e_not_primary = "FABRIC_E_NOT_PRIMARY" + fabric_e_not_ready = "FABRIC_E_NOT_READY" + fabric_e_reconfiguration_pending = "FABRIC_E_RECONFIGURATION_PENDING" + fabric_e_service_offline = "FABRIC_E_SERVICE_OFFLINE" + e_abort = "E_ABORT" + fabric_e_communication_error = "FABRIC_E_COMMUNICATION_ERROR" + fabric_e_operation_not_complete = "FABRIC_E_OPERATION_NOT_COMPLETE" + fabric_e_timeout = "FABRIC_E_TIMEOUT" + fabric_e_node_is_up = "FABRIC_E_NODE_IS_UP" + e_fail = "E_FAIL" + fabric_e_backup_is_enabled = "FABRIC_E_BACKUP_IS_ENABLED" + fabric_e_restore_source_target_partition_mismatch = "FABRIC_E_RESTORE_SOURCE_TARGET_PARTITION_MISMATCH" + fabric_e_invalid_for_stateless_services = "FABRIC_E_INVALID_FOR_STATELESS_SERVICES" + fabric_e_backup_not_enabled = "FABRIC_E_BACKUP_NOT_ENABLED" + fabric_e_backup_policy_not_existing = "FABRIC_E_BACKUP_POLICY_NOT_EXISTING" + fabric_e_fault_analysis_service_not_existing = "FABRIC_E_FAULT_ANALYSIS_SERVICE_NOT_EXISTING" + fabric_e_backup_in_progress = "FABRIC_E_BACKUP_IN_PROGRESS" + fabric_e_restore_in_progress = "FABRIC_E_RESTORE_IN_PROGRESS" + fabric_e_backup_policy_already_existing = "FABRIC_E_BACKUP_POLICY_ALREADY_EXISTING" + fabric_e_invalid_service_scaling_policy = "FABRIC_E_INVALID_SERVICE_SCALING_POLICY" + e_invalidarg = "E_INVALIDARG" + fabric_e_single_instance_application_already_exists = "FABRIC_E_SINGLE_INSTANCE_APPLICATION_ALREADY_EXISTS" + fabric_e_single_instance_application_not_found = "FABRIC_E_SINGLE_INSTANCE_APPLICATION_NOT_FOUND" + fabric_e_volume_already_exists = "FABRIC_E_VOLUME_ALREADY_EXISTS" + fabric_e_volume_not_found = "FABRIC_E_VOLUME_NOT_FOUND" + serialization_error = "SerializationError" + fabric_e_imagebuilder_reserved_directory_error = "FABRIC_E_IMAGEBUILDER_RESERVED_DIRECTORY_ERROR" + + 
+class ApplicationDefinitionKind(str, Enum): + + invalid = "Invalid" #: Indicates the application definition kind is invalid. All Service Fabric enumerations have the invalid type. The value is 65535. + service_fabric_application_description = "ServiceFabricApplicationDescription" #: Indicates the application is defined by a Service Fabric application description. The value is 0. + compose = "Compose" #: Indicates the application is defined by compose file(s). The value is 1. + + class ApplicationStatus(str, Enum): invalid = "Invalid" #: Indicates the application status is invalid. All Service Fabric enumerations have the invalid type. The value is zero. @@ -188,93 +276,6 @@ class EntityKind(str, Enum): cluster = "Cluster" #: Indicates the entity is a Service Fabric cluster. The value is 8. -class FabricErrorCodes(str, Enum): - - fabric_e_invalid_partition_key = "FABRIC_E_INVALID_PARTITION_KEY" - fabric_e_imagebuilder_validation_error = "FABRIC_E_IMAGEBUILDER_VALIDATION_ERROR" - fabric_e_invalid_address = "FABRIC_E_INVALID_ADDRESS" - fabric_e_application_not_upgrading = "FABRIC_E_APPLICATION_NOT_UPGRADING" - fabric_e_application_upgrade_validation_error = "FABRIC_E_APPLICATION_UPGRADE_VALIDATION_ERROR" - fabric_e_fabric_not_upgrading = "FABRIC_E_FABRIC_NOT_UPGRADING" - fabric_e_fabric_upgrade_validation_error = "FABRIC_E_FABRIC_UPGRADE_VALIDATION_ERROR" - fabric_e_invalid_configuration = "FABRIC_E_INVALID_CONFIGURATION" - fabric_e_invalid_name_uri = "FABRIC_E_INVALID_NAME_URI" - fabric_e_path_too_long = "FABRIC_E_PATH_TOO_LONG" - fabric_e_key_too_large = "FABRIC_E_KEY_TOO_LARGE" - fabric_e_service_affinity_chain_not_supported = "FABRIC_E_SERVICE_AFFINITY_CHAIN_NOT_SUPPORTED" - fabric_e_invalid_atomic_group = "FABRIC_E_INVALID_ATOMIC_GROUP" - fabric_e_value_empty = "FABRIC_E_VALUE_EMPTY" - fabric_e_node_not_found = "FABRIC_E_NODE_NOT_FOUND" - fabric_e_application_type_not_found = "FABRIC_E_APPLICATION_TYPE_NOT_FOUND" - fabric_e_application_not_found = 
"FABRIC_E_APPLICATION_NOT_FOUND" - fabric_e_service_type_not_found = "FABRIC_E_SERVICE_TYPE_NOT_FOUND" - fabric_e_service_does_not_exist = "FABRIC_E_SERVICE_DOES_NOT_EXIST" - fabric_e_service_type_template_not_found = "FABRIC_E_SERVICE_TYPE_TEMPLATE_NOT_FOUND" - fabric_e_configuration_section_not_found = "FABRIC_E_CONFIGURATION_SECTION_NOT_FOUND" - fabric_e_partition_not_found = "FABRIC_E_PARTITION_NOT_FOUND" - fabric_e_replica_does_not_exist = "FABRIC_E_REPLICA_DOES_NOT_EXIST" - fabric_e_service_group_does_not_exist = "FABRIC_E_SERVICE_GROUP_DOES_NOT_EXIST" - fabric_e_configuration_parameter_not_found = "FABRIC_E_CONFIGURATION_PARAMETER_NOT_FOUND" - fabric_e_directory_not_found = "FABRIC_E_DIRECTORY_NOT_FOUND" - fabric_e_fabric_version_not_found = "FABRIC_E_FABRIC_VERSION_NOT_FOUND" - fabric_e_file_not_found = "FABRIC_E_FILE_NOT_FOUND" - fabric_e_name_does_not_exist = "FABRIC_E_NAME_DOES_NOT_EXIST" - fabric_e_property_does_not_exist = "FABRIC_E_PROPERTY_DOES_NOT_EXIST" - fabric_e_enumeration_completed = "FABRIC_E_ENUMERATION_COMPLETED" - fabric_e_service_manifest_not_found = "FABRIC_E_SERVICE_MANIFEST_NOT_FOUND" - fabric_e_key_not_found = "FABRIC_E_KEY_NOT_FOUND" - fabric_e_health_entity_not_found = "FABRIC_E_HEALTH_ENTITY_NOT_FOUND" - fabric_e_application_type_already_exists = "FABRIC_E_APPLICATION_TYPE_ALREADY_EXISTS" - fabric_e_application_already_exists = "FABRIC_E_APPLICATION_ALREADY_EXISTS" - fabric_e_application_already_in_target_version = "FABRIC_E_APPLICATION_ALREADY_IN_TARGET_VERSION" - fabric_e_application_type_provision_in_progress = "FABRIC_E_APPLICATION_TYPE_PROVISION_IN_PROGRESS" - fabric_e_application_upgrade_in_progress = "FABRIC_E_APPLICATION_UPGRADE_IN_PROGRESS" - fabric_e_service_already_exists = "FABRIC_E_SERVICE_ALREADY_EXISTS" - fabric_e_service_group_already_exists = "FABRIC_E_SERVICE_GROUP_ALREADY_EXISTS" - fabric_e_application_type_in_use = "FABRIC_E_APPLICATION_TYPE_IN_USE" - fabric_e_fabric_already_in_target_version = 
"FABRIC_E_FABRIC_ALREADY_IN_TARGET_VERSION" - fabric_e_fabric_version_already_exists = "FABRIC_E_FABRIC_VERSION_ALREADY_EXISTS" - fabric_e_fabric_version_in_use = "FABRIC_E_FABRIC_VERSION_IN_USE" - fabric_e_fabric_upgrade_in_progress = "FABRIC_E_FABRIC_UPGRADE_IN_PROGRESS" - fabric_e_name_already_exists = "FABRIC_E_NAME_ALREADY_EXISTS" - fabric_e_name_not_empty = "FABRIC_E_NAME_NOT_EMPTY" - fabric_e_property_check_failed = "FABRIC_E_PROPERTY_CHECK_FAILED" - fabric_e_service_metadata_mismatch = "FABRIC_E_SERVICE_METADATA_MISMATCH" - fabric_e_service_type_mismatch = "FABRIC_E_SERVICE_TYPE_MISMATCH" - fabric_e_health_stale_report = "FABRIC_E_HEALTH_STALE_REPORT" - fabric_e_sequence_number_check_failed = "FABRIC_E_SEQUENCE_NUMBER_CHECK_FAILED" - fabric_e_node_has_not_stopped_yet = "FABRIC_E_NODE_HAS_NOT_STOPPED_YET" - fabric_e_instance_id_mismatch = "FABRIC_E_INSTANCE_ID_MISMATCH" - fabric_e_value_too_large = "FABRIC_E_VALUE_TOO_LARGE" - fabric_e_no_write_quorum = "FABRIC_E_NO_WRITE_QUORUM" - fabric_e_not_primary = "FABRIC_E_NOT_PRIMARY" - fabric_e_not_ready = "FABRIC_E_NOT_READY" - fabric_e_reconfiguration_pending = "FABRIC_E_RECONFIGURATION_PENDING" - fabric_e_service_offline = "FABRIC_E_SERVICE_OFFLINE" - e_abort = "E_ABORT" - fabric_e_communication_error = "FABRIC_E_COMMUNICATION_ERROR" - fabric_e_operation_not_complete = "FABRIC_E_OPERATION_NOT_COMPLETE" - fabric_e_timeout = "FABRIC_E_TIMEOUT" - fabric_e_node_is_up = "FABRIC_E_NODE_IS_UP" - e_fail = "E_FAIL" - fabric_e_backup_is_enabled = "FABRIC_E_BACKUP_IS_ENABLED" - fabric_e_restore_source_target_partition_mismatch = "FABRIC_E_RESTORE_SOURCE_TARGET_PARTITION_MISMATCH" - fabric_e_invalid_for_stateless_services = "FABRIC_E_INVALID_FOR_STATELESS_SERVICES" - fabric_e_backup_not_enabled = "FABRIC_E_BACKUP_NOT_ENABLED" - fabric_e_backup_policy_not_existing = "FABRIC_E_BACKUP_POLICY_NOT_EXISTING" - fabric_e_fault_analysis_service_not_existing = "FABRIC_E_FAULT_ANALYSIS_SERVICE_NOT_EXISTING" - 
fabric_e_backup_in_progress = "FABRIC_E_BACKUP_IN_PROGRESS" - fabric_e_restore_in_progress = "FABRIC_E_RESTORE_IN_PROGRESS" - fabric_e_backup_policy_already_existing = "FABRIC_E_BACKUP_POLICY_ALREADY_EXISTING" - fabric_e_invalid_service_scaling_policy = "FABRIC_E_INVALID_SERVICE_SCALING_POLICY" - e_invalidarg = "E_INVALIDARG" - fabric_e_single_instance_application_already_exists = "FABRIC_E_SINGLE_INSTANCE_APPLICATION_ALREADY_EXISTS" - fabric_e_single_instance_application_not_found = "FABRIC_E_SINGLE_INSTANCE_APPLICATION_NOT_FOUND" - fabric_e_volume_already_exists = "FABRIC_E_VOLUME_ALREADY_EXISTS" - fabric_e_volume_not_found = "FABRIC_E_VOLUME_NOT_FOUND" - serialization_error = "SerializationError" - - class FabricEventKind(str, Enum): cluster_event = "ClusterEvent" @@ -287,64 +288,58 @@ class FabricEventKind(str, Enum): partition_analysis_event = "PartitionAnalysisEvent" application_created = "ApplicationCreated" application_deleted = "ApplicationDeleted" - application_health_report_created = "ApplicationHealthReportCreated" + application_new_health_report = "ApplicationNewHealthReport" application_health_report_expired = "ApplicationHealthReportExpired" - application_upgrade_complete = "ApplicationUpgradeComplete" - application_upgrade_domain_complete = "ApplicationUpgradeDomainComplete" - application_upgrade_rollback_complete = "ApplicationUpgradeRollbackComplete" - application_upgrade_rollback_start = "ApplicationUpgradeRollbackStart" - application_upgrade_start = "ApplicationUpgradeStart" - deployed_application_health_report_created = "DeployedApplicationHealthReportCreated" + application_upgrade_completed = "ApplicationUpgradeCompleted" + application_upgrade_domain_completed = "ApplicationUpgradeDomainCompleted" + application_upgrade_rollback_completed = "ApplicationUpgradeRollbackCompleted" + application_upgrade_rollback_started = "ApplicationUpgradeRollbackStarted" + application_upgrade_started = "ApplicationUpgradeStarted" + 
deployed_application_new_health_report = "DeployedApplicationNewHealthReport" deployed_application_health_report_expired = "DeployedApplicationHealthReportExpired" - process_deactivated = "ProcessDeactivated" - container_deactivated = "ContainerDeactivated" + application_process_exited = "ApplicationProcessExited" + application_container_instance_exited = "ApplicationContainerInstanceExited" node_aborted = "NodeAborted" - node_aborting = "NodeAborting" - node_added = "NodeAdded" - node_close = "NodeClose" - node_closing = "NodeClosing" - node_deactivate_complete = "NodeDeactivateComplete" - node_deactivate_start = "NodeDeactivateStart" + node_added_to_cluster = "NodeAddedToCluster" + node_closed = "NodeClosed" + node_deactivate_completed = "NodeDeactivateCompleted" + node_deactivate_started = "NodeDeactivateStarted" node_down = "NodeDown" - node_health_report_created = "NodeHealthReportCreated" + node_new_health_report = "NodeNewHealthReport" node_health_report_expired = "NodeHealthReportExpired" - node_opened_success = "NodeOpenedSuccess" + node_open_succeeded = "NodeOpenSucceeded" node_open_failed = "NodeOpenFailed" - node_opening = "NodeOpening" - node_removed = "NodeRemoved" + node_removed_from_cluster = "NodeRemovedFromCluster" node_up = "NodeUp" - partition_health_report_created = "PartitionHealthReportCreated" + partition_new_health_report = "PartitionNewHealthReport" partition_health_report_expired = "PartitionHealthReportExpired" - partition_reconfiguration_completed = "PartitionReconfigurationCompleted" + partition_reconfigured = "PartitionReconfigured" partition_primary_move_analysis = "PartitionPrimaryMoveAnalysis" service_created = "ServiceCreated" service_deleted = "ServiceDeleted" - service_health_report_created = "ServiceHealthReportCreated" + service_new_health_report = "ServiceNewHealthReport" service_health_report_expired = "ServiceHealthReportExpired" - deployed_service_health_report_created = "DeployedServiceHealthReportCreated" - 
deployed_service_health_report_expired = "DeployedServiceHealthReportExpired" - stateful_replica_health_report_created = "StatefulReplicaHealthReportCreated" + deployed_service_package_new_health_report = "DeployedServicePackageNewHealthReport" + deployed_service_package_health_report_expired = "DeployedServicePackageHealthReportExpired" + stateful_replica_new_health_report = "StatefulReplicaNewHealthReport" stateful_replica_health_report_expired = "StatefulReplicaHealthReportExpired" - stateless_replica_health_report_created = "StatelessReplicaHealthReportCreated" + stateless_replica_new_health_report = "StatelessReplicaNewHealthReport" stateless_replica_health_report_expired = "StatelessReplicaHealthReportExpired" - cluster_health_report_created = "ClusterHealthReportCreated" + cluster_new_health_report = "ClusterNewHealthReport" cluster_health_report_expired = "ClusterHealthReportExpired" - cluster_upgrade_complete = "ClusterUpgradeComplete" - cluster_upgrade_domain_complete = "ClusterUpgradeDomainComplete" - cluster_upgrade_rollback_complete = "ClusterUpgradeRollbackComplete" - cluster_upgrade_rollback_start = "ClusterUpgradeRollbackStart" - cluster_upgrade_start = "ClusterUpgradeStart" + cluster_upgrade_completed = "ClusterUpgradeCompleted" + cluster_upgrade_domain_completed = "ClusterUpgradeDomainCompleted" + cluster_upgrade_rollback_completed = "ClusterUpgradeRollbackCompleted" + cluster_upgrade_rollback_started = "ClusterUpgradeRollbackStarted" + cluster_upgrade_started = "ClusterUpgradeStarted" chaos_stopped = "ChaosStopped" chaos_started = "ChaosStarted" - chaos_restart_node_fault_completed = "ChaosRestartNodeFaultCompleted" - chaos_restart_code_package_fault_scheduled = "ChaosRestartCodePackageFaultScheduled" - chaos_restart_code_package_fault_completed = "ChaosRestartCodePackageFaultCompleted" - chaos_remove_replica_fault_scheduled = "ChaosRemoveReplicaFaultScheduled" - chaos_remove_replica_fault_completed = "ChaosRemoveReplicaFaultCompleted" - 
chaos_move_secondary_fault_scheduled = "ChaosMoveSecondaryFaultScheduled" - chaos_move_primary_fault_scheduled = "ChaosMovePrimaryFaultScheduled" - chaos_restart_replica_fault_scheduled = "ChaosRestartReplicaFaultScheduled" - chaos_restart_node_fault_scheduled = "ChaosRestartNodeFaultScheduled" + chaos_code_package_restart_scheduled = "ChaosCodePackageRestartScheduled" + chaos_replica_removal_scheduled = "ChaosReplicaRemovalScheduled" + chaos_partition_secondary_move_scheduled = "ChaosPartitionSecondaryMoveScheduled" + chaos_partition_primary_move_scheduled = "ChaosPartitionPrimaryMoveScheduled" + chaos_replica_restart_scheduled = "ChaosReplicaRestartScheduled" + chaos_node_restart_scheduled = "ChaosNodeRestartScheduled" class HealthEvaluationKind(str, Enum): @@ -731,6 +726,12 @@ class PropertyBatchInfoKind(str, Enum): failed = "Failed" #: The property batch failed. +class RetentionPolicyType(str, Enum): + + basic = "Basic" #: Indicates a basic retention policy type. + invalid = "Invalid" #: Indicates an invalid retention policy type. + + class BackupStorageKind(str, Enum): invalid = "Invalid" #: Indicates an invalid backup storage kind. All Service Fabric enumerations have the invalid type. @@ -880,24 +881,52 @@ class ScalingMechanismKind(str, Enum): add_remove_incremental_named_partition = "AddRemoveIncrementalNamedPartition" #: Indicates a mechanism for scaling where new named partitions are added or removed from a service. The value is 2. -class ServiceResourceStatus(str, Enum): +class ResourceStatus(str, Enum): - unknown = "Unknown" - active = "Active" - upgrading = "Upgrading" - deleting = "Deleting" - creating = "Creating" - failed = "Failed" + unknown = "Unknown" #: Indicates the resource status is unknown. The value is zero. + ready = "Ready" #: Indicates the resource is ready. The value is 1. + upgrading = "Upgrading" #: Indicates the resource is upgrading. The value is 2. + creating = "Creating" #: Indicates the resource is being created. The value is 3. 
+ deleting = "Deleting" #: Indicates the resource is being deleted. The value is 4. + failed = "Failed" #: Indicates the resource is not functional due to persistent failures. See statusDetails property for more details. The value is 5. -class ApplicationResourceStatus(str, Enum): +class SecretKind(str, Enum): - invalid = "Invalid" - ready = "Ready" - upgrading = "Upgrading" - creating = "Creating" - deleting = "Deleting" - failed = "Failed" + inlined_value = "inlinedValue" #: A simple secret resource whose plaintext value is provided by the user. + + +class VolumeProvider(str, Enum): + + sf_azure_file = "SFAzureFile" #: Provides volumes that are backed by Azure Files. + + +class SizeTypes(str, Enum): + + small = "Small" + medium = "Medium" + large = "Large" + + +class ApplicationScopedVolumeKind(str, Enum): + + service_fabric_volume_disk = "ServiceFabricVolumeDisk" #: Provides Service Fabric High Availability Volume Disk. + + +class NetworkKind(str, Enum): + + local = "Local" #: Indicates a container network local to a single Service Fabric cluster. The value is 1. + + +class HeaderMatchType(str, Enum): + + exact = "exact" + + +class OperatingSystemType(str, Enum): + + linux = "Linux" #: The required operating system is Linux. + windows = "Windows" #: The required operating system is Windows. class DiagnosticsSinkKind(str, Enum): @@ -906,10 +935,25 @@ class DiagnosticsSinkKind(str, Enum): azure_internal_monitoring_pipeline = "AzureInternalMonitoringPipeline" #: Diagnostics settings for Geneva. -class OperatingSystemTypes(str, Enum): +class AutoScalingMechanismKind(str, Enum): + + add_remove_replica = "AddRemoveReplica" #: Indicates that scaling should be performed by adding or removing replicas. + + +class AutoScalingMetricKind(str, Enum): + + resource = "Resource" #: Indicates that the metric is one of resources, like cpu or memory. + + +class AutoScalingResourceMetricName(str, Enum): + + cpu = "cpu" #: Indicates that the resource is CPU cores. 
+ memory_in_gb = "memoryInGB" #: Indicates that the resource is memory in GB. + + +class AutoScalingTriggerKind(str, Enum): - linux = "Linux" - windows = "Windows" + average_load = "AverageLoad" #: Indicates that scaling should be performed based on average load of all replicas in the service. class NodeStatusFilter(str, Enum): diff --git a/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event.py index b5a9c3ac9a60..708f5214bd24 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event.py @@ -20,6 +20,8 @@ class ServiceHealthReportExpiredEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ServiceHealthReportExpiredEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event_py3.py index 72911d581f3e..bdde10d7004b 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class ServiceHealthReportExpiredEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ServiceHealthReportExpiredEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -89,8 +92,8 @@ class ServiceHealthReportExpiredEvent(ServiceEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ServiceHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ServiceHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) self.instance_id = instance_id self.source_id = source_id self.property = property diff --git a/azure-servicefabric/azure/servicefabric/models/service_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/service_new_health_report_event.py similarity index 94% rename from 
azure-servicefabric/azure/servicefabric/models/service_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/service_new_health_report_event.py index 807e50a197cc..138f18a15dcd 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/service_new_health_report_event.py @@ -12,7 +12,7 @@ from .service_event import ServiceEvent -class ServiceHealthReportCreatedEvent(ServiceEvent): +class ServiceNewHealthReportEvent(ServiceEvent): """Service Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ServiceHealthReportCreatedEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ServiceHealthReportCreatedEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -90,7 +93,7 @@ class ServiceHealthReportCreatedEvent(ServiceEvent): } def __init__(self, **kwargs): - super(ServiceHealthReportCreatedEvent, self).__init__(**kwargs) + super(ServiceNewHealthReportEvent, self).__init__(**kwargs) self.instance_id = kwargs.get('instance_id', None) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) @@ -100,4 +103,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) 
self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'ServiceHealthReportCreated' + self.kind = 'ServiceNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/service_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/service_new_health_report_event_py3.py similarity index 90% rename from azure-servicefabric/azure/servicefabric/models/service_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/service_new_health_report_event_py3.py index bceb16a455fc..b1e2d6bf968f 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .service_event_py3 import ServiceEvent -class ServiceHealthReportCreatedEvent(ServiceEvent): +class ServiceNewHealthReportEvent(ServiceEvent): """Service Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class ServiceHealthReportCreatedEvent(ServiceEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -74,6 +76,7 @@ class ServiceHealthReportCreatedEvent(ServiceEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -89,8 +92,8 @@ class ServiceHealthReportCreatedEvent(ServiceEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(ServiceHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(ServiceNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, service_id=service_id, **kwargs) self.instance_id = instance_id self.source_id = source_id self.property = property @@ -100,4 +103,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, service_id: str, insta self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 
'ServiceHealthReportCreated' + self.kind = 'ServiceNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/service_properties.py b/azure-servicefabric/azure/servicefabric/models/service_properties.py new file mode 100644 index 000000000000..f02c37f6ce98 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/service_properties.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ServiceProperties(Model): + """Describes properties of a service resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param description: User readable description of the service. + :type description: str + :param replica_count: The number of replicas of the service to create. + Defaults to 1 if not specified. + :type replica_count: int + :param auto_scaling_policies: Auto scaling policies + :type auto_scaling_policies: + list[~azure.servicefabric.models.AutoScalingPolicy] + :ivar status: Status of the service. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the service. + :vartype status_details: str + :ivar health_state: Describes the health state of an application resource. 
+ Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' + :vartype health_state: str or ~azure.servicefabric.models.HealthState + :ivar unhealthy_evaluation: When the service's health state is not 'Ok', + this additional details from service fabric Health Manager for the user to + know why the service is marked unhealthy. + :vartype unhealthy_evaluation: str + """ + + _validation = { + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + 'health_state': {'readonly': True}, + 'unhealthy_evaluation': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'replica_count': {'key': 'replicaCount', 'type': 'int'}, + 'auto_scaling_policies': {'key': 'autoScalingPolicies', 'type': '[AutoScalingPolicy]'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'health_state': {'key': 'healthState', 'type': 'str'}, + 'unhealthy_evaluation': {'key': 'unhealthyEvaluation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ServiceProperties, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.replica_count = kwargs.get('replica_count', None) + self.auto_scaling_policies = kwargs.get('auto_scaling_policies', None) + self.status = None + self.status_details = None + self.health_state = None + self.unhealthy_evaluation = None diff --git a/azure-servicefabric/azure/servicefabric/models/service_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/service_properties_py3.py new file mode 100644 index 000000000000..6347f98d4fa5 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/service_properties_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ServiceProperties(Model): + """Describes properties of a service resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param description: User readable description of the service. + :type description: str + :param replica_count: The number of replicas of the service to create. + Defaults to 1 if not specified. + :type replica_count: int + :param auto_scaling_policies: Auto scaling policies + :type auto_scaling_policies: + list[~azure.servicefabric.models.AutoScalingPolicy] + :ivar status: Status of the service. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the service. + :vartype status_details: str + :ivar health_state: Describes the health state of an application resource. + Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' + :vartype health_state: str or ~azure.servicefabric.models.HealthState + :ivar unhealthy_evaluation: When the service's health state is not 'Ok', + this additional details from service fabric Health Manager for the user to + know why the service is marked unhealthy. 
+ :vartype unhealthy_evaluation: str + """ + + _validation = { + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + 'health_state': {'readonly': True}, + 'unhealthy_evaluation': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'replica_count': {'key': 'replicaCount', 'type': 'int'}, + 'auto_scaling_policies': {'key': 'autoScalingPolicies', 'type': '[AutoScalingPolicy]'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_details': {'key': 'statusDetails', 'type': 'str'}, + 'health_state': {'key': 'healthState', 'type': 'str'}, + 'unhealthy_evaluation': {'key': 'unhealthyEvaluation', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, replica_count: int=None, auto_scaling_policies=None, **kwargs) -> None: + super(ServiceProperties, self).__init__(**kwargs) + self.description = description + self.replica_count = replica_count + self.auto_scaling_policies = auto_scaling_policies + self.status = None + self.status_details = None + self.health_state = None + self.unhealthy_evaluation = None diff --git a/azure-servicefabric/azure/servicefabric/models/service_resource_replica_description.py b/azure-servicefabric/azure/servicefabric/models/service_replica_description.py similarity index 88% rename from azure-servicefabric/azure/servicefabric/models/service_resource_replica_description.py rename to azure-servicefabric/azure/servicefabric/models/service_replica_description.py index 68fd31343c39..5477382dfc83 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_resource_replica_description.py +++ b/azure-servicefabric/azure/servicefabric/models/service_replica_description.py @@ -12,14 +12,14 @@ from .service_replica_properties import ServiceReplicaProperties -class ServiceResourceReplicaDescription(ServiceReplicaProperties): +class ServiceReplicaDescription(ServiceReplicaProperties): """Describes a replica of a service resource. 
All required parameters must be populated in order to send to Azure. - :param os_type: Required. The Operating system type required by the code - in service. Possible values include: 'Linux', 'Windows' - :type os_type: str or ~azure.servicefabric.models.OperatingSystemTypes + :param os_type: Required. The operation system required by the code in + service. Possible values include: 'Linux', 'Windows' + :type os_type: str or ~azure.servicefabric.models.OperatingSystemType :param code_packages: Required. Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on @@ -50,5 +50,5 @@ class ServiceResourceReplicaDescription(ServiceReplicaProperties): } def __init__(self, **kwargs): - super(ServiceResourceReplicaDescription, self).__init__(**kwargs) + super(ServiceReplicaDescription, self).__init__(**kwargs) self.replica_name = kwargs.get('replica_name', None) diff --git a/azure-servicefabric/azure/servicefabric/models/service_resource_replica_description_py3.py b/azure-servicefabric/azure/servicefabric/models/service_replica_description_py3.py similarity index 85% rename from azure-servicefabric/azure/servicefabric/models/service_resource_replica_description_py3.py rename to azure-servicefabric/azure/servicefabric/models/service_replica_description_py3.py index 4fba0c372e90..578785768ed8 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_resource_replica_description_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_replica_description_py3.py @@ -12,14 +12,14 @@ from .service_replica_properties_py3 import ServiceReplicaProperties -class ServiceResourceReplicaDescription(ServiceReplicaProperties): +class ServiceReplicaDescription(ServiceReplicaProperties): """Describes a replica of a service resource. All required parameters must be populated in order to send to Azure. - :param os_type: Required. 
The Operating system type required by the code - in service. Possible values include: 'Linux', 'Windows' - :type os_type: str or ~azure.servicefabric.models.OperatingSystemTypes + :param os_type: Required. The operation system required by the code in + service. Possible values include: 'Linux', 'Windows' + :type os_type: str or ~azure.servicefabric.models.OperatingSystemType :param code_packages: Required. Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on @@ -50,5 +50,5 @@ class ServiceResourceReplicaDescription(ServiceReplicaProperties): } def __init__(self, *, os_type, code_packages, replica_name: str, network_refs=None, diagnostics=None, **kwargs) -> None: - super(ServiceResourceReplicaDescription, self).__init__(os_type=os_type, code_packages=code_packages, network_refs=network_refs, diagnostics=diagnostics, **kwargs) + super(ServiceReplicaDescription, self).__init__(os_type=os_type, code_packages=code_packages, network_refs=network_refs, diagnostics=diagnostics, **kwargs) self.replica_name = replica_name diff --git a/azure-servicefabric/azure/servicefabric/models/service_replica_properties.py b/azure-servicefabric/azure/servicefabric/models/service_replica_properties.py index 3c1c98849c95..690a2354d1dd 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_replica_properties.py +++ b/azure-servicefabric/azure/servicefabric/models/service_replica_properties.py @@ -17,9 +17,9 @@ class ServiceReplicaProperties(Model): All required parameters must be populated in order to send to Azure. - :param os_type: Required. The Operating system type required by the code - in service. Possible values include: 'Linux', 'Windows' - :type os_type: str or ~azure.servicefabric.models.OperatingSystemTypes + :param os_type: Required. The operation system required by the code in + service. 
Possible values include: 'Linux', 'Windows' + :type os_type: str or ~azure.servicefabric.models.OperatingSystemType :param code_packages: Required. Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on diff --git a/azure-servicefabric/azure/servicefabric/models/service_replica_properties_py3.py b/azure-servicefabric/azure/servicefabric/models/service_replica_properties_py3.py index 4b9efd620beb..0bbecea68bde 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_replica_properties_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_replica_properties_py3.py @@ -17,9 +17,9 @@ class ServiceReplicaProperties(Model): All required parameters must be populated in order to send to Azure. - :param os_type: Required. The Operating system type required by the code - in service. Possible values include: 'Linux', 'Windows' - :type os_type: str or ~azure.servicefabric.models.OperatingSystemTypes + :param os_type: Required. The operation system required by the code in + service. Possible values include: 'Linux', 'Windows' + :type os_type: str or ~azure.servicefabric.models.OperatingSystemType :param code_packages: Required. Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on diff --git a/azure-servicefabric/azure/servicefabric/models/service_resource_description.py b/azure-servicefabric/azure/servicefabric/models/service_resource_description.py index c49731207d34..8d64db0f84ff 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_resource_description.py +++ b/azure-servicefabric/azure/servicefabric/models/service_resource_description.py @@ -13,16 +13,18 @@ class ServiceResourceDescription(Model): - """Describes a service fabric service resource. + """This type describes a service resource. 
Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param os_type: Required. The Operating system type required by the code - in service. Possible values include: 'Linux', 'Windows' - :type os_type: str or ~azure.servicefabric.models.OperatingSystemTypes + :param name: Required. Name of the Service resource. + :type name: str + :param os_type: Required. The operation system required by the code in + service. Possible values include: 'Linux', 'Windows' + :type os_type: str or ~azure.servicefabric.models.OperatingSystemType :param code_packages: Required. Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on @@ -39,45 +41,60 @@ class ServiceResourceDescription(Model): :param replica_count: The number of replicas of the service to create. Defaults to 1 if not specified. :type replica_count: int - :param health_state: The health state of a Service Fabric entity such as - Cluster, Node, Application, Service, Partition, Replica etc. Possible - values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' - :type health_state: str or ~azure.servicefabric.models.HealthState - :ivar status: Represents the status of the service. Possible values - include: 'Unknown', 'Active', 'Upgrading', 'Deleting', 'Creating', - 'Failed' - :vartype status: str or ~azure.servicefabric.models.ServiceResourceStatus - :param name: Required. Service resource name. - :type name: str + :param auto_scaling_policies: Auto scaling policies + :type auto_scaling_policies: + list[~azure.servicefabric.models.AutoScalingPolicy] + :ivar status: Status of the service. 
Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the service. + :vartype status_details: str + :ivar health_state: Describes the health state of an application resource. + Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' + :vartype health_state: str or ~azure.servicefabric.models.HealthState + :ivar unhealthy_evaluation: When the service's health state is not 'Ok', + this additional details from service fabric Health Manager for the user to + know why the service is marked unhealthy. + :vartype unhealthy_evaluation: str """ _validation = { + 'name': {'required': True}, 'os_type': {'required': True}, 'code_packages': {'required': True}, 'status': {'readonly': True}, - 'name': {'required': True}, + 'status_details': {'readonly': True}, + 'health_state': {'readonly': True}, + 'unhealthy_evaluation': {'readonly': True}, } _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'os_type': {'key': 'properties.osType', 'type': 'str'}, 'code_packages': {'key': 'properties.codePackages', 'type': '[ContainerCodePackageProperties]'}, 'network_refs': {'key': 'properties.networkRefs', 'type': '[NetworkRef]'}, 'diagnostics': {'key': 'properties.diagnostics', 'type': 'DiagnosticsRef'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'replica_count': {'key': 'properties.replicaCount', 'type': 'int'}, - 'health_state': {'key': 'properties.healthState', 'type': 'str'}, + 'auto_scaling_policies': {'key': 'properties.autoScalingPolicies', 'type': '[AutoScalingPolicy]'}, 'status': {'key': 'properties.status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'status_details': {'key': 'properties.statusDetails', 'type': 'str'}, + 'health_state': {'key': 'properties.healthState', 'type': 'str'}, + 'unhealthy_evaluation': {'key': 
'properties.unhealthyEvaluation', 'type': 'str'}, } def __init__(self, **kwargs): super(ServiceResourceDescription, self).__init__(**kwargs) + self.name = kwargs.get('name', None) self.os_type = kwargs.get('os_type', None) self.code_packages = kwargs.get('code_packages', None) self.network_refs = kwargs.get('network_refs', None) self.diagnostics = kwargs.get('diagnostics', None) self.description = kwargs.get('description', None) self.replica_count = kwargs.get('replica_count', None) - self.health_state = kwargs.get('health_state', None) + self.auto_scaling_policies = kwargs.get('auto_scaling_policies', None) self.status = None - self.name = kwargs.get('name', None) + self.status_details = None + self.health_state = None + self.unhealthy_evaluation = None diff --git a/azure-servicefabric/azure/servicefabric/models/service_resource_description_py3.py b/azure-servicefabric/azure/servicefabric/models/service_resource_description_py3.py index 9c2a47b04516..5d19458c700c 100644 --- a/azure-servicefabric/azure/servicefabric/models/service_resource_description_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/service_resource_description_py3.py @@ -13,16 +13,18 @@ class ServiceResourceDescription(Model): - """Describes a service fabric service resource. + """This type describes a service resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param os_type: Required. The Operating system type required by the code - in service. Possible values include: 'Linux', 'Windows' - :type os_type: str or ~azure.servicefabric.models.OperatingSystemTypes + :param name: Required. Name of the Service resource. + :type name: str + :param os_type: Required. The operation system required by the code in + service. Possible values include: 'Linux', 'Windows' + :type os_type: str or ~azure.servicefabric.models.OperatingSystemType :param code_packages: Required. 
Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on @@ -39,45 +41,60 @@ class ServiceResourceDescription(Model): :param replica_count: The number of replicas of the service to create. Defaults to 1 if not specified. :type replica_count: int - :param health_state: The health state of a Service Fabric entity such as - Cluster, Node, Application, Service, Partition, Replica etc. Possible - values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' - :type health_state: str or ~azure.servicefabric.models.HealthState - :ivar status: Represents the status of the service. Possible values - include: 'Unknown', 'Active', 'Upgrading', 'Deleting', 'Creating', - 'Failed' - :vartype status: str or ~azure.servicefabric.models.ServiceResourceStatus - :param name: Required. Service resource name. - :type name: str + :param auto_scaling_policies: Auto scaling policies + :type auto_scaling_policies: + list[~azure.servicefabric.models.AutoScalingPolicy] + :ivar status: Status of the service. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the service. + :vartype status_details: str + :ivar health_state: Describes the health state of an application resource. + Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown' + :vartype health_state: str or ~azure.servicefabric.models.HealthState + :ivar unhealthy_evaluation: When the service's health state is not 'Ok', + this additional details from service fabric Health Manager for the user to + know why the service is marked unhealthy. 
+ :vartype unhealthy_evaluation: str """ _validation = { + 'name': {'required': True}, 'os_type': {'required': True}, 'code_packages': {'required': True}, 'status': {'readonly': True}, - 'name': {'required': True}, + 'status_details': {'readonly': True}, + 'health_state': {'readonly': True}, + 'unhealthy_evaluation': {'readonly': True}, } _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'os_type': {'key': 'properties.osType', 'type': 'str'}, 'code_packages': {'key': 'properties.codePackages', 'type': '[ContainerCodePackageProperties]'}, 'network_refs': {'key': 'properties.networkRefs', 'type': '[NetworkRef]'}, 'diagnostics': {'key': 'properties.diagnostics', 'type': 'DiagnosticsRef'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'replica_count': {'key': 'properties.replicaCount', 'type': 'int'}, - 'health_state': {'key': 'properties.healthState', 'type': 'str'}, + 'auto_scaling_policies': {'key': 'properties.autoScalingPolicies', 'type': '[AutoScalingPolicy]'}, 'status': {'key': 'properties.status', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'status_details': {'key': 'properties.statusDetails', 'type': 'str'}, + 'health_state': {'key': 'properties.healthState', 'type': 'str'}, + 'unhealthy_evaluation': {'key': 'properties.unhealthyEvaluation', 'type': 'str'}, } - def __init__(self, *, os_type, code_packages, name: str, network_refs=None, diagnostics=None, description: str=None, replica_count: int=None, health_state=None, **kwargs) -> None: + def __init__(self, *, name: str, os_type, code_packages, network_refs=None, diagnostics=None, description: str=None, replica_count: int=None, auto_scaling_policies=None, **kwargs) -> None: super(ServiceResourceDescription, self).__init__(**kwargs) + self.name = name self.os_type = os_type self.code_packages = code_packages self.network_refs = network_refs self.diagnostics = diagnostics self.description = description self.replica_count = replica_count - self.health_state = 
health_state + self.auto_scaling_policies = auto_scaling_policies self.status = None - self.name = name + self.status_details = None + self.health_state = None + self.unhealthy_evaluation = None diff --git a/azure-servicefabric/azure/servicefabric/models/setting.py b/azure-servicefabric/azure/servicefabric/models/setting.py index 23fc418e0bec..5e776e08ed1a 100644 --- a/azure-servicefabric/azure/servicefabric/models/setting.py +++ b/azure-servicefabric/azure/servicefabric/models/setting.py @@ -13,7 +13,9 @@ class Setting(Model): - """Describes a setting for the container. + """Describes a setting for the container. The setting file path can be fetched + from environment variable "Fabric_SettingPath". The path for Windows + container is "C:\\secrets". The path for Linux container is "/var/secrets". :param name: The name of the setting. :type name: str diff --git a/azure-servicefabric/azure/servicefabric/models/setting_py3.py b/azure-servicefabric/azure/servicefabric/models/setting_py3.py index 1a042b3d640e..493e42d157a2 100644 --- a/azure-servicefabric/azure/servicefabric/models/setting_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/setting_py3.py @@ -13,7 +13,9 @@ class Setting(Model): - """Describes a setting for the container. + """Describes a setting for the container. The setting file path can be fetched + from environment variable "Fabric_SettingPath". The path for Windows + container is "C:\\secrets". The path for Linux container is "/var/secrets". :param name: The name of the setting. 
:type name: str diff --git a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event.py index 878196c567d3..04171729ff71 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event.py @@ -20,6 +20,8 @@ class StatefulReplicaHealthReportExpiredEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -81,6 +83,7 @@ class StatefulReplicaHealthReportExpiredEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event_py3.py index cb99daf0292c..e58806878ec3 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class StatefulReplicaHealthReportExpiredEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. 
The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -81,6 +83,7 @@ class StatefulReplicaHealthReportExpiredEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -97,8 +100,8 @@ class StatefulReplicaHealthReportExpiredEvent(ReplicaEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, replica_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(StatefulReplicaHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, replica_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(StatefulReplicaHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) self.replica_instance_id = replica_instance_id self.source_id = source_id self.property = property diff --git 
a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_created_event.py b/azure-servicefabric/azure/servicefabric/models/stateful_replica_new_health_report_event.py similarity index 94% rename from azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/stateful_replica_new_health_report_event.py index 501b87d77c5a..ac36335b134c 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/stateful_replica_new_health_report_event.py @@ -12,7 +12,7 @@ from .replica_event import ReplicaEvent -class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): +class StatefulReplicaNewHealthReportEvent(ReplicaEvent): """Stateful Replica Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -81,6 +83,7 @@ class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -98,7 +101,7 @@ class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): } def __init__(self, **kwargs): - super(StatefulReplicaHealthReportCreatedEvent, self).__init__(**kwargs) + super(StatefulReplicaNewHealthReportEvent, self).__init__(**kwargs) self.replica_instance_id = kwargs.get('replica_instance_id', None) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) @@ -108,4 +111,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'StatefulReplicaHealthReportCreated' + self.kind = 'StatefulReplicaNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/stateful_replica_new_health_report_event_py3.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/stateful_replica_new_health_report_event_py3.py index e0d82f82f201..0ce0b6db0514 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateful_replica_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/stateful_replica_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .replica_event_py3 import ReplicaEvent -class 
StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): +class StatefulReplicaNewHealthReportEvent(ReplicaEvent): """Stateful Replica Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -81,6 +83,7 @@ class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -97,8 +100,8 @@ class StatefulReplicaHealthReportCreatedEvent(ReplicaEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, replica_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(StatefulReplicaHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, replica_instance_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, 
source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(StatefulReplicaNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) self.replica_instance_id = replica_instance_id self.source_id = source_id self.property = property @@ -108,4 +111,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, rep self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'StatefulReplicaHealthReportCreated' + self.kind = 'StatefulReplicaNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info.py b/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info.py index 0041d85bdf40..8ee366d7c230 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info.py +++ b/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info.py @@ -41,12 +41,12 @@ class StatefulServicePartitionInfo(ServicePartitionInfo): returns the duration since it has been in that state. This field is using ISO8601 format for specifying the duration. :type last_quorum_loss_duration: timedelta - :param current_configuration_epoch: An Epoch is a configuration number for - the partition as a whole. When the configuration of the replica set - changes, for example when the Primary replica changes, the operations that - are replicated from the new Primary replica are said to be a new Epoch - from the ones which were sent by the old Primary replica. - :type current_configuration_epoch: ~azure.servicefabric.models.Epoch + :param primary_epoch: An Epoch is a configuration number for the partition + as a whole. 
When the configuration of the replica set changes, for example + when the Primary replica changes, the operations that are replicated from + the new Primary replica are said to be a new Epoch from the ones which + were sent by the old Primary replica. + :type primary_epoch: ~azure.servicefabric.models.Epoch """ _validation = { @@ -61,7 +61,7 @@ class StatefulServicePartitionInfo(ServicePartitionInfo): 'target_replica_set_size': {'key': 'TargetReplicaSetSize', 'type': 'long'}, 'min_replica_set_size': {'key': 'MinReplicaSetSize', 'type': 'long'}, 'last_quorum_loss_duration': {'key': 'LastQuorumLossDuration', 'type': 'duration'}, - 'current_configuration_epoch': {'key': 'CurrentConfigurationEpoch', 'type': 'Epoch'}, + 'primary_epoch': {'key': 'PrimaryEpoch', 'type': 'Epoch'}, } def __init__(self, **kwargs): @@ -69,5 +69,5 @@ def __init__(self, **kwargs): self.target_replica_set_size = kwargs.get('target_replica_set_size', None) self.min_replica_set_size = kwargs.get('min_replica_set_size', None) self.last_quorum_loss_duration = kwargs.get('last_quorum_loss_duration', None) - self.current_configuration_epoch = kwargs.get('current_configuration_epoch', None) + self.primary_epoch = kwargs.get('primary_epoch', None) self.service_kind = 'Stateful' diff --git a/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info_py3.py b/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info_py3.py index a22903d89b97..326d0db72e1e 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/stateful_service_partition_info_py3.py @@ -41,12 +41,12 @@ class StatefulServicePartitionInfo(ServicePartitionInfo): returns the duration since it has been in that state. This field is using ISO8601 format for specifying the duration. 
:type last_quorum_loss_duration: timedelta - :param current_configuration_epoch: An Epoch is a configuration number for - the partition as a whole. When the configuration of the replica set - changes, for example when the Primary replica changes, the operations that - are replicated from the new Primary replica are said to be a new Epoch - from the ones which were sent by the old Primary replica. - :type current_configuration_epoch: ~azure.servicefabric.models.Epoch + :param primary_epoch: An Epoch is a configuration number for the partition + as a whole. When the configuration of the replica set changes, for example + when the Primary replica changes, the operations that are replicated from + the new Primary replica are said to be a new Epoch from the ones which + were sent by the old Primary replica. + :type primary_epoch: ~azure.servicefabric.models.Epoch """ _validation = { @@ -61,13 +61,13 @@ class StatefulServicePartitionInfo(ServicePartitionInfo): 'target_replica_set_size': {'key': 'TargetReplicaSetSize', 'type': 'long'}, 'min_replica_set_size': {'key': 'MinReplicaSetSize', 'type': 'long'}, 'last_quorum_loss_duration': {'key': 'LastQuorumLossDuration', 'type': 'duration'}, - 'current_configuration_epoch': {'key': 'CurrentConfigurationEpoch', 'type': 'Epoch'}, + 'primary_epoch': {'key': 'PrimaryEpoch', 'type': 'Epoch'}, } - def __init__(self, *, health_state=None, partition_status=None, partition_information=None, target_replica_set_size: int=None, min_replica_set_size: int=None, last_quorum_loss_duration=None, current_configuration_epoch=None, **kwargs) -> None: + def __init__(self, *, health_state=None, partition_status=None, partition_information=None, target_replica_set_size: int=None, min_replica_set_size: int=None, last_quorum_loss_duration=None, primary_epoch=None, **kwargs) -> None: super(StatefulServicePartitionInfo, self).__init__(health_state=health_state, partition_status=partition_status, partition_information=partition_information, **kwargs) 
self.target_replica_set_size = target_replica_set_size self.min_replica_set_size = min_replica_set_size self.last_quorum_loss_duration = last_quorum_loss_duration - self.current_configuration_epoch = current_configuration_epoch + self.primary_epoch = primary_epoch self.service_kind = 'Stateful' diff --git a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event.py b/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event.py index 3645a8dc926e..13e3fc19e88b 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event.py +++ b/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event.py @@ -20,6 +20,8 @@ class StatelessReplicaHealthReportExpiredEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -78,6 +80,7 @@ class StatelessReplicaHealthReportExpiredEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, diff --git a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event_py3.py b/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event_py3.py index e8713deedc05..dc0c92632f36 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_expired_event_py3.py @@ -20,6 +20,8 @@ class StatelessReplicaHealthReportExpiredEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -78,6 +80,7 @@ class StatelessReplicaHealthReportExpiredEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -93,8 +96,8 @@ class StatelessReplicaHealthReportExpiredEvent(ReplicaEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(StatelessReplicaHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, has_correlated_events: bool=None, **kwargs) -> None: + super(StatelessReplicaHealthReportExpiredEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) self.source_id = source_id self.property = property self.health_state = health_state diff --git a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_created_event.py 
b/azure-servicefabric/azure/servicefabric/models/stateless_replica_new_health_report_event.py similarity index 93% rename from azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_created_event.py rename to azure-servicefabric/azure/servicefabric/models/stateless_replica_new_health_report_event.py index a63f7c9da3a7..7f2c8f7f488c 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_created_event.py +++ b/azure-servicefabric/azure/servicefabric/models/stateless_replica_new_health_report_event.py @@ -12,7 +12,7 @@ from .replica_event import ReplicaEvent -class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): +class StatelessReplicaNewHealthReportEvent(ReplicaEvent): """Stateless Replica Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. 
:type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -78,6 +80,7 @@ class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -94,7 +97,7 @@ class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): } def __init__(self, **kwargs): - super(StatelessReplicaHealthReportCreatedEvent, self).__init__(**kwargs) + super(StatelessReplicaNewHealthReportEvent, self).__init__(**kwargs) self.source_id = kwargs.get('source_id', None) self.property = kwargs.get('property', None) self.health_state = kwargs.get('health_state', None) @@ -103,4 +106,4 @@ def __init__(self, **kwargs): self.description = kwargs.get('description', None) self.remove_when_expired = kwargs.get('remove_when_expired', None) self.source_utc_timestamp = kwargs.get('source_utc_timestamp', None) - self.kind = 'StatelessReplicaHealthReportCreated' + self.kind = 'StatelessReplicaNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_created_event_py3.py b/azure-servicefabric/azure/servicefabric/models/stateless_replica_new_health_report_event_py3.py similarity index 89% rename from azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_created_event_py3.py rename to azure-servicefabric/azure/servicefabric/models/stateless_replica_new_health_report_event_py3.py index 9f48a59b2b3e..fbaeeb9a0d8e 100644 --- a/azure-servicefabric/azure/servicefabric/models/stateless_replica_health_report_created_event_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/stateless_replica_new_health_report_event_py3.py @@ -12,7 +12,7 @@ from .replica_event_py3 import ReplicaEvent -class 
StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): +class StatelessReplicaNewHealthReportEvent(ReplicaEvent): """Stateless Replica Health Report Created event. All required parameters must be populated in order to send to Azure. @@ -20,6 +20,8 @@ class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): :param event_instance_id: Required. The identifier for the FabricEvent instance. :type event_instance_id: str + :param category: The category of event. + :type category: str :param time_stamp: Required. The time event was logged. :type time_stamp: datetime :param has_correlated_events: Shows there is existing related events @@ -78,6 +80,7 @@ class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): _attribute_map = { 'event_instance_id': {'key': 'EventInstanceId', 'type': 'str'}, + 'category': {'key': 'Category', 'type': 'str'}, 'time_stamp': {'key': 'TimeStamp', 'type': 'iso-8601'}, 'has_correlated_events': {'key': 'HasCorrelatedEvents', 'type': 'bool'}, 'kind': {'key': 'Kind', 'type': 'str'}, @@ -93,8 +96,8 @@ class StatelessReplicaHealthReportCreatedEvent(ReplicaEvent): 'source_utc_timestamp': {'key': 'SourceUtcTimestamp', 'type': 'iso-8601'}, } - def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, has_correlated_events: bool=None, **kwargs) -> None: - super(StatelessReplicaHealthReportCreatedEvent, self).__init__(event_instance_id=event_instance_id, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) + def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, replica_id: int, source_id: str, property: str, health_state: str, time_to_live_ms: int, sequence_number: int, description: str, remove_when_expired: bool, source_utc_timestamp, category: str=None, 
has_correlated_events: bool=None, **kwargs) -> None: + super(StatelessReplicaNewHealthReportEvent, self).__init__(event_instance_id=event_instance_id, category=category, time_stamp=time_stamp, has_correlated_events=has_correlated_events, partition_id=partition_id, replica_id=replica_id, **kwargs) self.source_id = source_id self.property = property self.health_state = health_state @@ -103,4 +106,4 @@ def __init__(self, *, event_instance_id: str, time_stamp, partition_id: str, rep self.description = description self.remove_when_expired = remove_when_expired self.source_utc_timestamp = source_utc_timestamp - self.kind = 'StatelessReplicaHealthReportCreated' + self.kind = 'StatelessReplicaNewHealthReport' diff --git a/azure-servicefabric/azure/servicefabric/models/tcp_config.py b/azure-servicefabric/azure/servicefabric/models/tcp_config.py new file mode 100644 index 000000000000..96401697877e --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/tcp_config.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TcpConfig(Model): + """Describes the tcp configuration for external connectivity for this network. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. tcp gateway config name. + :type name: str + :param port: Required. Specifies the port at which the service endpoint + below needs to be exposed. + :type port: int + :param destination: Required. Describes destination endpoint for routing + traffic. 
+ :type destination: ~azure.servicefabric.models.GatewayDestination + """ + + _validation = { + 'name': {'required': True}, + 'port': {'required': True}, + 'destination': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + 'destination': {'key': 'destination', 'type': 'GatewayDestination'}, + } + + def __init__(self, **kwargs): + super(TcpConfig, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.port = kwargs.get('port', None) + self.destination = kwargs.get('destination', None) diff --git a/azure-servicefabric/azure/servicefabric/models/tcp_config_py3.py b/azure-servicefabric/azure/servicefabric/models/tcp_config_py3.py new file mode 100644 index 000000000000..7266484be6c5 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/models/tcp_config_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TcpConfig(Model): + """Describes the tcp configuration for external connectivity for this network. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. tcp gateway config name. + :type name: str + :param port: Required. Specifies the port at which the service endpoint + below needs to be exposed. + :type port: int + :param destination: Required. Describes destination endpoint for routing + traffic. 
+ :type destination: ~azure.servicefabric.models.GatewayDestination + """ + + _validation = { + 'name': {'required': True}, + 'port': {'required': True}, + 'destination': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + 'destination': {'key': 'destination', 'type': 'GatewayDestination'}, + } + + def __init__(self, *, name: str, port: int, destination, **kwargs) -> None: + super(TcpConfig, self).__init__(**kwargs) + self.name = name + self.port = port + self.destination = destination diff --git a/azure-servicefabric/azure/servicefabric/models/container_volume.py b/azure-servicefabric/azure/servicefabric/models/volume_reference.py similarity index 87% rename from azure-servicefabric/azure/servicefabric/models/container_volume.py rename to azure-servicefabric/azure/servicefabric/models/volume_reference.py index 47323174239c..5896c0e87923 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_volume.py +++ b/azure-servicefabric/azure/servicefabric/models/volume_reference.py @@ -12,12 +12,12 @@ from msrest.serialization import Model -class ContainerVolume(Model): - """Describes how a volume is attached to a container. +class VolumeReference(Model): + """Describes a reference to a volume resource. All required parameters must be populated in order to send to Azure. - :param name: Required. Name of the volume. + :param name: Required. Name of the volume being referenced. :type name: str :param read_only: The flag indicating whether the volume is read only. Default is 'false'. 
@@ -39,7 +39,7 @@ class ContainerVolume(Model): } def __init__(self, **kwargs): - super(ContainerVolume, self).__init__(**kwargs) + super(VolumeReference, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.read_only = kwargs.get('read_only', None) self.destination_path = kwargs.get('destination_path', None) diff --git a/azure-servicefabric/azure/servicefabric/models/container_volume_py3.py b/azure-servicefabric/azure/servicefabric/models/volume_reference_py3.py similarity index 87% rename from azure-servicefabric/azure/servicefabric/models/container_volume_py3.py rename to azure-servicefabric/azure/servicefabric/models/volume_reference_py3.py index 0ff73ba3b086..78adefae1e39 100644 --- a/azure-servicefabric/azure/servicefabric/models/container_volume_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/volume_reference_py3.py @@ -12,12 +12,12 @@ from msrest.serialization import Model -class ContainerVolume(Model): - """Describes how a volume is attached to a container. +class VolumeReference(Model): + """Describes a reference to a volume resource. All required parameters must be populated in order to send to Azure. - :param name: Required. Name of the volume. + :param name: Required. Name of the volume being referenced. :type name: str :param read_only: The flag indicating whether the volume is read only. Default is 'false'. 
@@ -39,7 +39,7 @@ class ContainerVolume(Model): } def __init__(self, *, name: str, destination_path: str, read_only: bool=None, **kwargs) -> None: - super(ContainerVolume, self).__init__(**kwargs) + super(VolumeReference, self).__init__(**kwargs) self.name = name self.read_only = read_only self.destination_path = destination_path diff --git a/azure-servicefabric/azure/servicefabric/models/volume_resource_description.py b/azure-servicefabric/azure/servicefabric/models/volume_resource_description.py index 1128c2c47240..6d7b4180b2fe 100644 --- a/azure-servicefabric/azure/servicefabric/models/volume_resource_description.py +++ b/azure-servicefabric/azure/servicefabric/models/volume_resource_description.py @@ -13,15 +13,23 @@ class VolumeResourceDescription(Model): - """Describes a service fabric volume resource. + """This type describes a volume resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. + :param name: Required. Name of the Volume resource. + :type name: str :param description: User readable description of the volume. :type description: str + :ivar status: Status of the volume. Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the volume. + :vartype status_details: str :ivar provider: Required. Provider of the volume. Default value: "SFAzureFile" . :vartype provider: str @@ -29,26 +37,30 @@ class VolumeResourceDescription(Model): Azure Files file share. :type azure_file_parameters: ~azure.servicefabric.models.VolumeProviderParametersAzureFile - :param name: Required. Volume resource name. 
- :type name: str """ _validation = { - 'provider': {'required': True, 'constant': True}, 'name': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + 'provider': {'required': True, 'constant': True}, } _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'status_details': {'key': 'properties.statusDetails', 'type': 'str'}, 'provider': {'key': 'properties.provider', 'type': 'str'}, 'azure_file_parameters': {'key': 'properties.azureFileParameters', 'type': 'VolumeProviderParametersAzureFile'}, - 'name': {'key': 'name', 'type': 'str'}, } provider = "SFAzureFile" def __init__(self, **kwargs): super(VolumeResourceDescription, self).__init__(**kwargs) + self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) + self.status = None + self.status_details = None self.azure_file_parameters = kwargs.get('azure_file_parameters', None) - self.name = kwargs.get('name', None) diff --git a/azure-servicefabric/azure/servicefabric/models/volume_resource_description_py3.py b/azure-servicefabric/azure/servicefabric/models/volume_resource_description_py3.py index aa4ef2da3c24..bc275555c697 100644 --- a/azure-servicefabric/azure/servicefabric/models/volume_resource_description_py3.py +++ b/azure-servicefabric/azure/servicefabric/models/volume_resource_description_py3.py @@ -13,15 +13,23 @@ class VolumeResourceDescription(Model): - """Describes a service fabric volume resource. + """This type describes a volume resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. + :param name: Required. Name of the Volume resource. + :type name: str :param description: User readable description of the volume. :type description: str + :ivar status: Status of the volume. 
Possible values include: 'Unknown', + 'Ready', 'Upgrading', 'Creating', 'Deleting', 'Failed' + :vartype status: str or ~azure.servicefabric.models.ResourceStatus + :ivar status_details: Gives additional information about the current + status of the volume. + :vartype status_details: str :ivar provider: Required. Provider of the volume. Default value: "SFAzureFile" . :vartype provider: str @@ -29,26 +37,30 @@ class VolumeResourceDescription(Model): Azure Files file share. :type azure_file_parameters: ~azure.servicefabric.models.VolumeProviderParametersAzureFile - :param name: Required. Volume resource name. - :type name: str """ _validation = { - 'provider': {'required': True, 'constant': True}, 'name': {'required': True}, + 'status': {'readonly': True}, + 'status_details': {'readonly': True}, + 'provider': {'required': True, 'constant': True}, } _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'status_details': {'key': 'properties.statusDetails', 'type': 'str'}, 'provider': {'key': 'properties.provider', 'type': 'str'}, 'azure_file_parameters': {'key': 'properties.azureFileParameters', 'type': 'VolumeProviderParametersAzureFile'}, - 'name': {'key': 'name', 'type': 'str'}, } provider = "SFAzureFile" def __init__(self, *, name: str, description: str=None, azure_file_parameters=None, **kwargs) -> None: super(VolumeResourceDescription, self).__init__(**kwargs) + self.name = name self.description = description + self.status = None + self.status_details = None self.azure_file_parameters = azure_file_parameters - self.name = name diff --git a/azure-servicefabric/azure/servicefabric/operations/__init__.py b/azure-servicefabric/azure/servicefabric/operations/__init__.py new file mode 100644 index 000000000000..0bef1cbc124c --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .mesh_secret_operations import MeshSecretOperations +from .mesh_secret_value_operations import MeshSecretValueOperations +from .mesh_volume_operations import MeshVolumeOperations +from .mesh_network_operations import MeshNetworkOperations +from .mesh_application_operations import MeshApplicationOperations +from .mesh_service_operations import MeshServiceOperations +from .mesh_code_package_operations import MeshCodePackageOperations +from .mesh_service_replica_operations import MeshServiceReplicaOperations +from .mesh_gateway_operations import MeshGatewayOperations + +__all__ = [ + 'MeshSecretOperations', + 'MeshSecretValueOperations', + 'MeshVolumeOperations', + 'MeshNetworkOperations', + 'MeshApplicationOperations', + 'MeshServiceOperations', + 'MeshCodePackageOperations', + 'MeshServiceReplicaOperations', + 'MeshGatewayOperations', +] diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_application_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_application_operations.py new file mode 100644 index 000000000000..1a6da8960b81 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_application_operations.py @@ -0,0 +1,261 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshApplicationOperations(object): + """MeshApplicationOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def create_or_update( + self, application_resource_name, application_resource_description, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Application resource. + + Creates a Application resource with the specified name, description and + properties. If Application resource with the same name exists, then it + is updated with the specified description and properties. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param application_resource_description: Description for creating a + Application resource. + :type application_resource_description: + ~azure.servicefabric.models.ApplicationResourceDescription + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ApplicationResourceDescription or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.ApplicationResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct body + body_content = self._serialize.body(application_resource_description, 'ApplicationResourceDescription') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201, 202]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ApplicationResourceDescription', response) + if response.status_code == 201: + deserialized = self._deserialize('ApplicationResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/Resources/Applications/{applicationResourceName}'} + + def get( + self, application_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the Application resource with the given name. 
+ + Gets the information about the Application resource with the given + name. The information include the description and other properties of + the Application. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ApplicationResourceDescription or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.ApplicationResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ApplicationResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Applications/{applicationResourceName}'} + + def delete( + self, 
application_resource_name, custom_headers=None, raw=False, **operation_config): + """Deletes the Application resource. + + Deletes the Application resource identified by the name. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/Resources/Applications/{applicationResourceName}'} + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists all the application resources. + + Gets the information about all application resources in a given + resource group. The information include the description and other + properties of the Application. 
+ + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedApplicationResourceDescriptionList or ClientRawResponse + if raw=true + :rtype: + ~azure.servicefabric.models.PagedApplicationResourceDescriptionList or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedApplicationResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Applications'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_code_package_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_code_package_operations.py new file mode 100644 index 000000000000..ecdba08433c3 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_code_package_operations.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshCodePackageOperations(object): + """MeshCodePackageOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def get_container_logs( + self, application_resource_name, service_resource_name, replica_name, code_package_name, tail=None, custom_headers=None, raw=False, **operation_config): + """Gets the logs from the container. + + Gets the logs for the container of the specified code package of the + service replica. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param service_resource_name: The identity of the service. + :type service_resource_name: str + :param replica_name: Service Fabric replica name. + :type replica_name: str + :param code_package_name: The name of code package of the service. + :type code_package_name: str + :param tail: Number of lines to show from the end of the logs. Default + is 100. 'all' to show the complete logs. 
+ :type tail: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ContainerLogs or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.ContainerLogs or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get_container_logs.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), + 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True), + 'replicaName': self._serialize.url("replica_name", replica_name, 'str', skip_quote=True), + 'codePackageName': self._serialize.url("code_package_name", code_package_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if tail is not None: + query_parameters['Tail'] = self._serialize.query("tail", tail, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ContainerLogs', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + 
get_container_logs.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}/Replicas/{replicaName}/CodePackages/{codePackageName}/Logs'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_gateway_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_gateway_operations.py new file mode 100644 index 000000000000..9c56b705322f --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_gateway_operations.py @@ -0,0 +1,260 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshGatewayOperations(object): + """MeshGatewayOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def create_or_update( + self, gateway_resource_name, gateway_resource_description, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Gateway resource. 
+ + Creates a Gateway resource with the specified name, description and + properties. If Gateway resource with the same name exists, then it is + updated with the specified description and properties. Use Gateway + resource to provide public connectivity to application services. + + :param gateway_resource_name: The identity of the gateway. + :type gateway_resource_name: str + :param gateway_resource_description: Description for creating a + Gateway resource. + :type gateway_resource_description: + ~azure.servicefabric.models.GatewayResourceDescription + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: GatewayResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.GatewayResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'gatewayResourceName': self._serialize.url("gateway_resource_name", gateway_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct body + body_content = self._serialize.body(gateway_resource_description, 'GatewayResourceDescription') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if 
response.status_code not in [200, 201, 202]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('GatewayResourceDescription', response) + if response.status_code == 201: + deserialized = self._deserialize('GatewayResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/Resources/Gateways/{gatewayResourceName}'} + + def get( + self, gateway_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the Gateway resource with the given name. + + Gets the information about the Gateway resource with the given name. + The information include the description and other properties of the + Gateway. + + :param gateway_resource_name: The identity of the gateway. + :type gateway_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: GatewayResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.GatewayResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'gatewayResourceName': self._serialize.url("gateway_resource_name", gateway_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('GatewayResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Gateways/{gatewayResourceName}'} + + def delete( + self, gateway_resource_name, custom_headers=None, raw=False, **operation_config): + """Deletes the Gateway resource. + + Deletes the Gateway resource identified by the name. + + :param gateway_resource_name: The identity of the gateway. + :type gateway_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'gatewayResourceName': self._serialize.url("gateway_resource_name", gateway_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/Resources/Gateways/{gatewayResourceName}'} + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists all the gateway resources. + + Gets the information about all gateway resources in a given resource + group. The information include the description and other properties of + the Gateway. + + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: PagedGatewayResourceDescriptionList or ClientRawResponse if + raw=true + :rtype: + ~azure.servicefabric.models.PagedGatewayResourceDescriptionList or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedGatewayResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Gateways'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_network_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_network_operations.py new file mode 100644 index 000000000000..5940acbaddfd --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_network_operations.py @@ -0,0 +1,263 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshNetworkOperations(object): + """MeshNetworkOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def create_or_update( + self, network_resource_name, name, properties, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Network resource. + + Creates a Network resource with the specified name, description and + properties. If Network resource with the same name exists, then it is + updated with the specified description and properties. Network resource + provides connectivity between application services. + + :param network_resource_name: The identity of the network. + :type network_resource_name: str + :param name: Name of the Network resource. + :type name: str + :param properties: Describes properties of a network resource. + :type properties: + ~azure.servicefabric.models.NetworkResourceProperties + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: NetworkResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.NetworkResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + network_resource_description = models.NetworkResourceDescription(name=name, properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'networkResourceName': self._serialize.url("network_resource_name", network_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct body + body_content = self._serialize.body(network_resource_description, 'NetworkResourceDescription') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201, 202]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('NetworkResourceDescription', response) + if response.status_code == 201: + deserialized = self._deserialize('NetworkResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/Resources/Networks/{networkResourceName}'} + + def get( + self, network_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the Network 
resource with the given name. + + Gets the information about the Network resource with the given name. + The information include the description and other properties of the + Network. + + :param network_resource_name: The identity of the network. + :type network_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: NetworkResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.NetworkResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'networkResourceName': self._serialize.url("network_resource_name", network_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('NetworkResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Networks/{networkResourceName}'} + + def delete( + self, network_resource_name, 
custom_headers=None, raw=False, **operation_config): + """Deletes the Network resource. + + Deletes the Network resource identified by the name. + + :param network_resource_name: The identity of the network. + :type network_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'networkResourceName': self._serialize.url("network_resource_name", network_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/Resources/Networks/{networkResourceName}'} + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists all the network resources. + + Gets the information about all network resources in a given resource + group. The information include the description and other properties of + the Network. 
+ + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedNetworkResourceDescriptionList or ClientRawResponse if + raw=true + :rtype: + ~azure.servicefabric.models.PagedNetworkResourceDescriptionList or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedNetworkResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Networks'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_secret_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_secret_operations.py new file mode 100644 index 000000000000..9f737b771991 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_secret_operations.py @@ -0,0 +1,260 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshSecretOperations(object): + """MeshSecretOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def create_or_update( + self, secret_resource_name, properties, name, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Secret resource. + + Creates a Secret resource with the specified name, description and + properties. If Secret resource with the same name exists, then it is + updated with the specified description and properties. Once created, + the kind and contentType of a secret resource cannot be updated. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param properties: Describes the properties of a secret resource. + :type properties: ~azure.servicefabric.models.SecretResourceProperties + :param name: Name of the Secret resource. 
+ :type name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SecretResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.SecretResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + secret_resource_description = models.SecretResourceDescription(properties=properties, name=name) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct body + body_content = self._serialize.body(secret_resource_description, 'SecretResourceDescription') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201, 202]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SecretResourceDescription', response) + if response.status_code == 201: + deserialized = self._deserialize('SecretResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return 
client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/Resources/Secrets/{secretResourceName}'} + + def get( + self, secret_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the Secret resource with the given name. + + Gets the information about the Secret resource with the given name. The + information include the description and other properties of the Secret. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SecretResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.SecretResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SecretResourceDescription', response) + + if raw: + 
client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Secrets/{secretResourceName}'} + + def delete( + self, secret_resource_name, custom_headers=None, raw=False, **operation_config): + """Deletes the Secret resource. + + Deletes the specified Secret resource and all of its named values. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/Resources/Secrets/{secretResourceName}'} + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists all the secret resources. 
+ + Gets the information about all secret resources in a given resource + group. The information include the description and other properties of + the Secret. + + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedSecretResourceDescriptionList or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.PagedSecretResourceDescriptionList + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedSecretResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Secrets'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_secret_value_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_secret_value_operations.py new file mode 100644 index 000000000000..2f767822d0d3 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_secret_value_operations.py @@ -0,0 +1,343 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshSecretValueOperations(object): + """MeshSecretValueOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def add_value( + self, secret_resource_name, secret_value_resource_name, name, value=None, custom_headers=None, raw=False, **operation_config): + """Adds the specified value as a new version of the specified secret + resource. + + Creates a new value of the specified secret resource. The name of the + value is typically the version identifier. Once created the value + cannot be changed. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param secret_value_resource_name: The name of the secret resource + value which is typically the version identifier for the value. + :type secret_value_resource_name: str + :param name: Version identifier of the secret value. + :type name: str + :param value: The actual value of the secret. 
+ :type value: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SecretValueResourceDescription or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.SecretValueResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + secret_value_resource_description = models.SecretValueResourceDescription(name=name, value=value) + + # Construct URL + url = self.add_value.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True), + 'secretValueResourceName': self._serialize.url("secret_value_resource_name", secret_value_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct body + body_content = self._serialize.body(secret_value_resource_description, 'SecretValueResourceDescription') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201, 202]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SecretValueResourceDescription', response) + if response.status_code == 201: + deserialized = 
self._deserialize('SecretValueResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + add_value.metadata = {'url': '/Resources/Secrets/{secretResourceName}/values/{secretValueResourceName}'} + + def get( + self, secret_resource_name, secret_value_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified secret value resource. + + Get the information about the specified named secret value resources. + The information does not include the actual value of the secret. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param secret_value_resource_name: The name of the secret resource + value which is typically the version identifier for the value. + :type secret_value_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: SecretValueResourceDescription or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.SecretValueResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True), + 'secretValueResourceName': self._serialize.url("secret_value_resource_name", secret_value_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SecretValueResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Secrets/{secretResourceName}/values/{secretValueResourceName}'} + + def delete( + self, secret_resource_name, secret_value_resource_name, custom_headers=None, raw=False, **operation_config): + """Deletes the specified value of the named secret resource. + + Deletes the secret value resource identified by the name. The name of + the resource is typically the version associated with that value. + Deletion will fail if the specified value is in use. 
+ + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param secret_value_resource_name: The name of the secret resource + value which is typically the version identifier for the value. + :type secret_value_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True), + 'secretValueResourceName': self._serialize.url("secret_value_resource_name", secret_value_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/Resources/Secrets/{secretResourceName}/values/{secretValueResourceName}'} + + def list( + self, secret_resource_name, custom_headers=None, raw=False, **operation_config): + """List names of all values of the specified secret resource. 
+ + Gets information about all secret value resources of the specified + secret resource. The information includes the names of the secret value + resources, but not the actual values. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedSecretValueResourceDescriptionList or ClientRawResponse + if raw=true + :rtype: + ~azure.servicefabric.models.PagedSecretValueResourceDescriptionList or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedSecretValueResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Secrets/{secretResourceName}/values'} + + def show( 
+ self, secret_resource_name, secret_value_resource_name, custom_headers=None, raw=False, **operation_config): + """Lists the specified value of the secret resource. + + Lists the decrypted value of the specified named value of the secret + resource. This is a privileged operation. + + :param secret_resource_name: The name of the secret resource. + :type secret_resource_name: str + :param secret_value_resource_name: The name of the secret resource + value which is typically the version identifier for the value. + :type secret_value_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SecretValue or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.SecretValue or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.show.metadata['url'] + path_format_arguments = { + 'secretResourceName': self._serialize.url("secret_resource_name", secret_resource_name, 'str', skip_quote=True), + 'secretValueResourceName': self._serialize.url("secret_value_resource_name", secret_value_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None 
+ + if response.status_code == 200: + deserialized = self._deserialize('SecretValue', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + show.metadata = {'url': '/Resources/Secrets/{secretResourceName}/values/{secretValueResourceName}/list_value'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_service_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_service_operations.py new file mode 100644 index 000000000000..fc1e8655dfdd --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_service_operations.py @@ -0,0 +1,155 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshServiceOperations(object): + """MeshServiceOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def get( + self, application_resource_name, service_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the Service resource with the given name. + + Gets the information about the Service resource with the given name. + The information include the description and other properties of the + Service. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param service_resource_name: The identity of the service. + :type service_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ServiceResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.ServiceResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), + 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ServiceResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}'} + + def list( + self, application_resource_name, custom_headers=None, raw=False, **operation_config): + """Lists all the service resources. + + Gets the information about all services of an application resource. The + information include the description and other properties of the + Service. + + :param application_resource_name: The identity of the application. 
+ :type application_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedServiceResourceDescriptionList or ClientRawResponse if + raw=true + :rtype: + ~azure.servicefabric.models.PagedServiceResourceDescriptionList or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedServiceResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_service_replica_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_service_replica_operations.py new file mode 100644 index 000000000000..234aa14b9571 --- 
/dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_service_replica_operations.py @@ -0,0 +1,159 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshServiceReplicaOperations(object): + """MeshServiceReplicaOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def get( + self, application_resource_name, service_resource_name, replica_name, custom_headers=None, raw=False, **operation_config): + """Gets the given replica of the service of an application. + + Gets the information about the service replica with the given name. The + information include the description and other properties of the service + replica. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param service_resource_name: The identity of the service. + :type service_resource_name: str + :param replica_name: Service Fabric replica name. 
+ :type replica_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ServiceReplicaDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.ServiceReplicaDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), + 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True), + 'replicaName': self._serialize.url("replica_name", replica_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ServiceReplicaDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}/Replicas/{replicaName}'} + + def list( + self, 
application_resource_name, service_resource_name, custom_headers=None, raw=False, **operation_config): + """Lists all the replicas of a service. + + Gets the information about all replicas of a service. The information + include the description and other properties of the service replica. + + :param application_resource_name: The identity of the application. + :type application_resource_name: str + :param service_resource_name: The identity of the service. + :type service_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedServiceReplicaDescriptionList or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.PagedServiceReplicaDescriptionList + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), + 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if 
response.status_code == 200: + deserialized = self._deserialize('PagedServiceReplicaDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}/Replicas'} diff --git a/azure-servicefabric/azure/servicefabric/operations/mesh_volume_operations.py b/azure-servicefabric/azure/servicefabric/operations/mesh_volume_operations.py new file mode 100644 index 000000000000..f8b241d3a017 --- /dev/null +++ b/azure-servicefabric/azure/servicefabric/operations/mesh_volume_operations.py @@ -0,0 +1,257 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class MeshVolumeOperations(object): + """MeshVolumeOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The version of the API. This parameter is required and its value must be '6.4-preview'. Constant value: "6.4-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + + self.config = config + self.api_version = "6.4-preview" + + def create_or_update( + self, volume_resource_name, volume_resource_description, custom_headers=None, raw=False, **operation_config): + """Creates or updates a Volume resource. + + Creates a Volume resource with the specified name, description and + properties. If Volume resource with the same name exists, then it is + updated with the specified description and properties. + + :param volume_resource_name: The identity of the volume. + :type volume_resource_name: str + :param volume_resource_description: Description for creating a Volume + resource. + :type volume_resource_description: + ~azure.servicefabric.models.VolumeResourceDescription + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: VolumeResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.VolumeResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'volumeResourceName': self._serialize.url("volume_resource_name", volume_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct body + body_content = self._serialize.body(volume_resource_description, 'VolumeResourceDescription') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201, 202]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('VolumeResourceDescription', response) + if response.status_code == 201: + deserialized = self._deserialize('VolumeResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/Resources/Volumes/{volumeResourceName}'} + + def get( + self, volume_resource_name, custom_headers=None, raw=False, **operation_config): + """Gets the Volume resource with the given name. + + Gets the information about the Volume resource with the given name. 
The + information include the description and other properties of the Volume. + + :param volume_resource_name: The identity of the volume. + :type volume_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: VolumeResourceDescription or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.VolumeResourceDescription or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'volumeResourceName': self._serialize.url("volume_resource_name", volume_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('VolumeResourceDescription', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/Resources/Volumes/{volumeResourceName}'} + + def delete( + self, volume_resource_name, custom_headers=None, raw=False, **operation_config): + """Deletes the Volume resource. 
+ + Deletes the Volume resource identified by the name. + + :param volume_resource_name: The identity of the volume. + :type volume_resource_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'volumeResourceName': self._serialize.url("volume_resource_name", volume_resource_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/Resources/Volumes/{volumeResourceName}'} + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists all the volume resources. + + Gets the information about all volume resources in a given resource + group. The information include the description and other properties of + the Volume. 
+ + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PagedVolumeResourceDescriptionList or ClientRawResponse if + raw=true + :rtype: ~azure.servicefabric.models.PagedVolumeResourceDescriptionList + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PagedVolumeResourceDescriptionList', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list.metadata = {'url': '/Resources/Volumes'} diff --git a/azure-servicefabric/azure/servicefabric/service_fabric_client_ap_is.py b/azure-servicefabric/azure/servicefabric/service_fabric_client_ap_is.py index 55d4870757c3..72ad83a9d3a7 100644 --- a/azure-servicefabric/azure/servicefabric/service_fabric_client_ap_is.py +++ b/azure-servicefabric/azure/servicefabric/service_fabric_client_ap_is.py @@ -13,7 +13,15 @@ from msrest import Configuration, Serializer, Deserializer from .version import VERSION from msrest.pipeline import ClientRawResponse -from msrest.exceptions import 
HttpOperationError +from .operations.mesh_secret_operations import MeshSecretOperations +from .operations.mesh_secret_value_operations import MeshSecretValueOperations +from .operations.mesh_volume_operations import MeshVolumeOperations +from .operations.mesh_network_operations import MeshNetworkOperations +from .operations.mesh_application_operations import MeshApplicationOperations +from .operations.mesh_service_operations import MeshServiceOperations +from .operations.mesh_code_package_operations import MeshCodePackageOperations +from .operations.mesh_service_replica_operations import MeshServiceReplicaOperations +from .operations.mesh_gateway_operations import MeshGatewayOperations from . import models @@ -49,6 +57,25 @@ class ServiceFabricClientAPIs(SDKClient): :ivar config: Configuration for client. :vartype config: ServiceFabricClientAPIsConfiguration + :ivar mesh_secret: MeshSecret operations + :vartype mesh_secret: azure.servicefabric.operations.MeshSecretOperations + :ivar mesh_secret_value: MeshSecretValue operations + :vartype mesh_secret_value: azure.servicefabric.operations.MeshSecretValueOperations + :ivar mesh_volume: MeshVolume operations + :vartype mesh_volume: azure.servicefabric.operations.MeshVolumeOperations + :ivar mesh_network: MeshNetwork operations + :vartype mesh_network: azure.servicefabric.operations.MeshNetworkOperations + :ivar mesh_application: MeshApplication operations + :vartype mesh_application: azure.servicefabric.operations.MeshApplicationOperations + :ivar mesh_service: MeshService operations + :vartype mesh_service: azure.servicefabric.operations.MeshServiceOperations + :ivar mesh_code_package: MeshCodePackage operations + :vartype mesh_code_package: azure.servicefabric.operations.MeshCodePackageOperations + :ivar mesh_service_replica: MeshServiceReplica operations + :vartype mesh_service_replica: azure.servicefabric.operations.MeshServiceReplicaOperations + :ivar mesh_gateway: MeshGateway operations + :vartype mesh_gateway: 
azure.servicefabric.operations.MeshGatewayOperations + :param credentials: Subscription credentials which uniquely identify client subscription. :type credentials: None @@ -62,10 +89,28 @@ def __init__( super(ServiceFabricClientAPIs, self).__init__(self.config.credentials, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self.api_version = '6.3.0.9' + self.api_version = '6.4.0.36' self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) + self.mesh_secret = MeshSecretOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_secret_value = MeshSecretValueOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_volume = MeshVolumeOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_network = MeshNetworkOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_application = MeshApplicationOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_service = MeshServiceOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_code_package = MeshCodePackageOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_service_replica = MeshServiceReplicaOperations( + self._client, self.config, self._serialize, self._deserialize) + self.mesh_gateway = MeshGatewayOperations( + self._client, self.config, self._serialize, self._deserialize) def get_cluster_manifest( self, timeout=60, custom_headers=None, raw=False, **operation_config): @@ -1318,9 +1363,10 @@ def unprovision_cluster( def rollback_cluster_upgrade( self, timeout=60, custom_headers=None, raw=False, **operation_config): - """Rollback the upgrade of a Service Fabric cluster. + """Roll back the upgrade of a Service Fabric cluster. - Rollback the code or configuration upgrade of a Service Fabric cluster. 
+ Roll back the code or configuration upgrade of a Service Fabric + cluster. :param timeout: The server timeout for performing the operation in seconds. This timeout specifies the time duration that the client is @@ -1664,6 +1710,65 @@ def get_aad_metadata( return deserialized get_aad_metadata.metadata = {'url': '/$/GetAadMetadata'} + def get_cluster_version( + self, timeout=60, custom_headers=None, raw=False, **operation_config): + """Get the current Service Fabric cluster version. + + If a cluster upgrade is happening, then this API will return the lowest + (older) version of the current and target cluster runtime versions. + + :param timeout: The server timeout for performing the operation in + seconds. This timeout specifies the time duration that the client is + willing to wait for the requested operation to complete. The default + value for this parameter is 60 seconds. + :type timeout: long + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ClusterVersion or ClientRawResponse if raw=true + :rtype: ~azure.servicefabric.models.ClusterVersion or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + api_version = "6.4" + + # Construct URL + url = self.get_cluster_version.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'long', maximum=4294967295, minimum=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ClusterVersion', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_cluster_version.metadata = {'url': '/$/GetClusterVersion'} + def get_node_info_list( self, continuation_token=None, node_status_filter="default", max_results=0, timeout=60, custom_headers=None, raw=False, **operation_config): """Gets the list of nodes in the Service Fabric cluster. @@ -6126,6 +6231,12 @@ def resolve_service( :type partition_key_type: int :param partition_key_value: Partition key. This is required if the partition scheme for the service is Int64Range or Named. + This is not the partition ID, but rather, either the integer key + value, or the name of the partition ID. 
+ For example, if your service is using ranged partitions from 0 to 10, + then they PartitionKeyValue would be an + integer in that range. Query service description to see the range or + name. :type partition_key_value: str :param previous_rsp_version: The value in the Version field of the response that was received previously. This is required if the user @@ -6234,7 +6345,7 @@ def get_partition_info_list( :raises: :class:`FabricErrorException` """ - api_version = "6.0" + api_version = "6.4" # Construct URL url = self.get_partition_info_list.metadata['url'] @@ -9742,6 +9853,62 @@ def start_compose_deployment_upgrade( return client_raw_response start_compose_deployment_upgrade.metadata = {'url': '/ComposeDeployments/{deploymentName}/$/Upgrade'} + def start_rollback_compose_deployment_upgrade( + self, deployment_name, timeout=60, custom_headers=None, raw=False, **operation_config): + """Starts rolling back a compose deployment upgrade in the Service Fabric + cluster. + + Rollback a service fabric compose deployment upgrade. + + :param deployment_name: The identity of the deployment. + :type deployment_name: str + :param timeout: The server timeout for performing the operation in + seconds. This timeout specifies the time duration that the client is + willing to wait for the requested operation to complete. The default + value for this parameter is 60 seconds. + :type timeout: long + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`FabricErrorException` + """ + api_version = "6.4-preview" + + # Construct URL + url = self.start_rollback_compose_deployment_upgrade.metadata['url'] + path_format_arguments = { + 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', skip_quote=True) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'long', maximum=4294967295, minimum=1) + + # Construct headers + header_parameters = {} + if custom_headers: + header_parameters.update(custom_headers) + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.FabricErrorException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + start_rollback_compose_deployment_upgrade.metadata = {'url': '/ComposeDeployments/{deploymentName}/$/RollbackUpgrade'} + def get_chaos( self, timeout=60, custom_headers=None, raw=False, **operation_config): """Get the status of Chaos. @@ -10285,8 +10452,8 @@ def delete_image_store_content( """Deletes existing image store content. Deletes existing image store content being found within the given image - store relative path. This can be used to delete uploaded application - packages once they are provisioned. + store relative path. This command can be used to delete uploaded + application packages once they are provisioned. :param content_path: Relative path to file or folder in the image store from its root. 
@@ -11579,7 +11746,7 @@ def get_fault_operation_list( """Gets a list of user-induced fault operations filtered by provided input. - Gets the a list of user-induced fault operations filtered by provided + Gets the list of user-induced fault operations filtered by provided input. :param type_filter: Used to filter on OperationType for user-induced @@ -11684,7 +11851,7 @@ def cancel_operation( :param operation_id: A GUID that identifies a call of this API. This is passed into the corresponding GetProgress API :type operation_id: str - :param force: Indicates whether to gracefully rollback and clean up + :param force: Indicates whether to gracefully roll back and clean up internal system state modified by executing the user-induced operation. :type force: bool @@ -11758,7 +11925,7 @@ def create_backup_policy( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.create_backup_policy.metadata['url'] @@ -11816,7 +11983,7 @@ def delete_backup_policy( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.delete_backup_policy.metadata['url'] @@ -11887,7 +12054,7 @@ def get_backup_policy_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_backup_policy_list.metadata['url'] @@ -11951,7 +12118,7 @@ def get_backup_policy_by_name( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_backup_policy_by_name.metadata['url'] @@ -12032,7 +12199,7 @@ def get_all_entities_backed_up_by_policy( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_all_entities_backed_up_by_policy.metadata['url'] @@ -12102,7 +12269,7 @@ def update_backup_policy( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = 
"6.4" # Construct URL url = self.update_backup_policy.metadata['url'] @@ -12178,7 +12345,7 @@ def enable_application_backup( """ enable_backup_description = models.EnableBackupDescription(backup_policy_name=backup_policy_name) - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.enable_application_backup.metadata['url'] @@ -12215,7 +12382,7 @@ def enable_application_backup( enable_application_backup.metadata = {'url': '/Applications/{applicationId}/$/EnableBackup'} def disable_application_backup( - self, application_id, timeout=60, custom_headers=None, raw=False, **operation_config): + self, application_id, clean_backup, timeout=60, custom_headers=None, raw=False, **operation_config): """Disables periodic backup of Service Fabric application. Disables periodic backup of Service Fabric application which was @@ -12230,6 +12397,10 @@ def disable_application_backup( application identity would be "myapp~app1" in 6.0+ and "myapp/app1" in previous versions. :type application_id: str + :param clean_backup: Boolean flag to delete backups. It can be set to + true for deleting all the backups which were created for the backup + entity that is getting disabled for backup. + :type clean_backup: bool :param timeout: The server timeout for performing the operation in seconds. This timeout specifies the time duration that the client is willing to wait for the requested operation to complete. 
The default @@ -12245,7 +12416,11 @@ def disable_application_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + disable_backup_description = None + if clean_backup is not None: + disable_backup_description = models.DisableBackupDescription(clean_backup=clean_backup) + + api_version = "6.4" # Construct URL url = self.disable_application_backup.metadata['url'] @@ -12262,11 +12437,18 @@ def disable_application_backup( # Construct headers header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) + # Construct body + if disable_backup_description is not None: + body_content = self._serialize.body(disable_backup_description, 'DisableBackupDescription') + else: + body_content = None + # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) + request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [202]: @@ -12326,7 +12508,7 @@ def get_application_backup_configuration_info( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_application_backup_configuration_info.metadata['url'] @@ -12435,7 +12617,7 @@ def get_application_backup_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_application_backup_list.metadata['url'] @@ -12519,7 +12701,7 @@ def suspend_application_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.suspend_application_backup.metadata['url'] @@ -12584,7 +12766,7 @@ def resume_application_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = 
self.resume_application_backup.metadata['url'] @@ -12659,7 +12841,7 @@ def enable_service_backup( """ enable_backup_description = models.EnableBackupDescription(backup_policy_name=backup_policy_name) - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.enable_service_backup.metadata['url'] @@ -12696,7 +12878,7 @@ def enable_service_backup( enable_service_backup.metadata = {'url': '/Services/{serviceId}/$/EnableBackup'} def disable_service_backup( - self, service_id, timeout=60, custom_headers=None, raw=False, **operation_config): + self, service_id, clean_backup, timeout=60, custom_headers=None, raw=False, **operation_config): """Disables periodic backup of Service Fabric service which was previously enabled. @@ -12714,6 +12896,10 @@ def disable_service_backup( service identity would be "myapp~app1~svc1" in 6.0+ and "myapp/app1/svc1" in previous versions. :type service_id: str + :param clean_backup: Boolean flag to delete backups. It can be set to + true for deleting all the backups which were created for the backup + entity that is getting disabled for backup. + :type clean_backup: bool :param timeout: The server timeout for performing the operation in seconds. This timeout specifies the time duration that the client is willing to wait for the requested operation to complete. 
The default @@ -12729,7 +12915,11 @@ def disable_service_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + disable_backup_description = None + if clean_backup is not None: + disable_backup_description = models.DisableBackupDescription(clean_backup=clean_backup) + + api_version = "6.4" # Construct URL url = self.disable_service_backup.metadata['url'] @@ -12746,11 +12936,18 @@ def disable_service_backup( # Construct headers header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) + # Construct body + if disable_backup_description is not None: + body_content = self._serialize.body(disable_backup_description, 'DisableBackupDescription') + else: + body_content = None + # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) + request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [202]: @@ -12809,7 +13006,7 @@ def get_service_backup_configuration_info( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_service_backup_configuration_info.metadata['url'] @@ -12916,7 +13113,7 @@ def get_service_backup_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_service_backup_list.metadata['url'] @@ -12998,7 +13195,7 @@ def suspend_service_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.suspend_service_backup.metadata['url'] @@ -13061,7 +13258,7 @@ def resume_service_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.resume_service_backup.metadata['url'] @@ -13127,7 
+13324,7 @@ def enable_partition_backup( """ enable_backup_description = models.EnableBackupDescription(backup_policy_name=backup_policy_name) - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.enable_partition_backup.metadata['url'] @@ -13164,7 +13361,7 @@ def enable_partition_backup( enable_partition_backup.metadata = {'url': '/Partitions/{partitionId}/$/EnableBackup'} def disable_partition_backup( - self, partition_id, timeout=60, custom_headers=None, raw=False, **operation_config): + self, partition_id, clean_backup, timeout=60, custom_headers=None, raw=False, **operation_config): """Disables periodic backup of Service Fabric partition which was previously enabled. @@ -13177,6 +13374,10 @@ def disable_partition_backup( :param partition_id: The identity of the partition. :type partition_id: str + :param clean_backup: Boolean flag to delete backups. It can be set to + true for deleting all the backups which were created for the backup + entity that is getting disabled for backup. + :type clean_backup: bool :param timeout: The server timeout for performing the operation in seconds. This timeout specifies the time duration that the client is willing to wait for the requested operation to complete. 
The default @@ -13192,7 +13393,11 @@ def disable_partition_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + disable_backup_description = None + if clean_backup is not None: + disable_backup_description = models.DisableBackupDescription(clean_backup=clean_backup) + + api_version = "6.4" # Construct URL url = self.disable_partition_backup.metadata['url'] @@ -13209,11 +13414,18 @@ def disable_partition_backup( # Construct headers header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' if custom_headers: header_parameters.update(custom_headers) + # Construct body + if disable_backup_description is not None: + body_content = self._serialize.body(disable_backup_description, 'DisableBackupDescription') + else: + body_content = None + # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) + request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [202]: @@ -13250,7 +13462,7 @@ def get_partition_backup_configuration_info( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_backup_configuration_info.metadata['url'] @@ -13331,7 +13543,7 @@ def get_partition_backup_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_backup_list.metadata['url'] @@ -13401,7 +13613,7 @@ def suspend_partition_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.suspend_partition_backup.metadata['url'] @@ -13457,7 +13669,7 @@ def resume_partition_backup( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = 
self.resume_partition_backup.metadata['url'] @@ -13535,7 +13747,7 @@ def backup_partition( if backup_storage is not None: backup_partition_description = models.BackupPartitionDescription(backup_storage=backup_storage) - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.backup_partition.metadata['url'] @@ -13601,7 +13813,7 @@ def get_partition_backup_progress( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_backup_progress.metadata['url'] @@ -13685,7 +13897,7 @@ def restore_partition( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.restore_partition.metadata['url'] @@ -13749,7 +13961,7 @@ def get_partition_restore_progress( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_restore_progress.metadata['url'] @@ -13834,7 +14046,7 @@ def get_backups_from_backup_location( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_backups_from_backup_location.metadata['url'] @@ -14529,7 +14741,7 @@ def get_cluster_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_cluster_event_list.metadata['url'] @@ -14699,7 +14911,7 @@ def get_node_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_node_event_list.metadata['url'] @@ -14787,7 +14999,7 @@ def get_nodes_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_nodes_event_list.metadata['url'] @@ -14880,7 +15092,7 @@ def get_application_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = 
"6.4" # Construct URL url = self.get_application_event_list.metadata['url'] @@ -14968,7 +15180,7 @@ def get_applications_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_applications_event_list.metadata['url'] @@ -15060,7 +15272,7 @@ def get_service_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_service_event_list.metadata['url'] @@ -15148,7 +15360,7 @@ def get_services_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_services_event_list.metadata['url'] @@ -15234,7 +15446,7 @@ def get_partition_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_event_list.metadata['url'] @@ -15322,7 +15534,7 @@ def get_partitions_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partitions_event_list.metadata['url'] @@ -15410,7 +15622,7 @@ def get_partition_replica_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_replica_event_list.metadata['url'] @@ -15501,7 +15713,7 @@ def get_partition_replicas_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_partition_replicas_event_list.metadata['url'] @@ -15573,7 +15785,7 @@ def get_correlated_event_list( :raises: :class:`FabricErrorException` """ - api_version = "6.2-preview" + api_version = "6.4" # Construct URL url = self.get_correlated_event_list.metadata['url'] @@ -15612,591 +15824,3 @@ def get_correlated_event_list( return deserialized get_correlated_event_list.metadata = {'url': 
'/EventsStore/CorrelatedEvents/{eventInstanceId}/$/Events'} - - def create_application_resource( - self, application_resource_name, application_resource_description, custom_headers=None, raw=False, **operation_config): - """Creates or updates an application resource. - - Creates an application with the specified name and description. If an - application with the same name already exists, then its description are - updated to the one indicated in this request. - - :param application_resource_name: Service Fabric application resource - name. - :type application_resource_name: str - :param application_resource_description: Description for creating an - application resource. - :type application_resource_description: - ~azure.servicefabric.models.ApplicationResourceDescription - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: None or ClientRawResponse if raw=true - :rtype: None or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`FabricErrorException` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.create_application_resource.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct body - body_content = self._serialize.body(application_resource_description, 'ApplicationResourceDescription') - - # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [201, 202]: - raise models.FabricErrorException(self._deserialize, response) - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - create_application_resource.metadata = {'url': '/Resources/Applications/{applicationResourceName}'} - - def get_application_resource( - self, application_resource_name, custom_headers=None, raw=False, **operation_config): - """Gets the application with the given name. - - Gets the application with the given name. This includes the information - about the application's services and other runtime information. - - :param application_resource_name: Service Fabric application resource - name. 
- :type application_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: ApplicationResourceDescription or ClientRawResponse if - raw=true - :rtype: ~azure.servicefabric.models.ApplicationResourceDescription or - ~msrest.pipeline.ClientRawResponse - :raises: - :class:`FabricErrorException` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.get_application_resource.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise models.FabricErrorException(self._deserialize, response) - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('ApplicationResourceDescription', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - get_application_resource.metadata = {'url': '/Resources/Applications/{applicationResourceName}'} - - def delete_application_resource( - self, application_resource_name, custom_headers=None, raw=False, **operation_config): - """Deletes the specified application. 
- - Deletes the application identified by the name. - - :param application_resource_name: Service Fabric application resource - name. - :type application_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: None or ClientRawResponse if raw=true - :rtype: None or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`FabricErrorException` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.delete_application_resource.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.delete(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200, 202, 204]: - raise models.FabricErrorException(self._deserialize, response) - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - delete_application_resource.metadata = {'url': '/Resources/Applications/{applicationResourceName}'} - - def get_services( - self, application_resource_name, custom_headers=None, raw=False, **operation_config): - """Gets all the services in the application resource. - - The operation returns the service descriptions of all the services in - the application resource. . - - :param application_resource_name: Service Fabric application resource - name. 
- :type application_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: PagedServiceResourceDescriptionList or ClientRawResponse if - raw=true - :rtype: - ~azure.servicefabric.models.PagedServiceResourceDescriptionList or - ~msrest.pipeline.ClientRawResponse - :raises: - :class:`HttpOperationError` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.get_services.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise HttpOperationError(self._deserialize, response) - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('PagedServiceResourceDescriptionList', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - get_services.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services'} - - def get_service( - self, application_resource_name, service_resource_name, custom_headers=None, raw=False, **operation_config): - """Gets the description of the specified service in an application - resource. 
- - Gets the description of the service resource. - - :param application_resource_name: Service Fabric application resource - name. - :type application_resource_name: str - :param service_resource_name: Service Fabric service resource name. - :type service_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: ServiceResourceDescription or ClientRawResponse if raw=true - :rtype: ~azure.servicefabric.models.ServiceResourceDescription or - ~msrest.pipeline.ClientRawResponse - :raises: - :class:`HttpOperationError` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.get_service.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), - 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise HttpOperationError(self._deserialize, response) - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('ServiceResourceDescription', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return 
deserialized - get_service.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}'} - - def get_replicas( - self, application_resource_name, service_resource_name, custom_headers=None, raw=False, **operation_config): - """Gets replicas of a given service in an application resource. - - Gets the information about all replicas of a given service of an - application. The information includes the runtime properties of the - replica instance. - - :param application_resource_name: Service Fabric application resource - name. - :type application_resource_name: str - :param service_resource_name: Service Fabric service resource name. - :type service_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: PagedServiceResourceReplicaDescriptionList or - ClientRawResponse if raw=true - :rtype: - ~azure.servicefabric.models.PagedServiceResourceReplicaDescriptionList - or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`HttpOperationError` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.get_replicas.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), - 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = 
self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise HttpOperationError(self._deserialize, response) - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('PagedServiceResourceReplicaDescriptionList', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - get_replicas.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}/replicas'} - - def get_replica( - self, application_resource_name, service_resource_name, replica_name, custom_headers=None, raw=False, **operation_config): - """Gets a specific replica of a given service in an application resource. - - Gets the information about the specified replica of a given service of - an application. The information includes the runtime properties of the - replica instance. - - :param application_resource_name: Service Fabric application resource - name. - :type application_resource_name: str - :param service_resource_name: Service Fabric service resource name. - :type service_resource_name: str - :param replica_name: Service Fabric replica name. - :type replica_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: ServiceResourceReplicaDescription or ClientRawResponse if - raw=true - :rtype: ~azure.servicefabric.models.ServiceResourceReplicaDescription - or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`HttpOperationError` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.get_replica.metadata['url'] - path_format_arguments = { - 'applicationResourceName': self._serialize.url("application_resource_name", application_resource_name, 'str', skip_quote=True), - 'serviceResourceName': self._serialize.url("service_resource_name", service_resource_name, 'str', skip_quote=True), - 'replicaName': self._serialize.url("replica_name", replica_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise HttpOperationError(self._deserialize, response) - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('ServiceResourceReplicaDescription', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - get_replica.metadata = {'url': '/Resources/Applications/{applicationResourceName}/Services/{serviceResourceName}/Replicas/{replicaName}'} - - def create_volume_resource( - self, volume_resource_name, volume_resource_description, custom_headers=None, raw=False, **operation_config): - """Creates or updates a volume resource. 
- - Creates a volume resource with the specified name and description. If a - volume with the same name already exists, then its description is - updated to the one indicated in this request. - - :param volume_resource_name: Service Fabric volume resource name. - :type volume_resource_name: str - :param volume_resource_description: Description for creating a volume - resource. - :type volume_resource_description: - ~azure.servicefabric.models.VolumeResourceDescription - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: None or ClientRawResponse if raw=true - :rtype: None or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`FabricErrorException` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.create_volume_resource.metadata['url'] - path_format_arguments = { - 'volumeResourceName': self._serialize.url("volume_resource_name", volume_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct body - body_content = self._serialize.body(volume_resource_description, 'VolumeResourceDescription') - - # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [201, 202]: - raise models.FabricErrorException(self._deserialize, response) - - if raw: - client_raw_response = ClientRawResponse(None, response) - return 
client_raw_response - create_volume_resource.metadata = {'url': '/Resources/Volumes/{volumeResourceName}'} - - def get_volume_resource( - self, volume_resource_name, custom_headers=None, raw=False, **operation_config): - """Gets the volume resource. - - Gets the information about the volume resource with a given name. This - information includes the volume description and other runtime - information. - - :param volume_resource_name: Service Fabric volume resource name. - :type volume_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: VolumeResourceDescription or ClientRawResponse if raw=true - :rtype: ~azure.servicefabric.models.VolumeResourceDescription or - ~msrest.pipeline.ClientRawResponse - :raises: - :class:`FabricErrorException` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.get_volume_resource.metadata['url'] - path_format_arguments = { - 'volumeResourceName': self._serialize.url("volume_resource_name", volume_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise models.FabricErrorException(self._deserialize, response) - - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('VolumeResourceDescription', response) - - if 
raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - get_volume_resource.metadata = {'url': '/Resources/Volumes/{volumeResourceName}'} - - def delete_volume_resource( - self, volume_resource_name, custom_headers=None, raw=False, **operation_config): - """Deletes the volume resource. - - Deletes the volume identified by the name. - - :param volume_resource_name: Service Fabric volume resource name. - :type volume_resource_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: None or ClientRawResponse if raw=true - :rtype: None or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`FabricErrorException` - """ - api_version = "6.3-preview" - - # Construct URL - url = self.delete_volume_resource.metadata['url'] - path_format_arguments = { - 'volumeResourceName': self._serialize.url("volume_resource_name", volume_resource_name, 'str', skip_quote=True) - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} - if custom_headers: - header_parameters.update(custom_headers) - - # Construct and send request - request = self._client.delete(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200, 202, 204]: - raise models.FabricErrorException(self._deserialize, response) - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - delete_volume_resource.metadata = {'url': '/Resources/Volumes/{volumeResourceName}'} diff --git 
a/azure-servicefabric/azure/servicefabric/version.py b/azure-servicefabric/azure/servicefabric/version.py index 04fad6f3ebf3..d00fda4b436a 100644 --- a/azure-servicefabric/azure/servicefabric/version.py +++ b/azure-servicefabric/azure/servicefabric/version.py @@ -9,5 +9,5 @@ # regenerated. # -------------------------------------------------------------------------- -VERSION = "6.3.0.0" +VERSION = "6.4.0.0"