From 1df6153839bba0754885a59f10a09d6ca7fd7431 Mon Sep 17 00:00:00 2001
From: Azure SDK for Python bot
Date: Wed, 29 May 2019 07:40:30 +0000
Subject: [PATCH 1/4] Generated from 4e1af64b8fd59b4c121cb27978372ef9f516fcbb

finally modify

---
 .../azure/mgmt/datafactory/models/__init__.py | 150 +++++++++++++++---
 .../datafactory/models/amazon_s3_location.py | 55 +++++++
 .../models/amazon_s3_location_py3.py | 55 +++++++
 .../models/amazon_s3_read_setting.py | 78 +++++++++
 .../models/amazon_s3_read_setting_py3.py | 78 +++++++++
 .../models/azure_blob_fs_location.py | 50 ++++++
 .../models/azure_blob_fs_location_py3.py | 50 ++++++
 .../models/azure_blob_fs_read_setting.py | 73 +++++++++
 .../models/azure_blob_fs_read_setting_py3.py | 73 +++++++++
 .../datafactory/models/azure_blob_fs_sink.py | 8 +-
 .../models/azure_blob_fs_sink_py3.py | 8 +-
 .../models/azure_blob_fs_write_setting.py | 45 ++++++
 .../models/azure_blob_fs_write_setting_py3.py | 45 ++++++
 .../models/azure_blob_storage_location.py | 50 ++++++
 .../models/azure_blob_storage_location_py3.py | 50 ++++++
 .../models/azure_blob_storage_read_setting.py | 73 +++++++++
 .../azure_blob_storage_read_setting_py3.py | 73 +++++++++
 .../azure_blob_storage_write_setting.py | 45 ++++++
 .../azure_blob_storage_write_setting_py3.py | 45 ++++++
 .../models/azure_data_lake_store_location.py | 45 ++++++
 .../azure_data_lake_store_location_py3.py | 45 ++++++
 .../azure_data_lake_store_read_setting.py | 73 +++++++++
 .../azure_data_lake_store_read_setting_py3.py | 73 +++++++++
 .../models/azure_data_lake_store_sink.py | 12 +-
 .../models/azure_data_lake_store_sink_py3.py | 14 +-
 .../azure_data_lake_store_write_setting.py | 45 ++++++
 ...azure_data_lake_store_write_setting_py3.py | 45 ++++++
 .../models/azure_search_index_sink.py | 7 +-
 .../models/azure_search_index_sink_py3.py | 7 +-
 .../mgmt/datafactory/models/azure_sql_sink.py | 87 ++++++++++
 .../datafactory/models/azure_sql_sink_py3.py | 87 ++++++++++
 .../datafactory/models/azure_sql_source.py | 73 +++++++++
 .../models/azure_sql_source_py3.py | 73 +++++++++
 .../mgmt/datafactory/models/blob_sink.py | 8 +-
 .../mgmt/datafactory/models/blob_sink_py3.py | 8 +-
 .../datafactory/models/cassandra_source.py | 9 +-
 .../models/cassandra_source_py3.py | 9 +-
 .../models/connector_read_setting.py | 45 ++++++
 .../models/connector_read_setting_py3.py | 45 ++++++
 .../models/connector_write_setting.py | 49 ++++++
 .../models/connector_write_setting_py3.py | 49 ++++++
 .../mgmt/datafactory/models/copy_activity.py | 8 +-
 .../datafactory/models/copy_activity_py3.py | 10 +-
 .../mgmt/datafactory/models/copy_sink.py | 9 +-
 .../mgmt/datafactory/models/copy_sink_py3.py | 9 +-
 .../mgmt/datafactory/models/copy_source.py | 10 +-
 .../datafactory/models/copy_source_py3.py | 10 +-
 .../data_factory_management_client_enums.py | 64 +-------
 .../azure/mgmt/datafactory/models/dataset.py | 5 +-
 .../models/dataset_deflate_compression.py | 7 +-
 .../models/dataset_deflate_compression_py3.py | 7 +-
 .../models/dataset_gzip_compression.py | 7 +-
 .../models/dataset_gzip_compression_py3.py | 7 +-
 .../datafactory/models/dataset_location.py | 49 ++++++
 .../models/dataset_location_py3.py | 49 ++++++
 .../mgmt/datafactory/models/dataset_py3.py | 5 +-
 .../models/dataset_zip_deflate_compression.py | 7 +-
 .../dataset_zip_deflate_compression_py3.py | 7 +-
 .../models/delimited_text_dataset.py | 122 ++++++++++++++
 .../models/delimited_text_dataset_py3.py | 122 ++++++++++++++
 .../models/delimited_text_read_setting.py | 48 ++++++
 .../models/delimited_text_read_setting_py3.py | 48 ++++++
 .../datafactory/models/delimited_text_sink.py | 70 ++++++++
 .../models/delimited_text_sink_py3.py | 70 ++++++++
 .../models/delimited_text_source.py | 61 +++++++
 .../models/delimited_text_source_py3.py | 61 +++++++
 .../models/delimited_text_write_setting.py | 49 ++++++
 .../delimited_text_write_setting_py3.py | 49 ++++++
 .../models/document_db_collection_sink.py | 5 +
 .../models/document_db_collection_sink_py3.py | 7 +-
 .../models/dynamics_ax_resource_dataset.py | 4 +-
 .../dynamics_ax_resource_dataset_py3.py | 6 +-
 .../mgmt/datafactory/models/dynamics_sink.py | 7 +-
 .../datafactory/models/dynamics_sink_py3.py | 7 +-
 .../datafactory/models/entity_reference.py | 34 ++++
 .../models/entity_reference_py3.py | 34 ++++
 .../models/file_server_location.py | 45 ++++++
 .../models/file_server_location_py3.py | 45 ++++++
 .../models/file_server_read_setting.py | 73 +++++++++
 .../models/file_server_read_setting_py3.py | 73 +++++++++
 .../models/file_server_write_setting.py | 45 ++++++
 .../models/file_server_write_setting_py3.py | 45 ++++++
 .../datafactory/models/file_system_sink.py | 8 +-
 .../models/file_system_sink_py3.py | 8 +-
 ...y_translator.py => format_read_setting.py} | 17 +-
 ...ator_py3.py => format_read_setting_py3.py} | 19 +--
 .../models/format_write_setting.py | 39 +++++
 .../models/format_write_setting_py3.py | 39 +++++
 .../datafactory/models/ftp_read_setting.py | 63 ++++++++
 .../models/ftp_read_setting_py3.py | 63 ++++++++
 .../datafactory/models/ftp_server_location.py | 45 ++++++
 .../models/ftp_server_location_py3.py | 45 ++++++
 .../mgmt/datafactory/models/hdfs_location.py | 45 ++++++
 .../datafactory/models/hdfs_location_py3.py | 45 ++++++
 .../datafactory/models/hdfs_read_setting.py | 77 +++++++++
 .../models/hdfs_read_setting_py3.py | 77 +++++++++
 .../datafactory/models/http_read_setting.py | 63 ++++++++
 .../models/http_read_setting_py3.py | 63 ++++++++
 .../models/http_server_location.py | 50 ++++++
 .../models/http_server_location_py3.py | 50 ++++++
 ...tegration_runtime_data_proxy_properties.py | 37 +++++
 ...ation_runtime_data_proxy_properties_py3.py | 37 +++++
 .../integration_runtime_ssis_properties.py | 6 +
 ...integration_runtime_ssis_properties_py3.py | 8 +-
 .../mgmt/datafactory/models/json_format.py | 8 +-
 .../datafactory/models/json_format_py3.py | 8 +-
 .../models/oracle_partition_settings.py | 46 ++++++
 .../models/oracle_partition_settings_py3.py | 46 ++++++
 .../mgmt/datafactory/models/oracle_source.py | 11 ++
 .../datafactory/models/oracle_source_py3.py | 13 +-
 .../datafactory/models/parquet_dataset.py | 76 +++++++++
 .../datafactory/models/parquet_dataset_py3.py | 76 +++++++++
 .../mgmt/datafactory/models/parquet_sink.py | 61 +++++++
 .../datafactory/models/parquet_sink_py3.py | 61 +++++++
 .../mgmt/datafactory/models/parquet_source.py | 56 +++++++
 .../datafactory/models/parquet_source_py3.py | 56 +++++++
 .../datafactory/models/salesforce_sink.py | 7 +-
 .../datafactory/models/salesforce_sink_py3.py | 7 +-
 .../datafactory/models/salesforce_source.py | 7 +-
 .../models/salesforce_source_py3.py | 7 +-
 .../models/sap_cloud_for_customer_sink.py | 7 +-
 .../models/sap_cloud_for_customer_sink_py3.py | 7 +-
 .../models/sap_ecc_resource_dataset.py | 4 +-
 .../models/sap_ecc_resource_dataset_py3.py | 6 +-
 .../mgmt/datafactory/models/sap_ecc_source.py | 4 +-
 .../datafactory/models/sap_ecc_source_py3.py | 6 +-
 .../mgmt/datafactory/models/sftp_location.py | 45 ++++++
 .../datafactory/models/sftp_location_py3.py | 45 ++++++
 .../datafactory/models/sftp_read_setting.py | 68 ++++++++
 .../models/sftp_read_setting_py3.py | 68 ++++++++
 .../datafactory/models/sql_server_sink.py | 87 ++++++++++
 .../datafactory/models/sql_server_sink_py3.py | 87 ++++++++++
 .../datafactory/models/sql_server_source.py | 73 +++++++++
 .../models/sql_server_source_py3.py | 73 +++++++++
 .../azure/mgmt/datafactory/models/sql_sink.py | 6 +
 .../mgmt/datafactory/models/sql_sink_py3.py | 8 +-
 .../models/stored_procedure_parameter.py | 2 +-
 .../models/stored_procedure_parameter_py3.py | 2 +-
 .../datafactory/models/tabular_translator.py | 57 -------
 .../models/tabular_translator_py3.py | 57 -------
 140 files changed, 5183 insertions(+), 391 deletions(-)
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py
 rename azure-mgmt-datafactory/azure/mgmt/datafactory/models/{copy_translator.py => format_read_setting.py} (73%)
 rename azure-mgmt-datafactory/azure/mgmt/datafactory/models/{copy_translator_py3.py => format_read_setting_py3.py} (68%)
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py
 create mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py
 delete mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py
 delete mode 100644 azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py

diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index f8279c1a99bd..47d04dbdce9d 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -255,6 +255,18 @@
     from .azure_sql_table_dataset_py3 import AzureSqlTableDataset
     from .azure_table_dataset_py3 import AzureTableDataset
     from .azure_blob_dataset_py3 import AzureBlobDataset
+    from .hdfs_location_py3 import HdfsLocation
+    from .http_server_location_py3 import HttpServerLocation
+    from .sftp_location_py3 import SftpLocation
+    from .ftp_server_location_py3 import FtpServerLocation
+    from .file_server_location_py3 import FileServerLocation
+    from .amazon_s3_location_py3 import AmazonS3Location
+    from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation
+    from .azure_blob_fs_location_py3 import AzureBlobFSLocation
+    from .azure_blob_storage_location_py3 import AzureBlobStorageLocation
+    from .dataset_location_py3 import DatasetLocation
+    from .delimited_text_dataset_py3 import DelimitedTextDataset
+    from .parquet_dataset_py3 import ParquetDataset
     from .amazon_s3_dataset_py3 import AmazonS3Dataset
     from .activity_policy_py3 import ActivityPolicy
     from .azure_function_activity_py3 import AzureFunctionActivity
@@ -313,6 +325,7 @@
     from .mongo_db_source_py3 import MongoDbSource
     from .cassandra_source_py3 import CassandraSource
     from .web_source_py3 import WebSource
+    from .oracle_partition_settings_py3 import OraclePartitionSettings
     from .oracle_source_py3 import OracleSource
     from .azure_data_explorer_source_py3 import AzureDataExplorerSource
     from .azure_my_sql_source_py3 import AzureMySqlSource
@@ -321,6 +334,8 @@
     from .file_system_source_py3 import FileSystemSource
     from .sql_dw_source_py3 import SqlDWSource
     from .stored_procedure_parameter_py3 import StoredProcedureParameter
+    from .azure_sql_source_py3 import AzureSqlSource
+    from .sql_server_source_py3 import SqlServerSource
     from .sql_source_py3 import SqlSource
     from .rest_source_py3 import RestSource
     from .sap_open_hub_source_py3 import SapOpenHubSource
@@ -332,6 +347,20 @@
     from .document_db_collection_source_py3 import DocumentDbCollectionSource
     from .blob_source_py3 import BlobSource
     from .azure_table_source_py3 import AzureTableSource
+    from .format_read_setting_py3 import FormatReadSetting
+    from .delimited_text_read_setting_py3 import DelimitedTextReadSetting
+    from .hdfs_read_setting_py3 import HdfsReadSetting
+    from .http_read_setting_py3 import HttpReadSetting
+    from .sftp_read_setting_py3 import SftpReadSetting
+    from .ftp_read_setting_py3 import FtpReadSetting
+    from .file_server_read_setting_py3 import FileServerReadSetting
+    from .amazon_s3_read_setting_py3 import AmazonS3ReadSetting
+    from .azure_data_lake_store_read_setting_py3 import AzureDataLakeStoreReadSetting
+    from .azure_blob_fs_read_setting_py3 import AzureBlobFSReadSetting
+    from .azure_blob_storage_read_setting_py3 import AzureBlobStorageReadSetting
+    from .connector_read_setting_py3 import ConnectorReadSetting
+    from .delimited_text_source_py3 import DelimitedTextSource
+    from .parquet_source_py3 import ParquetSource
     from .copy_source_py3 import CopySource
     from .lookup_activity_py3 import LookupActivity
     from .log_storage_settings_py3 import LogStorageSettings
@@ -351,8 +380,6 @@
     from .hd_insight_hive_activity_py3 import HDInsightHiveActivity
     from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings
     from .staging_settings_py3 import StagingSettings
-    from .tabular_translator_py3 import TabularTranslator
-    from .copy_translator_py3 import CopyTranslator
     from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink
     from .salesforce_sink_py3 import SalesforceSink
     from .azure_data_explorer_sink_py3 import AzureDataExplorerSink
@@ -364,13 +391,24 @@
     from .oracle_sink_py3 import OracleSink
     from .polybase_settings_py3 import PolybaseSettings
     from .sql_dw_sink_py3 import SqlDWSink
+    from .azure_sql_sink_py3 import AzureSqlSink
+    from .sql_server_sink_py3 import SqlServerSink
     from .sql_sink_py3 import SqlSink
     from .document_db_collection_sink_py3 import DocumentDbCollectionSink
     from .file_system_sink_py3 import FileSystemSink
     from .blob_sink_py3 import BlobSink
+    from .parquet_sink_py3 import ParquetSink
     from .azure_table_sink_py3 import AzureTableSink
     from .azure_queue_sink_py3 import AzureQueueSink
     from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink
+    from .format_write_setting_py3 import FormatWriteSetting
+    from .delimited_text_write_setting_py3 import DelimitedTextWriteSetting
+    from .file_server_write_setting_py3 import FileServerWriteSetting
+    from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting
+    from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting
+    from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting
+    from .connector_write_setting_py3 import ConnectorWriteSetting
+    from .delimited_text_sink_py3 import DelimitedTextSink
     from .copy_sink_py3 import CopySink
     from .copy_activity_py3 import CopyActivity
     from .execution_activity_py3 import ExecutionActivity
@@ -396,6 +434,8 @@
     from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization
     from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType
     from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime
+    from .entity_reference_py3 import EntityReference
+    from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties
     from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties
     from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo
     from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties
@@ -663,6 +703,18 @@
     from .azure_sql_table_dataset import AzureSqlTableDataset
     from .azure_table_dataset import AzureTableDataset
     from .azure_blob_dataset import AzureBlobDataset
+    from .hdfs_location import HdfsLocation
+    from .http_server_location import HttpServerLocation
+    from .sftp_location import SftpLocation
+    from .ftp_server_location import FtpServerLocation
+    from .file_server_location import FileServerLocation
+    from .amazon_s3_location import AmazonS3Location
+    from .azure_data_lake_store_location import AzureDataLakeStoreLocation
+    from .azure_blob_fs_location import AzureBlobFSLocation
+    from .azure_blob_storage_location import AzureBlobStorageLocation
+    from .dataset_location import DatasetLocation
+    from .delimited_text_dataset import DelimitedTextDataset
+    from .parquet_dataset import ParquetDataset
     from .amazon_s3_dataset import AmazonS3Dataset
     from .activity_policy import ActivityPolicy
     from .azure_function_activity import AzureFunctionActivity
@@ -721,6 +773,7 @@
     from .mongo_db_source import MongoDbSource
     from .cassandra_source import CassandraSource
     from .web_source import WebSource
+    from .oracle_partition_settings import OraclePartitionSettings
     from .oracle_source import OracleSource
     from .azure_data_explorer_source import AzureDataExplorerSource
     from .azure_my_sql_source import AzureMySqlSource
@@ -729,6 +782,8 @@
     from .file_system_source import FileSystemSource
     from .sql_dw_source import SqlDWSource
     from .stored_procedure_parameter import StoredProcedureParameter
+    from .azure_sql_source import AzureSqlSource
+    from .sql_server_source import SqlServerSource
     from .sql_source import SqlSource
     from .rest_source import RestSource
     from .sap_open_hub_source import SapOpenHubSource
@@ -740,6 +795,20 @@
     from .document_db_collection_source import DocumentDbCollectionSource
     from .blob_source import BlobSource
     from .azure_table_source import AzureTableSource
+    from .format_read_setting import FormatReadSetting
+    from .delimited_text_read_setting import DelimitedTextReadSetting
+    from .hdfs_read_setting import HdfsReadSetting
+    from .http_read_setting import HttpReadSetting
+    from .sftp_read_setting import SftpReadSetting
+    from .ftp_read_setting import FtpReadSetting
+    from .file_server_read_setting import FileServerReadSetting
+    from .amazon_s3_read_setting import AmazonS3ReadSetting
+    from .azure_data_lake_store_read_setting import AzureDataLakeStoreReadSetting
+    from .azure_blob_fs_read_setting import AzureBlobFSReadSetting
+    from .azure_blob_storage_read_setting import AzureBlobStorageReadSetting
+    from .connector_read_setting import ConnectorReadSetting
+    from .delimited_text_source import DelimitedTextSource
+    from .parquet_source import ParquetSource
     from .copy_source import CopySource
     from .lookup_activity import LookupActivity
     from .log_storage_settings import LogStorageSettings
@@ -759,8 +828,6 @@
     from .hd_insight_hive_activity import HDInsightHiveActivity
     from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings
     from .staging_settings import StagingSettings
-    from .tabular_translator import TabularTranslator
-    from .copy_translator import CopyTranslator
     from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink
     from .salesforce_sink import SalesforceSink
     from .azure_data_explorer_sink import AzureDataExplorerSink
@@ -772,13 +839,24 @@
     from .oracle_sink import OracleSink
     from .polybase_settings import PolybaseSettings
     from .sql_dw_sink import SqlDWSink
+    from .azure_sql_sink import AzureSqlSink
+    from .sql_server_sink import SqlServerSink
     from .sql_sink import SqlSink
     from .document_db_collection_sink import DocumentDbCollectionSink
     from .file_system_sink import FileSystemSink
     from .blob_sink import BlobSink
+    from .parquet_sink import ParquetSink
     from .azure_table_sink import AzureTableSink
     from .azure_queue_sink import AzureQueueSink
     from .sap_cloud_for_customer_sink import SapCloudForCustomerSink
+    from .format_write_setting import FormatWriteSetting
+    from .delimited_text_write_setting import DelimitedTextWriteSetting
+    from .file_server_write_setting import FileServerWriteSetting
+    from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting
+    from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting
+    from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting
+    from .connector_write_setting import ConnectorWriteSetting
+    from .delimited_text_sink import DelimitedTextSink
     from .copy_sink import CopySink
     from .copy_activity import CopyActivity
     from .execution_activity import ExecutionActivity
@@ -804,6 +882,8 @@
     from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization
     from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
     from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime
+    from .entity_reference import EntityReference
+    from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties
     from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties
     from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo
     from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties
@@ -874,25 +954,18 @@
         TeradataAuthenticationType,
         Db2AuthenticationType,
         SybaseAuthenticationType,
-        DatasetCompressionLevel,
-        JsonFormatFilePattern,
         AzureFunctionActivityMethod,
         WebActivityMethod,
-        CassandraSourceReadConsistencyLevels,
         StoredProcedureParameterType,
-        SalesforceSourceReadBehavior,
         HDInsightActivityDebugInfoOption,
-        SalesforceSinkWriteBehavior,
-        AzureSearchIndexWriteBehaviorType,
-        CopyBehaviorType,
         PolybaseSettingsRejectType,
-        SapCloudForCustomerSinkWriteBehavior,
         WebHookActivityMethod,
         IntegrationRuntimeType,
         SelfHostedIntegrationRuntimeNodeStatus,
         IntegrationRuntimeUpdateResult,
         IntegrationRuntimeInternalChannelEncryptionMode,
         ManagedIntegrationRuntimeNodeStatus,
+        IntegrationRuntimeEntityReferenceType,
         IntegrationRuntimeSsisCatalogPricingTier,
         IntegrationRuntimeLicenseType,
         IntegrationRuntimeEdition,
@@ -1146,6 +1219,18 @@
     'AzureSqlTableDataset',
     'AzureTableDataset',
     'AzureBlobDataset',
+    'HdfsLocation',
+    'HttpServerLocation',
+    'SftpLocation',
+    'FtpServerLocation',
+    'FileServerLocation',
+    'AmazonS3Location',
+    'AzureDataLakeStoreLocation',
+    'AzureBlobFSLocation',
+    'AzureBlobStorageLocation',
+    'DatasetLocation',
+    'DelimitedTextDataset',
+    'ParquetDataset',
     'AmazonS3Dataset',
     'ActivityPolicy',
     'AzureFunctionActivity',
@@ -1204,6 +1289,7 @@
     'MongoDbSource',
     'CassandraSource',
     'WebSource',
+    'OraclePartitionSettings',
     'OracleSource',
     'AzureDataExplorerSource',
     'AzureMySqlSource',
@@ -1212,6 +1298,8 @@
     'FileSystemSource',
     'SqlDWSource',
     'StoredProcedureParameter',
+    'AzureSqlSource',
+    'SqlServerSource',
     'SqlSource',
     'RestSource',
     'SapOpenHubSource',
@@ -1223,6 +1311,20 @@
     'DocumentDbCollectionSource',
     'BlobSource',
     'AzureTableSource',
+    'FormatReadSetting',
+    'DelimitedTextReadSetting',
+    'HdfsReadSetting',
+    'HttpReadSetting',
+    'SftpReadSetting',
+    'FtpReadSetting',
+    'FileServerReadSetting',
+    'AmazonS3ReadSetting',
+    'AzureDataLakeStoreReadSetting',
+    'AzureBlobFSReadSetting',
+    'AzureBlobStorageReadSetting',
+    'ConnectorReadSetting',
+    'DelimitedTextSource',
+    'ParquetSource',
     'CopySource',
     'LookupActivity',
     'LogStorageSettings',
@@ -1242,8 +1344,6 @@
     'HDInsightHiveActivity',
     'RedirectIncompatibleRowSettings',
     'StagingSettings',
-    'TabularTranslator',
-    'CopyTranslator',
     'CosmosDbMongoDbApiSink',
     'SalesforceSink',
     'AzureDataExplorerSink',
@@ -1255,13 +1355,24 @@
     'OracleSink',
     'PolybaseSettings',
     'SqlDWSink',
+    'AzureSqlSink',
+    'SqlServerSink',
     'SqlSink',
     'DocumentDbCollectionSink',
     'FileSystemSink',
     'BlobSink',
+    'ParquetSink',
     'AzureTableSink',
     'AzureQueueSink',
     'SapCloudForCustomerSink',
+    'FormatWriteSetting',
+    'DelimitedTextWriteSetting',
+    'FileServerWriteSetting',
+    'AzureDataLakeStoreWriteSetting',
+    'AzureBlobFSWriteSetting',
+    'AzureBlobStorageWriteSetting',
+    'ConnectorWriteSetting',
+    'DelimitedTextSink',
     'CopySink',
     'CopyActivity',
     'ExecutionActivity',
@@ -1287,6 +1398,8 @@
     'LinkedIntegrationRuntimeKeyAuthorization',
     'LinkedIntegrationRuntimeType',
     'SelfHostedIntegrationRuntime',
+    'EntityReference',
+    'IntegrationRuntimeDataProxyProperties',
     'IntegrationRuntimeCustomSetupScriptProperties',
     'IntegrationRuntimeSsisCatalogInfo',
     'IntegrationRuntimeSsisProperties',
@@ -1356,25 +1469,18 @@
     'TeradataAuthenticationType',
     'Db2AuthenticationType',
     'SybaseAuthenticationType',
-    'DatasetCompressionLevel',
-    'JsonFormatFilePattern',
     'AzureFunctionActivityMethod',
     'WebActivityMethod',
-    'CassandraSourceReadConsistencyLevels',
     'StoredProcedureParameterType',
-    'SalesforceSourceReadBehavior',
     'HDInsightActivityDebugInfoOption',
-    'SalesforceSinkWriteBehavior',
-    'AzureSearchIndexWriteBehaviorType',
-    'CopyBehaviorType',
     'PolybaseSettingsRejectType',
-    'SapCloudForCustomerSinkWriteBehavior',
     'WebHookActivityMethod',
     'IntegrationRuntimeType',
     'SelfHostedIntegrationRuntimeNodeStatus',
     'IntegrationRuntimeUpdateResult',
     'IntegrationRuntimeInternalChannelEncryptionMode',
     'ManagedIntegrationRuntimeNodeStatus',
+    'IntegrationRuntimeEntityReferenceType',
     'IntegrationRuntimeSsisCatalogPricingTier',
     'IntegrationRuntimeLicenseType',
     'IntegrationRuntimeEdition',

diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
new file mode 100644
index 000000000000..74c77a16f0f2
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class AmazonS3Location(DatasetLocation):
+    """The location of amazon S3 dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string)
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    :param bucket_name: Specify the bucketName of amazon S3. Type: string (or
+     Expression with resultType string)
+    :type bucket_name: object
+    :param version: Specify the version of amazon S3. Type: string (or
+     Expression with resultType string).
+    :type version: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+        'bucket_name': {'key': 'bucketName', 'type': 'object'},
+        'version': {'key': 'version', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AmazonS3Location, self).__init__(**kwargs)
+        self.bucket_name = kwargs.get('bucket_name', None)
+        self.version = kwargs.get('version', None)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
new file mode 100644
index 000000000000..36afce341ada
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
@@ -0,0 +1,55 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class AmazonS3Location(DatasetLocation):
+    """The location of amazon S3 dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string)
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    :param bucket_name: Specify the bucketName of amazon S3. Type: string (or
+     Expression with resultType string)
+    :type bucket_name: object
+    :param version: Specify the version of amazon S3. Type: string (or
+     Expression with resultType string).
+    :type version: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+        'bucket_name': {'key': 'bucketName', 'type': 'object'},
+        'version': {'key': 'version', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None:
+        super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+        self.bucket_name = bucket_name
+        self.version = version
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
new file mode 100644
index 000000000000..4de7e0ebb7b9
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class AmazonS3ReadSetting(ConnectorReadSetting):
+    """Azure data lake store read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param prefix: The prefix filter for the S3 object name. Type: string (or
+     Expression with resultType string).
+    :type prefix: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'prefix': {'key': 'prefix', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AmazonS3ReadSetting, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.prefix = kwargs.get('prefix', None)
+        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
new file mode 100644
index 000000000000..deda331ea561
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class AmazonS3ReadSetting(ConnectorReadSetting):
+    """Azure data lake store read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param prefix: The prefix filter for the S3 object name. Type: string (or
+     Expression with resultType string).
+    :type prefix: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'prefix': {'key': 'prefix', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
+        super(AmazonS3ReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.recursive = recursive
+        self.wildcard_folder_path = wildcard_folder_path
+        self.wildcard_file_name = wildcard_file_name
+        self.prefix = prefix
+        self.enable_partition_discovery = enable_partition_discovery
+        self.modified_datetime_start = modified_datetime_start
+        self.modified_datetime_end = modified_datetime_end
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
new file mode 100644
index 000000000000..c21525bbac4c
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class AzureBlobFSLocation(DatasetLocation):
+    """The location of azure blobFS dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string)
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    :param file_system: Specify the fileSystem of azure blobFS. Type: string
+     (or Expression with resultType string).
+    :type file_system: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+        'file_system': {'key': 'fileSystem', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureBlobFSLocation, self).__init__(**kwargs)
+        self.file_system = kwargs.get('file_system', None)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
new file mode 100644
index 000000000000..afbae52fdeb0
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class AzureBlobFSLocation(DatasetLocation):
+    """The location of azure blobFS dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of dataset. Type: string (or
+     Expression with resultType string)
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    :param file_system: Specify the fileSystem of azure blobFS. Type: string
+     (or Expression with resultType string).
+    :type file_system: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+        'file_system': {'key': 'fileSystem', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None:
+        super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+        self.file_system = file_system
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py
new file mode 100644
index 000000000000..11490a288417
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class AzureBlobFSReadSetting(ConnectorReadSetting):
+    """Azure blobFS read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string
+     (or Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureBlobFSReadSetting, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py
new file mode 100644
index 000000000000..28f3b4f7ceb4
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class AzureBlobFSReadSetting(ConnectorReadSetting):
+    """Azure blobFS read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string
+     (or Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
+        super(AzureBlobFSReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.recursive = recursive
+        self.wildcard_folder_path = wildcard_folder_path
+        self.wildcard_file_name = wildcard_file_name
+        self.enable_partition_discovery = enable_partition_discovery
+        self.modified_datetime_start = modified_datetime_start
+        self.modified_datetime_end = modified_datetime_end
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
index 59e070c64fe8..a47b173c6581 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
@@ -40,10 +40,8 @@ class AzureBlobFSSink(CopySink):
     :type max_concurrent_connections: object
     :param type: Required. Constant filled by server.
     :type type: str
-    :param copy_behavior: The type of copy behavior for copy sink. Possible
-     values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles'
-    :type copy_behavior: str or
-     ~azure.mgmt.datafactory.models.CopyBehaviorType
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
     """
 
     _validation = {
@@ -58,7 +56,7 @@ class AzureBlobFSSink(CopySink):
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'str'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
index 35ad6a97dbfe..e2b28bf30a8c 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
@@ -40,10 +40,8 @@ class AzureBlobFSSink(CopySink):
     :type max_concurrent_connections: object
     :param type: Required. Constant filled by server.
     :type type: str
-    :param copy_behavior: The type of copy behavior for copy sink. Possible
-     values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles'
-    :type copy_behavior: str or
-     ~azure.mgmt.datafactory.models.CopyBehaviorType
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
     """
 
     _validation = {
@@ -58,7 +56,7 @@ class AzureBlobFSSink(CopySink):
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
-        'copy_behavior': {'key': 'copyBehavior', 'type': 'str'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
 
     def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py
new file mode 100644
index 000000000000..d5b2d850da58
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_write_setting import ConnectorWriteSetting
+
+
+class AzureBlobFSWriteSetting(ConnectorWriteSetting):
+    """Azure blobFS write settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The write setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureBlobFSWriteSetting, self).__init__(**kwargs)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py
new file mode 100644
index 000000000000..62196ff73838
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py new file mode 100644 index 000000000000..1efbbeaec352 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
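The location models follow the same pattern; a sketch for the blob location being added here (the type string is again an assumption matching the class name):

from azure.mgmt.datafactory.models import AzureBlobStorageLocation

# container, folder_path and file_name compose the full blob path; any
# unmatched JSON properties round-trip through additional_properties (the
# empty-string key in _attribute_map marks it as the catch-all).
location = AzureBlobStorageLocation(
    type='AzureBlobStorageLocation',  # assumed discriminator string
    container='raw',
    folder_path='sales/2019',
    file_name='may.csv',
)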
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py new file mode 100644 index 000000000000..ee07a3576f29 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py new file mode 100644 index 000000000000..3e3d35774a46 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py new file mode 100644 index 000000000000..a6499dfda798 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
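The write-setting models drop the CopyBehaviorType enum in favor of object, so callers now pass a plain value; a sketch using one of the constants the removed docstrings listed:

from azure.mgmt.datafactory.models import AzureBlobStorageWriteSetting

# 'PreserveHierarchy', 'FlattenHierarchy' and 'MergeFiles' were the values
# documented before copy_behavior was loosened to object; plain strings
# still serialize unchanged.
write_settings = AzureBlobStorageWriteSetting(
    type='AzureBlobStorageWriteSetting',  # assumed discriminator string
    max_concurrent_connections=4,
    copy_behavior='PreserveHierarchy',
)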
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSetting, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py new file mode 100644 index 000000000000..9abb68c06055 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobStorageWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py new file mode 100644 index 000000000000..a4bf521a2005 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLocation, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py new file mode 100644 index 000000000000..e7955731fc31 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py new file mode 100644 index 000000000000..0f0dfe7f7c58 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureDataLakeStoreReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py new file mode 100644 index 000000000000..b9159463d681 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureDataLakeStoreReadSetting(ConnectorReadSetting): + """Azure data lake store read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: ADLS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py index 145c7c61358a..e882698c2ca6 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -40,10 +40,10 @@ class AzureDataLakeStoreSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. 
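Unlike the setting classes, the sinks are polymorphic and fill in type themselves; a sketch of the ADLS sink with the new single-file-parallel switch:

from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

sink = AzureDataLakeStoreSink(
    copy_behavior='MergeFiles',             # any object is accepted now
    enable_adls_single_file_parallel=True,  # new property in this change
)
assert sink.type == 'AzureDataLakeStoreSink'  # constant filled by __init__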
+ :type enable_adls_single_file_parallel: object """ _validation = { @@ -58,10 +58,12 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreSink, self).__init__(**kwargs) self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) self.type = 'AzureDataLakeStoreSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py index d3e16339fef2..0f96cea725e2 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -40,10 +40,10 @@ class AzureDataLakeStoreSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. + :type enable_adls_single_file_parallel: object """ _validation = { @@ -58,10 +58,12 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py new file mode 100644 index 000000000000..d7875f545e77 --- /dev/null +++ 
b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSetting, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py new file mode 100644 index 000000000000..e05ddcbaeaac --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py index 9aae64af8da0..af2505be7a5c 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -41,9 +41,8 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + into Azure Search Index. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py index 3cd887a2512c..9e57f2f1feb3 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -41,9 +41,8 @@ class AzureSearchIndexSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specify the write behavior when upserting documents - into Azure Search Index. Possible values include: 'Merge', 'Upload' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType + into Azure Search Index. 
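The same loosening applies to the search sink's write_behavior; 'Merge' and 'Upload', the values the removed enum documented, remain sensible inputs:

from azure.mgmt.datafactory.models import AzureSearchIndexSink

# write_behavior is now an object, so a bare string works.
sink = AzureSearchIndexSink(write_behavior='Upload')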
+ :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py new file mode 100644 index 000000000000..441bf0c4279f --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'AzureSqlSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..6aa431ae57d6 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
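A sketch of the new Azure SQL sink wired to a writer stored procedure; the StoredProcedureParameter(value=..., type=...) constructor is assumed from the model the docstring references, which is not part of this diff, and the procedure and table-type names are hypothetical:

from azure.mgmt.datafactory.models import AzureSqlSink, StoredProcedureParameter

sink = AzureSqlSink(
    sql_writer_stored_procedure_name='spOverwriteSales',  # hypothetical proc
    sql_writer_table_type='SalesType',                    # hypothetical type
    stored_procedure_parameters={
        # mirrors the docstring example {Parameter1: {value: "1", type: "int"}}
        'Parameter1': StoredProcedureParameter(value='1', type='int'),
    },
    stored_procedure_table_type_parameter_name='sales',
)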
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'AzureSqlSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py new file mode 100644 index 000000000000..b6c62f9a3164 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
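For the matching source, exactly one of sql_reader_query and sql_reader_stored_procedure_name should be set, since the docstring notes they cannot be combined; a sketch with a hypothetical query:

from azure.mgmt.datafactory.models import AzureSqlSource

source = AzureSqlSource(
    sql_reader_query='SELECT id, total FROM sales',
    # sql_reader_stored_procedure_name=...  # mutually exclusive with the query
)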
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py index 8a050cf9cc64..284e0fcecde5 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py @@ -49,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -70,7 +68,7 @@ class BlobSink(CopySink): 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py index 8fca0ac5cacc..370acc72e017 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py @@ -49,10 +49,8 @@ class BlobSink(CopySink): :param blob_writer_add_header: Blob writer add header. Type: boolean (or Expression with resultType boolean). :type blob_writer_add_header: object - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -70,7 +68,7 @@ class BlobSink(CopySink): 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py index e7ba96c18682..8a52f03cd5ba 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -42,11 +42,8 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + case-insensitive. 
+ :type consistency_level: object + """ _validation = { @@ -60,7 +57,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py index bd95d158b868..6957385bab86 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -42,11 +42,8 @@ class CassandraSource(CopySource): the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is - case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', - 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', - 'LOCAL_SERIAL' - :type consistency_level: str or - ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels + case-insensitive. + :type consistency_level: object """ _validation = { @@ -60,7 +57,7 @@ class CassandraSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py new file mode 100644 index 000000000000..676a418983f4 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer).
+ :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py new file mode 100644 index 000000000000..05bb5e2f87f3 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py new file mode 100644 index 000000000000..65daf9f07794 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorWriteSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.copy_behavior = kwargs.get('copy_behavior', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py new file mode 100644 index 000000000000..7f4ea65c916d --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(ConnectorWriteSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections + self.copy_behavior = copy_behavior diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py index f73d34fcb3ce..2e7c00d551ba 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -67,6 +67,8 @@ class CopyActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
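# NOTE (illustrative sketch, not part of the generated patch): the docstring
# hunk above adds a `preserve` type-property alongside the existing
# `preserve_rules`. A minimal construction follows; the dataset reference
# names are placeholders, and 'Attributes' is just an example preserve rule.
# `copy_behavior` accepts a plain string now that the CopyBehaviorType enum
# is removed later in this patch.
from azure.mgmt.datafactory.models import (
    BlobSink, BlobSource, CopyActivity, DatasetReference)

activity = CopyActivity(
    name='CopyBlobToBlob',
    source=BlobSource(),
    sink=BlobSink(copy_behavior='PreserveHierarchy'),
    inputs=[DatasetReference(reference_name='SrcDataset')],
    outputs=[DatasetReference(reference_name='DstDataset')],
    preserve=['Attributes'],   # new in this change
    preserve_rules=[],         # pre-existing property, still available
)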
@@ -91,7 +93,7 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, @@ -99,6 +101,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -115,6 +118,7 @@ def __init__(self, **kwargs): self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py index a02cd5d89e10..f8a1fee5625d 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -41,7 +41,7 @@ class CopyActivity(ExecutionActivity): :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. - :type translator: ~azure.mgmt.datafactory.models.CopyTranslator + :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -67,6 +67,8 @@ class CopyActivity(ExecutionActivity): ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
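# NOTE (illustrative sketch, not part of the generated patch): `translator`
# is retyped from CopyTranslator to a free-form object, so a raw dict in the
# service's JSON shape can be passed straight through. The TabularTranslator
# payload below is an assumed example shape, not something this patch defines.
translator = {
    'type': 'TabularTranslator',
    'columnMappings': 'SrcCol1: DstCol1, SrcCol2: DstCol2',
}
# e.g. CopyActivity(..., translator=translator); the attribute-map hunk below
# now serializes the value as 'object'.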
@@ -91,7 +93,7 @@ class CopyActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, - 'translator': {'key': 'typeProperties.translator', 'type': 'CopyTranslator'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, @@ -99,11 +101,12 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -115,6 +118,7 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.preserve_rules = preserve_rules + self.preserve = preserve self.inputs = inputs self.outputs = outputs self.type = 'Copy' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 6f714f7947d1..43117547e1ea 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -18,9 +18,10 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, - AzureQueueSink, SapCloudForCustomerSink + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -64,7 +65,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index ea4f320e9bc6..4f9ebc84173c 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -18,9 +18,10 @@ class CopySink(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, AzureTableSink, - AzureQueueSink, SapCloudForCustomerSink + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
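# NOTE (illustrative sketch, not part of the generated patch): each CopySink
# subclass pins its discriminator in __init__ (self.type = 'BlobSink', etc.),
# and the _subtype_map hunks in these files let msrest route a payload's
# 'type' value, now including 'AzureSqlSink', 'SqlServerSink', 'ParquetSink'
# and 'DelimitedTextSink', back to the right class. A quick pass through
# msrest's Serializer shows the wire shape:
from msrest import Serializer

from azure.mgmt.datafactory import models
from azure.mgmt.datafactory.models import BlobSink

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
body = Serializer(client_models).body(BlobSink(), 'CopySink')
print(body['type'])  # 'BlobSink', filled by the subclass constructor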
@@ -64,7 +65,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index 4f3da1e8cf85..091070316cfe 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -28,9 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, - SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapOpenHubSource, SapEccSource, SapCloudForCustomerSource, + SalesforceSource, RelationalSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
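# NOTE (illustrative sketch, not part of the generated patch): AzureSqlSource,
# added earlier in this patch and registered in the subtype map just below,
# in use; the procedure and parameter names are placeholders. Per its
# docstring, sql_reader_query and sql_reader_stored_procedure_name cannot be
# used at the same time.
from azure.mgmt.datafactory.models import (
    AzureSqlSource, StoredProcedureParameter)

source = AzureSqlSource(
    sql_reader_stored_procedure_name='usp_GetOrders',
    stored_procedure_parameters={
        'MinAmount': StoredProcedureParameter(value='100', type='Int'),
    },
)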
@@ -65,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 
'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index eb439548481a..a5b8437e944a 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -28,9 +28,11 @@ class CopySource(Model): AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, SqlSource, RestSource, SapOpenHubSource, SapEccSource, - SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapOpenHubSource, SapEccSource, SapCloudForCustomerSource, + SalesforceSource, RelationalSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. 
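# NOTE (illustrative sketch, not part of the generated patch): properties
# documented as "Type: X (or Expression with resultType X)" are declared as
# 'object' so either a literal or a Data Factory expression envelope can be
# supplied; the pipeline parameter name below is hypothetical.
from azure.mgmt.datafactory.models import AzureSqlSource

literal = AzureSqlSource(max_concurrent_connections=4)
dynamic = AzureSqlSource(max_concurrent_connections={
    'value': '@pipeline().parameters.maxConnections',
    'type': 'Expression',
})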
@@ -65,7 +67,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 
'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index b372cb91d8ef..ded527b2602a 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -321,18 +321,6 @@ class SybaseAuthenticationType(str, Enum): windows = "Windows" -class DatasetCompressionLevel(str, Enum): - - optimal = "Optimal" - fastest = "Fastest" - - -class JsonFormatFilePattern(str, Enum): - - set_of_objects = "setOfObjects" - array_of_objects = "arrayOfObjects" - - class AzureFunctionActivityMethod(str, Enum): get = "GET" @@ -352,36 +340,17 @@ class WebActivityMethod(str, Enum): delete = "DELETE" -class CassandraSourceReadConsistencyLevels(str, Enum): - - all = "ALL" - each_quorum = "EACH_QUORUM" - quorum = "QUORUM" - local_quorum = "LOCAL_QUORUM" - one = "ONE" - two = "TWO" - three = "THREE" - local_one = "LOCAL_ONE" - serial = "SERIAL" - local_serial = "LOCAL_SERIAL" - - class StoredProcedureParameterType(str, Enum): string = "String" int_enum = "Int" + int64 = "Int64" decimal_enum = "Decimal" guid = "Guid" boolean = "Boolean" date_enum = "Date" -class SalesforceSourceReadBehavior(str, Enum): - - query = "Query" - query_all = "QueryAll" - - class HDInsightActivityDebugInfoOption(str, Enum): none = "None" @@ -389,37 +358,12 @@ class HDInsightActivityDebugInfoOption(str, Enum): failure = "Failure" -class SalesforceSinkWriteBehavior(str, Enum): - - insert = "Insert" - upsert = "Upsert" - - -class AzureSearchIndexWriteBehaviorType(str, Enum): - - merge = "Merge" - upload = "Upload" - - -class CopyBehaviorType(str, Enum): - - preserve_hierarchy = "PreserveHierarchy" - flatten_hierarchy = "FlattenHierarchy" - merge_files = "MergeFiles" - - class PolybaseSettingsRejectType(str, Enum): value = "value" percentage = "percentage" -class SapCloudForCustomerSinkWriteBehavior(str, Enum): - - insert = "Insert" - update = "Update" - - class WebHookActivityMethod(str, Enum): post = "POST" @@ 
-464,6 +408,12 @@ class ManagedIntegrationRuntimeNodeStatus(str, Enum): unavailable = "Unavailable" +class IntegrationRuntimeEntityReferenceType(str, Enum): + + integration_runtime_reference = "IntegrationRuntimeReference" + linked_service_reference = "LinkedServiceReference" + + class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): basic = "Basic" diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index af540b1e6429..eb7454388615 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -39,7 +39,8 @@ class Dataset(Model): Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, - AzureTableDataset, AzureBlobDataset, AmazonS3Dataset + AzureTableDataset, AzureBlobDataset, DelimitedTextDataset, ParquetDataset, + AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -89,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 
'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py index c16c0611b364..9c97e2bfa5e3 100644 --- 
a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py index 715fe91a12a3..11d00081bc1c 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py index 48317d06f34e..4925127c7f0f 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. 
+ :type level: object + """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py index 99b1081469f8..97346e06366d 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -22,9 +22,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py new file mode 100644 index 000000000000..2c318a91cccb --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of the dataset. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: Specify the file name of the dataset. Type: string (or + Expression with resultType string).
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of the dataset. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: Specify the file name of the dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index adc64b228236..c7831425143f 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -39,7 +39,8 @@ class Dataset(Model): Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, - AzureTableDataset, AzureBlobDataset, AmazonS3Dataset + AzureTableDataset, AzureBlobDataset, DelimitedTextDataset, ParquetDataset, + AmazonS3Dataset All required parameters must be populated in order to send to Azure.
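# NOTE (illustrative sketch, not part of the generated patch): DatasetLocation,
# the new storage-location base added above, in use. Only `type` is required;
# folder_path and file_name take literals or expressions. The discriminator
# string and paths below are placeholders; in practice a generated subclass
# would normally supply `type`.
from azure.mgmt.datafactory.models import DatasetLocation

location = DatasetLocation(
    type='AzureBlobStorageLocation',  # assumed discriminator value
    folder_path='container/input',
    file_name='data.csv',
)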
@@ -89,7 +90,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 
'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py index 9312098be5a3..ed80bf3cbcf2 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. 
+ :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py index 74fbb92ce1ab..20abd6fe1088 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -22,9 +22,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: - 'Optimal', 'Fastest' - :type level: str or ~azure.mgmt.datafactory.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -34,7 +33,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py new file mode 100644 index 000000000000..bfee26fcd12c --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + not specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table at the following link for + supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: The compression codec used for the delimited + text files. + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output, write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + self.type = 'DelimitedText' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py new file mode 100644 index 000000000000..c2597e6a022b --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + not specified, the default value is UTF-8, unless BOM denotes another Unicode + encoding. Refer to the name column of the table at the following link for + supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: The compression codec used for the delimited + text files. + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output, write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string). 
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py new file mode 100644 index 000000000000..01dc21c02206 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
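[With the Python 3 variant of DelimitedTextDataset now complete, a sketch of constructing it; the linked service name and the location type value are assumptions for illustration, not values taken from this patch:]

    from azure.mgmt.datafactory.models import (
        DatasetLocation,
        DelimitedTextDataset,
        LinkedServiceReference,
    )

    dataset = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyBlobStorage'),
        location=DatasetLocation(type='AzureBlobStorageLocation',
                                 folder_path='raw/csv'),
        column_delimiter=',',
        first_row_as_header=True,
    )
    # The constructor pins the discriminator itself:
    assert dataset.type == 'DelimitedText'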
+# -------------------------------------------------------------------------- + +from .format_read_setting import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + :param treat_empty_as_null: Specify whether to treat null or empty string + as a null value when reading data from an input file. + :type treat_empty_as_null: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSetting, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py new file mode 100644 index 000000000000..597b134e808b --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_setting_py3 import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + :param treat_empty_as_null: Specify whether to treat null or empty string + as a null value when reading data from an input file. 
+ :type treat_empty_as_null: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'bool'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, treat_empty_as_null: bool=None, **kwargs) -> None: + super(DelimitedTextReadSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count + self.treat_empty_as_null = treat_empty_as_null diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py new file mode 100644 index 000000000000..ae93f209c8b3 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..a1ba953a2662 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py new file mode 100644 index 000000000000..9f2067d24b9c --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..b158f97bde81 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py new file mode 100644 index 000000000000..21fe168f1316 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSetting, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py new file mode 100644 index 000000000000..ac0e3b2d00cc --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting_py3 import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py index 25e80ee45466..c2908dc1dd05 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -43,6 +43,9 @@ class DocumentDbCollectionSink(CopySink): :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). 
:type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { @@ -58,9 +61,11 @@ class DocumentDbCollectionSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): super(DocumentDbCollectionSink, self).__init__(**kwargs) self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) self.type = 'DocumentDbCollectionSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py index 111897036215..f1410cd211a4 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -43,6 +43,9 @@ class DocumentDbCollectionSink(CopySink): :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object """ _validation = { @@ -58,9 +61,11 @@ class DocumentDbCollectionSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator + self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py index 233c4c99d4df..392b8ac7b971 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -45,7 +45,7 @@ class DynamicsAXResourceDataset(Dataset): :type type: str :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). 
- :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class DynamicsAXResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py index 788c9084ee9b..6cade3e4aa59 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -45,7 +45,7 @@ class DynamicsAXResourceDataset(Dataset): :type type: str :param path: Required. The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class DynamicsAXResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'DynamicsAXResource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py index 45bac7b52064..5afce6ced25b 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -44,8 +44,7 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str + :vartype write_behavior: object :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). 
@@ -65,11 +64,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = "Upsert" + write_behavior = None def __init__(self, **kwargs): super(DynamicsSink, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py index 5f736f9cf658..ffdb08363bfd 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py @@ -44,8 +44,7 @@ class DynamicsSink(CopySink): :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. - Default value: "Upsert" . - :vartype write_behavior: str + :vartype write_behavior: object :param ignore_null_values: The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). @@ -65,11 +64,11 @@ class DynamicsSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - write_behavior = "Upsert" + write_behavior = None def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py new file mode 100644 index 000000000000..5db1448a5a55 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. 
Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py new file mode 100644 index 000000000000..edce5fe68a65 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. 
Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerLocation, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py new file mode 100644 index 000000000000..f7fb8354bcbc --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py new file mode 100644 index 000000000000..6ba2a5f56b79 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py new file mode 100644 index 000000000000..4393692d63f3 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py new file mode 100644 index 000000000000..9342210abdfb --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
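Taken together, the wildcard and modified-datetime parameters let a file server read narrow its input set. A sketch, assuming the service accepts the class name as the type value; the pattern and timestamps are hypothetical:

    from azure.mgmt.datafactory.models import FileServerReadSetting

    read_settings = FileServerReadSetting(
        type='FileServerReadSetting',  # assumed type value
        recursive=True,
        wildcard_file_name='*.csv',    # hypothetical pattern
        modified_datetime_start='2019-01-01T00:00:00Z',
        modified_datetime_end='2019-02-01T00:00:00Z')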
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSetting, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py new file mode 100644 index 000000000000..1ed4bf220417 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py index 75baab87456e..8b8f238c9534 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -40,10 +40,8 @@ class FileSystemSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class FileSystemSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py index 92388128726e..24f8623cbb02 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -40,10 +40,8 @@ class FileSystemSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param copy_behavior: The type of copy behavior for copy sink. Possible - values include: 'PreserveHierarchy', 'FlattenHierarchy', 'MergeFiles' - :type copy_behavior: str or - ~azure.mgmt.datafactory.models.CopyBehaviorType + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object """ _validation = { @@ -58,7 +56,7 @@ class FileSystemSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'copy_behavior': {'key': 'copyBehavior', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py similarity index 73% rename from azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py rename to azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py index 2b0242ef997c..730cec9f525f 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py @@ -12,18 +12,15 @@ from msrest.serialization import Model -class CopyTranslator(Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator +class FormatReadSetting(Model): + """Format read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Required. The read setting type. 
:type type: str """ @@ -36,11 +33,7 @@ class CopyTranslator(Model): 'type': {'key': 'type', 'type': 'str'}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - def __init__(self, **kwargs): - super(CopyTranslator, self).__init__(**kwargs) + super(FormatReadSetting, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) - self.type = None + self.type = kwargs.get('type', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py similarity index 68% rename from azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py rename to azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py index 3fef58394fd0..ed68bf35f009 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_translator_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py @@ -12,18 +12,15 @@ from msrest.serialization import Model -class CopyTranslator(Model): - """A copy activity translator. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: TabularTranslator +class FormatReadSetting(Model): + """Format read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. + :param type: Required. The read setting type. :type type: str """ @@ -36,11 +33,7 @@ class CopyTranslator(Model): 'type': {'key': 'type', 'type': 'str'}, } - _subtype_map = { - 'type': {'TabularTranslator': 'TabularTranslator'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(CopyTranslator, self).__init__(**kwargs) + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatReadSetting, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type = None + self.type = type diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py new file mode 100644 index 000000000000..0fd6966859d5 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSetting(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. 
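Because FormatReadSetting replaces the server-discriminated CopyTranslator base, the caller now supplies type directly, and any extra format options can ride along in additional_properties. A sketch; the type value and the option key are illustrative assumptions:

    from azure.mgmt.datafactory.models import FormatReadSetting

    fmt = FormatReadSetting(
        type='DelimitedTextReadSetting',             # illustrative type value
        additional_properties={'skipLineCount': 1})  # hypothetical option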
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FormatWriteSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py new file mode 100644 index 000000000000..3e5609066208 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FormatWriteSetting(Model): + """Format write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None: + super(FormatWriteSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py new file mode 100644 index 000000000000..137a56948deb --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FtpReadSetting(ConnectorReadSetting): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. + :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(FtpReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.use_binary_transfer = kwargs.get('use_binary_transfer', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py new file mode 100644 index 000000000000..5294301e4fd8 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FtpReadSetting(ConnectorReadSetting): + """Ftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Ftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param use_binary_transfer: Specify whether to use binary transfer mode + for FTP stores. 
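The FTP variant adds use_binary_transfer, which is a plain boolean rather than an expression-capable object. A construction sketch with hypothetical values:

    from azure.mgmt.datafactory.models import FtpReadSetting

    ftp_read = FtpReadSetting(
        type='FtpReadSetting',              # assumed type value
        recursive=False,
        wildcard_file_name='export_*.zip',  # hypothetical pattern
        use_binary_transfer=True)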
+ :type use_binary_transfer: bool + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + super(FtpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.use_binary_transfer = use_binary_transfer diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py new file mode 100644 index 000000000000..5d5e933036df --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py new file mode 100644 index 000000000000..ac296bcfca31 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FtpServerLocation(DatasetLocation): + """The location of ftp server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py new file mode 100644 index 000000000000..a8f5d1ba332c --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
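FtpServerLocation and HdfsLocation follow the same DatasetLocation shape: a required type plus optional folder_path and file_name. A sketch with hypothetical path values:

    from azure.mgmt.datafactory.models import FtpServerLocation

    location = FtpServerLocation(
        type='FtpServerLocation',      # assumed type value
        folder_path='outbound/daily',  # hypothetical folder
        file_name='data.csv')          # hypothetical file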
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py new file mode 100644 index 000000000000..4fdadbc2fcd0 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py new file mode 100644 index 000000000000..164a6f497e52 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. 
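The distcp_settings hook lets an HDFS read delegate the copy to DistCp via the existing DistcpSettings model. A sketch, under the assumption that DistcpSettings exposes resource_manager_endpoint and temp_script_path parameters; the endpoint and path values are hypothetical:

    from azure.mgmt.datafactory.models import DistcpSettings, HdfsReadSetting

    hdfs_read = HdfsReadSetting(
        type='HdfsReadSetting',  # assumed type value
        recursive=True,
        distcp_settings=DistcpSettings(
            resource_manager_endpoint='http://myrm:8088',  # hypothetical
            temp_script_path='/tmp/distcp'))               # hypothetical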
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None:
+ super(HdfsReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.wildcard_folder_path = wildcard_folder_path
+ self.wildcard_file_name = wildcard_file_name
+ self.enable_partition_discovery = enable_partition_discovery
+ self.modified_datetime_start = modified_datetime_start
+ self.modified_datetime_end = modified_datetime_end
+ self.distcp_settings = distcp_settings
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
new file mode 100644
index 000000000000..696a9fdb3faf
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class HttpReadSetting(ConnectorReadSetting):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get
+ an HTTP response from the HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpReadSetting, self).__init__(**kwargs)
+ self.request_method = kwargs.get('request_method', None)
+ self.request_body = kwargs.get('request_body', None)
+ self.additional_headers = kwargs.get('additional_headers', None)
+ self.request_timeout = kwargs.get('request_timeout', None)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
new file mode 100644
index 000000000000..3d5d75a80785
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class HttpReadSetting(ConnectorReadSetting):
+ """Http read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get
+ an HTTP response from the HTTP server.
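For HTTP sources the read settings model carries the request itself. A sketch of a POST configuration; the method, body, headers, and timeout values are hypothetical:

    from azure.mgmt.datafactory.models import HttpReadSetting

    http_read = HttpReadSetting(
        type='HttpReadSetting',                  # assumed type value
        request_method='POST',
        request_body='{"query": "all"}',         # hypothetical payload
        additional_headers='Content-Type: application/json',
        request_timeout='00:05:00')              # hypothetical timeout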
+ :type request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'request_body': {'key': 'requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None: + super(HttpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.request_timeout = request_timeout diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py new file mode 100644 index 000000000000..94106fae9d15 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. 
Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpServerLocation, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py new file mode 100644 index 000000000000..c52c53dcf357 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HttpServerLocation(DatasetLocation): + """The location of http server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param relative_url: Specify the relativeUrl of http server. Type: string + (or Expression with resultType string) + :type relative_url: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: + super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.relative_url = relative_url diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py new file mode 100644 index 000000000000..ebc0e9b38d6f --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. 
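(Reviewer note: a hedged sketch of populating the new data-proxy model; the reference names are invented, and EntityReference with these type strings comes from the existing SDK surface rather than this patch.)

from azure.mgmt.datafactory.models import (
    EntityReference, IntegrationRuntimeDataProxyProperties)

data_proxy = IntegrationRuntimeDataProxyProperties(
    # Self-hosted IR that brokers on-premises traffic for the SSIS IR.
    connect_via=EntityReference(
        type='IntegrationRuntimeReference', reference_name='MySelfHostedIR'),
    # Blob linked service used for staging, plus a path inside it.
    staging_linked_service=EntityReference(
        type='LinkedServiceReference', reference_name='MyStagingBlobLS'),
    path='staging/container')  # hypothetical staging path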
+ :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py index e1a091166529..293f071aa0b3 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,6 +45,7 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } @@ -50,4 +55,5 @@ def __init__(self, **kwargs): self.catalog_info = kwargs.get('catalog_info', None) self.license_type = kwargs.get('license_type', None) self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) self.edition = kwargs.get('edition', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py index eb70dd23ddb7..f75775e29a7f 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py @@ -30,6 +30,10 @@ class IntegrationRuntimeSsisProperties(Model): a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. 
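(Continuing the sketch above: attaching the proxy settings to the SSIS runtime properties; 'Enterprise' is one of the edition values documented in this docstring, everything else is assumed.)

from azure.mgmt.datafactory.models import IntegrationRuntimeSsisProperties

ssis_props = IntegrationRuntimeSsisProperties(
    data_proxy_properties=data_proxy,   # from the sketch above
    edition='Enterprise')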
Possible values include: 'Standard', 'Enterprise' :type edition: str or @@ -41,13 +45,15 @@ class IntegrationRuntimeSsisProperties(Model): 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, edition=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info self.license_type = license_type self.custom_setup_script_properties = custom_setup_script_properties + self.data_proxy_properties = data_proxy_properties self.edition = edition diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py index 736f9500018f..80f4ff0aaf8b 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py index a9a7f20ea103..2fdb44cc3b7f 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py @@ -30,10 +30,8 @@ class JsonFormat(DatasetStorageFormat): :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is - 'setOfObjects'. It is case-sensitive. Possible values include: - 'setOfObjects', 'arrayOfObjects' - :type file_pattern: str or - ~azure.mgmt.datafactory.models.JsonFormatFilePattern + 'setOfObjects'. It is case-sensitive. 
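(Reviewer note: a brief sketch of what this enum-to-object loosening enables: file_pattern can now be either the literal string or a parameterized ADF expression; the {"value": ..., "type": "Expression"} wrapper is the standard Data Factory expression shape, assumed here rather than stated in the patch.)

from azure.mgmt.datafactory.models import JsonFormat

# Still accepts the plain literal...
fmt_literal = JsonFormat(file_pattern='arrayOfObjects')

# ...and now also an expression, since the field is typed 'object'.
fmt_expr = JsonFormat(
    file_pattern={'value': "@dataset().filePattern", 'type': 'Expression'})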
+ :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). @@ -67,7 +65,7 @@ class JsonFormat(DatasetStorageFormat): 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py new file mode 100644 index 000000000000..87f69763a470 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for oracle source partitioning. + + :param partition_names: Names of the physical partitions of oracle table. + :type partition_names: list[object] + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string).
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': '[object]'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = kwargs.get('partition_names', None) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py new file mode 100644 index 000000000000..6a13a4d647e8 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OraclePartitionSettings(Model): + """The settings that will be leveraged for oracle source partitioning. + + :param partition_names: Names of the physical partitions of oracle table. + :type partition_names: list[object] + :param partition_column_name: The name of the column in integer type that + will be used for proceeding range partitioning. Type: string (or + Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string).
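(Reviewer note: to make the bound semantics concrete after the lower-bound docstring fix above, a hedged sketch of a range-partitioned read; the column name and bound values are invented for illustration.)

from azure.mgmt.datafactory.models import OraclePartitionSettings

partition_settings = OraclePartitionSettings(
    partition_column_name='ORDER_ID',   # integer column driving the ranges
    partition_lower_bound='1',          # minimum ORDER_ID to read
    partition_upper_bound='1000000')    # maximum ORDER_ID to read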
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_names': {'key': 'partitionNames', 'type': '[object]'}, + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(OraclePartitionSettings, self).__init__(**kwargs) + self.partition_names = partition_names + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py index 12b3aa31353f..84ad79ed19c7 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py @@ -40,6 +40,13 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + oracle read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for oracle + source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +61,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } def __init__(self, **kwargs): super(OracleSource, self).__init__(**kwargs) self.oracle_reader_query = kwargs.get('oracle_reader_query', None) self.query_timeout = kwargs.get('query_timeout', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) self.type = 'OracleSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py index 43afe27fda2f..dfcbd2e0330d 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py @@ -40,6 +40,13 @@ class OracleSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param partition_option: The partition mechanism that will be used for + oracle read in parallel. + :type partition_option: object + :param partition_settings: The settings that will be leveraged for oracle + source partitioning. 
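(Putting the two Oracle additions together: a hedged sketch of the parallel source; 'DynamicRange' is assumed to be one of the service-side partition_option values, since the patch leaves this field as an untyped object.)

from azure.mgmt.datafactory.models import OracleSource

source = OracleSource(
    partition_option='DynamicRange',        # assumed service-side value
    partition_settings=partition_settings)  # from the sketch above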
+ :type partition_settings: + ~azure.mgmt.datafactory.models.OraclePartitionSettings """ _validation = { @@ -54,10 +61,14 @@ class OracleSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout + self.partition_option = partition_option + self.partition_settings = partition_settings self.type = 'OracleSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py new file mode 100644 index 000000000000..ffaf8e1f6d93 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.type = 'Parquet' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. 
The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py new file mode 100644 index 000000000000..3fea726312b4 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. 
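(Reviewer note: a hedged construction sketch for the new Parquet dataset; the linked-service name, the AzureBlobStorageLocation discriminator string, and 'snappy' as a codec value are illustrative assumptions.)

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation, LinkedServiceReference, ParquetDataset)

dataset = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobLS'),
    location=AzureBlobStorageLocation(
        type='AzureBlobStorageLocation',    # assumed discriminator value
        folder_path='curated/orders',       # hypothetical folder
        file_name='orders.parquet'),
    compression_codec='snappy')             # assumed codec value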
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.type = 'ParquetSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..0673e35e73f1 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ParquetSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py new file mode 100644 index 000000000000..02e74641d506 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..bfe077dd9999 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
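(Reviewer note: a hedged sketch of the source side; AzureBlobStorageReadSetting is added elsewhere in this patch, and its discriminator string and wildcard fields are assumed to follow the same pattern as the other read settings here.)

from azure.mgmt.datafactory.models import (
    AzureBlobStorageReadSetting, ParquetSource)

source = ParquetSource(
    store_settings=AzureBlobStorageReadSetting(
        type='AzureBlobStorageReadSetting',  # assumed discriminator value
        recursive=True,
        wildcard_file_name='*.parquet'))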
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py index 9a1291bd4bfe..4d1a93c08915 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -41,9 +41,8 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + Insert. + :type write_behavior: object :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). @@ -71,7 +70,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py index 54a56618d01e..ed7591fbb59b 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py @@ -41,9 +41,8 @@ class SalesforceSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - Insert. Possible values include: 'Insert', 'Upsert' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + Insert. + :type write_behavior: object :param external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). 
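(Reviewer note: since write_behavior is now an untyped object, a hedged sketch of an upsert configuration; 'Upsert' matches the values listed in the removed enum text, and the external ID field name is invented.)

from azure.mgmt.datafactory.models import SalesforceSink

sink = SalesforceSink(
    write_behavior='Upsert',                  # previously enum, now object
    external_id_field_name='External_Id__c',  # hypothetical external ID field
    ignore_null_values=True)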
@@ -71,7 +70,7 @@ class SalesforceSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py index 4f2590c3ab9d..57a10411f487 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py @@ -37,9 +37,8 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + Query. + :type read_behavior: object """ _validation = { @@ -53,7 +52,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py index 4441e92eaff3..08e6776f5f98 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -37,9 +37,8 @@ class SalesforceSource(CopySource): string). :type query: object :param read_behavior: The read behavior for the operation. Default is - Query. Possible values include: 'Query', 'QueryAll' - :type read_behavior: str or - ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + Query. + :type read_behavior: object """ _validation = { @@ -53,7 +52,7 @@ class SalesforceSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, - 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py index e5a37858abb5..ae99093f277e 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py @@ -41,9 +41,8 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. 
Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + 'Insert'. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py index 29f01fdd6891..bdbc2cefcbd1 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py @@ -41,9 +41,8 @@ class SapCloudForCustomerSink(CopySink): :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is - 'Insert'. Possible values include: 'Insert', 'Update' - :type write_behavior: str or - ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior + 'Insert'. + :type write_behavior: object """ _validation = { @@ -58,7 +57,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py index e4f10113aecd..f79367f49b3d 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,7 +64,7 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py index 08bf742dc415..76aaeb9bb9f2 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py @@ -45,7 +45,7 @@ class SapEccResourceDataset(Dataset): :type type: str :param path: Required. 
The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). - :type path: str + :type path: object """ _validation = { @@ -64,10 +64,10 @@ class SapEccResourceDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'path': {'key': 'typeProperties.path', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, } - def __init__(self, *, linked_service_name, path: str, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'SapEccResource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py index 6a2d17862d6b..6379c33713d4 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py @@ -35,7 +35,7 @@ class SapEccSource(CopySource): :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). - :type query: str + :type query: object """ _validation = { @@ -48,7 +48,7 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, **kwargs): diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py index 95a11500bd24..4412cac39960 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py @@ -35,7 +35,7 @@ class SapEccSource(CopySource): :type type: str :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
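(These str-to-object loosenings all serve the same goal of allowing expressions; a hedged sketch for the SAP ECC source, using the standard Data Factory expression wrapper as an assumed shape.)

from azure.mgmt.datafactory.models import SapEccSource

source = SapEccSource(
    query={'value': "$top=@{dataset().rowCount}", 'type': 'Expression'})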
- :type query: str + :type query: object """ _validation = { @@ -48,10 +48,10 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.type = 'SapEccSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py new file mode 100644 index 000000000000..5b8fd4e42ba2 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpLocation, self).__init__(**kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py new file mode 100644 index 000000000000..c5e2feafa971 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class SftpLocation(DatasetLocation): + """The location of SFTP dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py new file mode 100644 index 000000000000..e0cd7ea8fda1 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class SftpReadSetting(ConnectorReadSetting): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SftpReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py new file mode 100644 index 000000000000..39beb756905a --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class SftpReadSetting(ConnectorReadSetting): + """Sftp read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Sftp wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
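# A minimal sketch of the SftpReadSetting model described here, assuming the
# class is exported from azure.mgmt.datafactory.models and that the service
# expects the discriminator string 'SftpReadSetting'; all values are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import SftpReadSetting

sftp_read = SftpReadSetting(
    type='SftpReadSetting',       # required: the read setting type
    recursive=True,               # read files under the folder path recursively
    wildcard_file_name='*.csv',   # hypothetical wildcard
    modified_datetime_start='2019-05-01T00:00:00Z',
    modified_datetime_end='2019-05-29T00:00:00Z')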
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(SftpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py new file mode 100644 index 000000000000..45b1f1273903 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). 
+ :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlServerSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dbe1bf44e418 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + 
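# A minimal sketch showing how the SqlServerSink fields assigned here fit
# together, assuming both classes are exported from
# azure.mgmt.datafactory.models; the procedure, table-type, and parameter
# names are hypothetical. 'Int64' is the parameter type added to
# StoredProcedureParameterType later in this patch.
from azure.mgmt.datafactory.models import (
    SqlServerSink, StoredProcedureParameter)

sink = SqlServerSink(
    sql_writer_stored_procedure_name='spOverwriteMyTable',
    sql_writer_table_type='MyTableType',
    stored_procedure_table_type_parameter_name='MyTable',
    stored_procedure_parameters={
        'identifier': StoredProcedureParameter(value='1', type='Int64')})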
self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlServerSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py new file mode 100644 index 000000000000..f9aa011047ea --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
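# A minimal sketch of the SqlServerSource model described here, following
# the stored-procedure example format given in its docstring; export from
# azure.mgmt.datafactory.models is assumed and the procedure name is a
# hypothetical placeholder. Note that sql_reader_stored_procedure_name
# cannot be combined with sql_reader_query.
from azure.mgmt.datafactory.models import (
    SqlServerSource, StoredProcedureParameter)

source = SqlServerSource(
    sql_reader_stored_procedure_name='spGetData',
    stored_procedure_parameters={
        'stringData': StoredProcedureParameter(value='str3', type='String')})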
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py index 9c2ebd2b389f..7ec0313aab4b 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -52,6 +52,10 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object """ _validation = { @@ -70,6 +74,7 @@ class SqlSink(CopySink): 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } def __init__(self, **kwargs): @@ -78,4 +83,5 @@ def __init__(self, **kwargs): self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) self.type = 'SqlSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py index 115cc3a899e9..1f6bb9685082 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -52,6 +52,10 @@ class SqlSink(CopySink): :param stored_procedure_parameters: SQL stored procedure parameters. :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object """ _validation = { @@ -70,12 +74,14 @@ class SqlSink(CopySink): 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name self.type = 'SqlSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py 
b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py index 748cf7cba53c..ff16595aa8c7 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py index bd967ce52876..2842ef9ae35c 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py @@ -19,7 +19,7 @@ class StoredProcedureParameter(Model): Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: - 'String', 'Int', 'Decimal', 'Guid', 'Boolean', 'Date' + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py deleted file mode 100644 index 043c537ad860..000000000000 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_translator import CopyTranslator - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, - Group: MyGroup, Name: MyName" Type: string (or Expression with resultType - string). - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and - hierarchical data. Example: {"Column1": "$.Column1", "Column2": - "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or - Expression with resultType object). - :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is - going to do cross-apply. Type: object (or Expression with resultType - object). 
- :type collection_reference: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - } - - def __init__(self, **kwargs): - super(TabularTranslator, self).__init__(**kwargs) - self.column_mappings = kwargs.get('column_mappings', None) - self.schema_mapping = kwargs.get('schema_mapping', None) - self.collection_reference = kwargs.get('collection_reference', None) - self.type = 'TabularTranslator' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py deleted file mode 100644 index cb1c11e5bb53..000000000000 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tabular_translator_py3.py +++ /dev/null @@ -1,57 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -from .copy_translator_py3 import CopyTranslator - - -class TabularTranslator(CopyTranslator): - """A copy activity tabular translator. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - :param column_mappings: Column mappings. Example: "UserId: MyUserId, - Group: MyGroup, Name: MyName" Type: string (or Expression with resultType - string). - :type column_mappings: object - :param schema_mapping: The schema mapping to map between tabular data and - hierarchical data. Example: {"Column1": "$.Column1", "Column2": - "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or - Expression with resultType object). - :type schema_mapping: object - :param collection_reference: The JSON Path of the Nested Array that is - going to do cross-apply. Type: object (or Expression with resultType - object). 
- :type collection_reference: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, - 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, - 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, - } - - def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, **kwargs) -> None: - super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) - self.column_mappings = column_mappings - self.schema_mapping = schema_mapping - self.collection_reference = collection_reference - self.type = 'TabularTranslator' From d71bfb5ea3f8047839adc15403f6360f80ea94f8 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 29 May 2019 09:10:27 +0000 Subject: [PATCH 2/4] Generated from 316b6eb6a6b9e9390a7c5eb6baa3c419947e7265 modify ParquetSink --- .../azure/mgmt/datafactory/models/__init__.py | 30 +++++++++---------- .../mgmt/datafactory/models/parquet_sink.py | 4 +++ .../datafactory/models/parquet_sink_py3.py | 6 +++- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 47d04dbdce9d..9f925571446e 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -397,17 +397,17 @@ from .document_db_collection_sink_py3 import DocumentDbCollectionSink from .file_system_sink_py3 import FileSystemSink from .blob_sink_py3 import BlobSink + from .file_server_write_setting_py3 import FileServerWriteSetting + from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting + from .connector_write_setting_py3 import ConnectorWriteSetting from .parquet_sink_py3 import ParquetSink from .azure_table_sink_py3 import AzureTableSink from .azure_queue_sink_py3 import AzureQueueSink from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink from .format_write_setting_py3 import FormatWriteSetting from .delimited_text_write_setting_py3 import DelimitedTextWriteSetting - from .file_server_write_setting_py3 import FileServerWriteSetting - from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting - from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting - from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting - from .connector_write_setting_py3 import ConnectorWriteSetting from .delimited_text_sink_py3 import DelimitedTextSink from .copy_sink_py3 import CopySink from .copy_activity_py3 import CopyActivity @@ -845,17 +845,17 @@ from .document_db_collection_sink import DocumentDbCollectionSink from .file_system_sink import FileSystemSink from .blob_sink import BlobSink + from .file_server_write_setting import FileServerWriteSetting + from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting + from .connector_write_setting import ConnectorWriteSetting from 
.parquet_sink import ParquetSink from .azure_table_sink import AzureTableSink from .azure_queue_sink import AzureQueueSink from .sap_cloud_for_customer_sink import SapCloudForCustomerSink from .format_write_setting import FormatWriteSetting from .delimited_text_write_setting import DelimitedTextWriteSetting - from .file_server_write_setting import FileServerWriteSetting - from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting - from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting - from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting - from .connector_write_setting import ConnectorWriteSetting from .delimited_text_sink import DelimitedTextSink from .copy_sink import CopySink from .copy_activity import CopyActivity @@ -1361,17 +1361,17 @@ 'DocumentDbCollectionSink', 'FileSystemSink', 'BlobSink', + 'FileServerWriteSetting', + 'AzureDataLakeStoreWriteSetting', + 'AzureBlobFSWriteSetting', + 'AzureBlobStorageWriteSetting', + 'ConnectorWriteSetting', 'ParquetSink', 'AzureTableSink', 'AzureQueueSink', 'SapCloudForCustomerSink', 'FormatWriteSetting', 'DelimitedTextWriteSetting', - 'FileServerWriteSetting', - 'AzureDataLakeStoreWriteSetting', - 'AzureBlobFSWriteSetting', - 'AzureBlobStorageWriteSetting', - 'ConnectorWriteSetting', 'DelimitedTextSink', 'CopySink', 'CopyActivity', diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py index 3fea726312b4..38c634ed10dd 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -40,6 +40,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param store_settings: Parquet store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting """ _validation = { @@ -54,8 +56,10 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, } def __init__(self, **kwargs): super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) self.type = 'ParquetSink' diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py index 0673e35e73f1..96c0c1b57926 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -40,6 +40,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param store_settings: Parquet store settings. 
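# A minimal sketch of the store_settings wiring added to ParquetSink in this
# commit, assuming export from azure.mgmt.datafactory.models; in real use
# store_settings would be a ConnectorWriteSetting subclass such as
# AzureBlobStorageWriteSetting (constructors not shown in this hunk), so a
# placeholder is used here.
from azure.mgmt.datafactory.models import ParquetSink

parquet_sink = ParquetSink(
    write_batch_size=10000,  # integer, or Expression with resultType integer
    store_settings=None)     # replace with a ConnectorWriteSetting subclass instance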
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting """ _validation = { @@ -54,8 +56,10 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings self.type = 'ParquetSink' From dc06667ef7c091270064cd374327e719d5b2a5a8 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Wed, 29 May 2019 09:51:07 +0000 Subject: [PATCH 3/4] Generated from ee70850679e3171b01674bc23820b1270ae444f9 delete treatEmptyAsNull --- .../mgmt/datafactory/models/delimited_text_read_setting.py | 5 ----- .../datafactory/models/delimited_text_read_setting_py3.py | 7 +------ 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py index 01dc21c02206..004eb595a05e 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py @@ -26,9 +26,6 @@ class DelimitedTextReadSetting(FormatReadSetting): when reading data from input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object - :param treat_empty_as_null: Specify whether to treat null or empty string - as a null value when reading data from an input file. - :type treat_empty_as_null: bool """ _validation = { @@ -39,10 +36,8 @@ class DelimitedTextReadSetting(FormatReadSetting): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'bool'}, } def __init__(self, **kwargs): super(DelimitedTextReadSetting, self).__init__(**kwargs) self.skip_line_count = kwargs.get('skip_line_count', None) - self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py index 597b134e808b..87915fcb3db7 100644 --- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py +++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py @@ -26,9 +26,6 @@ class DelimitedTextReadSetting(FormatReadSetting): when reading data from input files. Type: integer (or Expression with resultType integer). 
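# A minimal sketch of DelimitedTextReadSetting after this commit removes
# treat_empty_as_null, assuming export from azure.mgmt.datafactory.models
# and the discriminator string 'DelimitedTextReadSetting'.
from azure.mgmt.datafactory.models import DelimitedTextReadSetting

read_setting = DelimitedTextReadSetting(
    type='DelimitedTextReadSetting',  # required: the format read setting type
    skip_line_count=1)                # integer, or Expression with resultType integer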
:type skip_line_count: object - :param treat_empty_as_null: Specify whether to treat null or empty string - as a null value when reading data from an input file. - :type treat_empty_as_null: bool """ _validation = { @@ -39,10 +36,8 @@ class DelimitedTextReadSetting(FormatReadSetting): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, - 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'bool'}, } - def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, treat_empty_as_null: bool=None, **kwargs) -> None: + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: super(DelimitedTextReadSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) self.skip_line_count = skip_line_count - self.treat_empty_as_null = treat_empty_as_null From 338c52cf2162a55b583d7016cf192dd722a580e3 Mon Sep 17 00:00:00 2001 From: Azure SDK for Python bot Date: Thu, 30 May 2019 03:08:39 +0000 Subject: [PATCH 4/4] Generated from 4b230d1bb5edcbbed626e7547dbb07f392fe2dcc remove tablename --- .../azure/mgmt/datafactory/__init__.py | 18 + .../data_factory_management_client.py | 146 ++ .../azure/mgmt/datafactory/models/__init__.py | 1489 +++++++++++++++++ .../models/access_policy_response.py | 36 + .../models/access_policy_response_py3.py | 36 + .../azure/mgmt/datafactory/models/activity.py | 63 + .../datafactory/models/activity_dependency.py | 46 + .../models/activity_dependency_py3.py | 46 + .../datafactory/models/activity_policy.py | 59 + .../datafactory/models/activity_policy_py3.py | 59 + .../mgmt/datafactory/models/activity_py3.py | 63 + .../mgmt/datafactory/models/activity_run.py | 102 ++ .../datafactory/models/activity_run_py3.py | 102 ++ .../models/activity_runs_query_response.py | 39 + .../activity_runs_query_response_py3.py | 39 + .../models/amazon_mws_linked_service.py | 106 ++ .../models/amazon_mws_linked_service_py3.py | 106 ++ .../models/amazon_mws_object_dataset.py | 72 + .../models/amazon_mws_object_dataset_py3.py | 72 + .../datafactory/models/amazon_mws_source.py | 57 + .../models/amazon_mws_source_py3.py | 57 + .../models/amazon_redshift_linked_service.py | 86 + .../amazon_redshift_linked_service_py3.py | 86 + .../models/amazon_redshift_source.py | 65 + .../models/amazon_redshift_source_py3.py | 65 + .../datafactory/models/amazon_s3_dataset.py | 107 ++ .../models/amazon_s3_dataset_py3.py | 107 ++ .../models/amazon_s3_linked_service.py | 77 + .../models/amazon_s3_linked_service_py3.py | 77 + .../datafactory/models/amazon_s3_location.py | 55 + .../models/amazon_s3_location_py3.py | 55 + .../models/amazon_s3_read_setting.py | 78 + .../models/amazon_s3_read_setting_py3.py | 78 + .../models/append_variable_activity.py | 60 + .../models/append_variable_activity_py3.py | 60 + .../mgmt/datafactory/models/avro_format.py | 46 + .../datafactory/models/avro_format_py3.py | 46 + .../models/azure_batch_linked_service.py | 88 + .../models/azure_batch_linked_service_py3.py | 88 + .../datafactory/models/azure_blob_dataset.py | 100 ++ .../models/azure_blob_dataset_py3.py | 100 ++ .../models/azure_blob_fs_dataset.py | 85 + .../models/azure_blob_fs_dataset_py3.py | 85 + .../models/azure_blob_fs_linked_service.py | 86 + .../azure_blob_fs_linked_service_py3.py | 86 + .../models/azure_blob_fs_location.py | 50 + .../models/azure_blob_fs_location_py3.py | 50 + .../models/azure_blob_fs_read_setting.py | 73 + 
.../models/azure_blob_fs_read_setting_py3.py | 73 + .../datafactory/models/azure_blob_fs_sink.py | 65 + .../models/azure_blob_fs_sink_py3.py | 65 + .../models/azure_blob_fs_source.py | 68 + .../models/azure_blob_fs_source_py3.py | 68 + .../models/azure_blob_fs_write_setting.py | 45 + .../models/azure_blob_fs_write_setting_py3.py | 45 + .../azure_blob_storage_linked_service.py | 104 ++ .../azure_blob_storage_linked_service_py3.py | 104 ++ .../models/azure_blob_storage_location.py | 50 + .../models/azure_blob_storage_location_py3.py | 50 + .../models/azure_blob_storage_read_setting.py | 73 + .../azure_blob_storage_read_setting_py3.py | 73 + .../azure_blob_storage_write_setting.py | 45 + .../azure_blob_storage_write_setting_py3.py | 45 + .../azure_data_explorer_linked_service.py | 86 + .../azure_data_explorer_linked_service_py3.py | 86 + .../models/azure_data_explorer_sink.py | 76 + .../models/azure_data_explorer_sink_py3.py | 76 + .../models/azure_data_explorer_source.py | 70 + .../models/azure_data_explorer_source_py3.py | 70 + .../azure_data_explorer_table_dataset.py | 72 + .../azure_data_explorer_table_dataset_py3.py | 72 + ...zure_data_lake_analytics_linked_service.py | 99 ++ ..._data_lake_analytics_linked_service_py3.py | 99 ++ .../models/azure_data_lake_store_dataset.py | 86 + .../azure_data_lake_store_dataset_py3.py | 86 + .../azure_data_lake_store_linked_service.py | 98 ++ ...zure_data_lake_store_linked_service_py3.py | 98 ++ .../models/azure_data_lake_store_location.py | 45 + .../azure_data_lake_store_location_py3.py | 45 + .../azure_data_lake_store_read_setting.py | 73 + .../azure_data_lake_store_read_setting_py3.py | 73 + .../models/azure_data_lake_store_sink.py | 69 + .../models/azure_data_lake_store_sink_py3.py | 69 + .../models/azure_data_lake_store_source.py | 58 + .../azure_data_lake_store_source_py3.py | 58 + .../azure_data_lake_store_write_setting.py | 45 + ...azure_data_lake_store_write_setting_py3.py | 45 + .../models/azure_databricks_linked_service.py | 126 ++ .../azure_databricks_linked_service_py3.py | 126 ++ .../models/azure_function_activity.py | 85 + .../models/azure_function_activity_py3.py | 85 + .../models/azure_function_linked_service.py | 69 + .../azure_function_linked_service_py3.py | 69 + .../models/azure_key_vault_linked_service.py | 60 + .../azure_key_vault_linked_service_py3.py | 60 + .../azure_key_vault_secret_reference.py | 51 + .../azure_key_vault_secret_reference_py3.py | 51 + .../azure_ml_batch_execution_activity.py | 82 + .../azure_ml_batch_execution_activity_py3.py | 82 + .../models/azure_ml_linked_service.py | 94 ++ .../models/azure_ml_linked_service_py3.py | 94 ++ .../azure_ml_update_resource_activity.py | 81 + .../azure_ml_update_resource_activity_py3.py | 81 + .../models/azure_ml_web_service_file.py | 43 + .../models/azure_ml_web_service_file_py3.py | 43 + .../models/azure_my_sql_linked_service.py | 71 + .../models/azure_my_sql_linked_service_py3.py | 71 + .../datafactory/models/azure_my_sql_source.py | 57 + .../models/azure_my_sql_source_py3.py | 57 + .../models/azure_my_sql_table_dataset.py | 72 + .../models/azure_my_sql_table_dataset_py3.py | 72 + .../azure_postgre_sql_linked_service.py | 70 + .../azure_postgre_sql_linked_service_py3.py | 70 + .../models/azure_postgre_sql_source.py | 57 + .../models/azure_postgre_sql_source_py3.py | 57 + .../models/azure_postgre_sql_table_dataset.py | 72 + .../azure_postgre_sql_table_dataset_py3.py | 72 + .../datafactory/models/azure_queue_sink.py | 61 + .../models/azure_queue_sink_py3.py | 61 + 
.../models/azure_search_index_dataset.py | 73 + .../models/azure_search_index_dataset_py3.py | 73 + .../models/azure_search_index_sink.py | 66 + .../models/azure_search_index_sink_py3.py | 66 + .../models/azure_search_linked_service.py | 69 + .../models/azure_search_linked_service_py3.py | 69 + .../azure_sql_database_linked_service.py | 87 + .../azure_sql_database_linked_service_py3.py | 87 + .../models/azure_sql_dw_linked_service.py | 88 + .../models/azure_sql_dw_linked_service_py3.py | 88 + .../models/azure_sql_dw_table_dataset.py | 72 + .../models/azure_sql_dw_table_dataset_py3.py | 72 + .../mgmt/datafactory/models/azure_sql_sink.py | 87 + .../datafactory/models/azure_sql_sink_py3.py | 87 + .../datafactory/models/azure_sql_source.py | 73 + .../models/azure_sql_source_py3.py | 73 + .../models/azure_sql_table_dataset.py | 72 + .../models/azure_sql_table_dataset_py3.py | 72 + .../models/azure_storage_linked_service.py | 83 + .../azure_storage_linked_service_py3.py | 83 + .../datafactory/models/azure_table_dataset.py | 73 + .../models/azure_table_dataset_py3.py | 73 + .../datafactory/models/azure_table_sink.py | 81 + .../models/azure_table_sink_py3.py | 81 + .../datafactory/models/azure_table_source.py | 63 + .../models/azure_table_source_py3.py | 63 + .../azure_table_storage_linked_service.py | 83 + .../azure_table_storage_linked_service_py3.py | 83 + .../datafactory/models/blob_events_trigger.py | 85 + .../models/blob_events_trigger_py3.py | 85 + .../mgmt/datafactory/models/blob_sink.py | 80 + .../mgmt/datafactory/models/blob_sink_py3.py | 80 + .../mgmt/datafactory/models/blob_source.py | 68 + .../datafactory/models/blob_source_py3.py | 68 + .../mgmt/datafactory/models/blob_trigger.py | 78 + .../datafactory/models/blob_trigger_py3.py | 78 + .../models/cassandra_linked_service.py | 84 + .../models/cassandra_linked_service_py3.py | 84 + .../datafactory/models/cassandra_source.py | 67 + .../models/cassandra_source_py3.py | 67 + .../models/cassandra_table_dataset.py | 77 + .../models/cassandra_table_dataset_py3.py | 77 + .../models/concur_linked_service.py | 92 + .../models/concur_linked_service_py3.py | 92 + .../models/concur_object_dataset.py | 72 + .../models/concur_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/concur_source.py | 57 + .../datafactory/models/concur_source_py3.py | 57 + .../models/connector_read_setting.py | 45 + .../models/connector_read_setting_py3.py | 45 + .../models/connector_write_setting.py | 49 + .../models/connector_write_setting_py3.py | 49 + .../datafactory/models/control_activity.py | 60 + .../models/control_activity_py3.py | 60 + .../mgmt/datafactory/models/copy_activity.py | 124 ++ .../datafactory/models/copy_activity_py3.py | 124 ++ .../mgmt/datafactory/models/copy_sink.py | 79 + .../mgmt/datafactory/models/copy_sink_py3.py | 79 + .../mgmt/datafactory/models/copy_source.py | 79 + .../datafactory/models/copy_source_py3.py | 79 + .../models/cosmos_db_linked_service.py | 71 + .../models/cosmos_db_linked_service_py3.py | 71 + ...smos_db_mongo_db_api_collection_dataset.py | 73 + ..._db_mongo_db_api_collection_dataset_py3.py | 73 + .../cosmos_db_mongo_db_api_linked_service.py | 67 + ...smos_db_mongo_db_api_linked_service_py3.py | 67 + .../models/cosmos_db_mongo_db_api_sink.py | 68 + .../models/cosmos_db_mongo_db_api_sink_py3.py | 68 + .../models/cosmos_db_mongo_db_api_source.py | 71 + .../cosmos_db_mongo_db_api_source_py3.py | 71 + .../models/couchbase_linked_service.py | 70 + .../models/couchbase_linked_service_py3.py | 70 + 
.../datafactory/models/couchbase_source.py | 57 + .../models/couchbase_source_py3.py | 57 + .../models/couchbase_table_dataset.py | 72 + .../models/couchbase_table_dataset_py3.py | 72 + ...eate_linked_integration_runtime_request.py | 43 + ..._linked_integration_runtime_request_py3.py | 43 + .../datafactory/models/create_run_response.py | 34 + .../models/create_run_response_py3.py | 34 + .../datafactory/models/custom_activity.py | 91 + .../datafactory/models/custom_activity_py3.py | 91 + .../custom_activity_reference_object.py | 33 + .../custom_activity_reference_object_py3.py | 33 + .../custom_data_source_linked_service.py | 58 + .../custom_data_source_linked_service_py3.py | 58 + .../mgmt/datafactory/models/custom_dataset.py | 71 + .../datafactory/models/custom_dataset_py3.py | 71 + .../data_factory_management_client_enums.py | 448 +++++ .../data_lake_analytics_usql_activity.py | 98 ++ .../data_lake_analytics_usql_activity_py3.py | 98 ++ .../models/databricks_notebook_activity.py | 76 + .../databricks_notebook_activity_py3.py | 76 + .../models/databricks_spark_jar_activity.py | 75 + .../databricks_spark_jar_activity_py3.py | 75 + .../databricks_spark_python_activity.py | 75 + .../databricks_spark_python_activity_py3.py | 75 + .../azure/mgmt/datafactory/models/dataset.py | 106 ++ .../models/dataset_bzip2_compression.py | 38 + .../models/dataset_bzip2_compression_py3.py | 38 + .../datafactory/models/dataset_compression.py | 47 + .../models/dataset_compression_py3.py | 47 + .../models/dataset_deflate_compression.py | 42 + .../models/dataset_deflate_compression_py3.py | 42 + .../mgmt/datafactory/models/dataset_folder.py | 29 + .../datafactory/models/dataset_folder_py3.py | 29 + .../models/dataset_gzip_compression.py | 42 + .../models/dataset_gzip_compression_py3.py | 42 + .../datafactory/models/dataset_location.py | 49 + .../models/dataset_location_py3.py | 49 + .../mgmt/datafactory/models/dataset_py3.py | 106 ++ .../datafactory/models/dataset_reference.py | 48 + .../models/dataset_reference_py3.py | 48 + .../datafactory/models/dataset_resource.py | 53 + .../models/dataset_resource_paged.py | 27 + .../models/dataset_resource_py3.py | 53 + .../models/dataset_storage_format.py | 57 + .../models/dataset_storage_format_py3.py | 57 + .../models/dataset_zip_deflate_compression.py | 42 + .../dataset_zip_deflate_compression_py3.py | 42 + .../datafactory/models/db2_linked_service.py | 86 + .../models/db2_linked_service_py3.py | 86 + .../datafactory/models/delete_activity.py | 87 + .../datafactory/models/delete_activity_py3.py | 87 + .../models/delimited_text_dataset.py | 122 ++ .../models/delimited_text_dataset_py3.py | 122 ++ .../models/delimited_text_read_setting.py | 43 + .../models/delimited_text_read_setting_py3.py | 43 + .../datafactory/models/delimited_text_sink.py | 70 + .../models/delimited_text_sink_py3.py | 70 + .../models/delimited_text_source.py | 61 + .../models/delimited_text_source_py3.py | 61 + .../models/delimited_text_write_setting.py | 49 + .../delimited_text_write_setting_py3.py | 49 + .../models/dependency_reference.py | 42 + .../models/dependency_reference_py3.py | 42 + .../datafactory/models/distcp_settings.py | 49 + .../datafactory/models/distcp_settings_py3.py | 49 + .../models/document_db_collection_dataset.py | 73 + .../document_db_collection_dataset_py3.py | 73 + .../models/document_db_collection_sink.py | 71 + .../models/document_db_collection_sink_py3.py | 71 + .../models/document_db_collection_source.py | 62 + .../document_db_collection_source_py3.py | 62 + 
.../models/drill_linked_service.py | 69 + .../models/drill_linked_service_py3.py | 69 + .../mgmt/datafactory/models/drill_source.py | 57 + .../datafactory/models/drill_source_py3.py | 57 + .../datafactory/models/drill_table_dataset.py | 72 + .../models/drill_table_dataset_py3.py | 72 + .../models/dynamics_ax_linked_service.py | 93 + .../models/dynamics_ax_linked_service_py3.py | 93 + .../models/dynamics_ax_resource_dataset.py | 73 + .../dynamics_ax_resource_dataset_py3.py | 73 + .../datafactory/models/dynamics_ax_source.py | 57 + .../models/dynamics_ax_source_py3.py | 57 + .../models/dynamics_entity_dataset.py | 72 + .../models/dynamics_entity_dataset_py3.py | 72 + .../models/dynamics_linked_service.py | 109 ++ .../models/dynamics_linked_service_py3.py | 109 ++ .../mgmt/datafactory/models/dynamics_sink.py | 76 + .../datafactory/models/dynamics_sink_py3.py | 76 + .../datafactory/models/dynamics_source.py | 58 + .../datafactory/models/dynamics_source_py3.py | 58 + .../models/eloqua_linked_service.py | 91 + .../models/eloqua_linked_service_py3.py | 91 + .../models/eloqua_object_dataset.py | 72 + .../models/eloqua_object_dataset_py3.py | 72 + .../mgmt/datafactory/models/eloqua_source.py | 57 + .../datafactory/models/eloqua_source_py3.py | 57 + .../datafactory/models/entity_reference.py | 34 + .../models/entity_reference_py3.py | 34 + .../models/execute_pipeline_activity.py | 65 + .../models/execute_pipeline_activity_py3.py | 65 + .../models/execute_ssis_package_activity.py | 120 ++ .../execute_ssis_package_activity_py3.py | 120 ++ .../datafactory/models/execution_activity.py | 74 + .../models/execution_activity_py3.py | 74 + .../models/exposure_control_request.py | 32 + .../models/exposure_control_request_py3.py | 32 + .../models/exposure_control_response.py | 40 + .../models/exposure_control_response_py3.py | 40 + .../mgmt/datafactory/models/expression.py | 43 + .../mgmt/datafactory/models/expression_py3.py | 43 + .../azure/mgmt/datafactory/models/factory.py | 81 + .../models/factory_git_hub_configuration.py | 58 + .../factory_git_hub_configuration_py3.py | 58 + .../datafactory/models/factory_identity.py | 49 + .../models/factory_identity_py3.py | 49 + .../mgmt/datafactory/models/factory_paged.py | 27 + .../mgmt/datafactory/models/factory_py3.py | 81 + .../models/factory_repo_configuration.py | 65 + .../models/factory_repo_configuration_py3.py | 65 + .../datafactory/models/factory_repo_update.py | 33 + .../models/factory_repo_update_py3.py | 33 + .../models/factory_update_parameters.py | 32 + .../models/factory_update_parameters_py3.py | 32 + .../models/factory_vsts_configuration.py | 62 + .../models/factory_vsts_configuration_py3.py | 62 + .../models/file_server_linked_service.py | 74 + .../models/file_server_linked_service_py3.py | 74 + .../models/file_server_location.py | 45 + .../models/file_server_location_py3.py | 45 + .../models/file_server_read_setting.py | 73 + .../models/file_server_read_setting_py3.py | 73 + .../models/file_server_write_setting.py | 45 + .../models/file_server_write_setting_py3.py | 45 + .../datafactory/models/file_share_dataset.py | 101 ++ .../models/file_share_dataset_py3.py | 101 ++ .../datafactory/models/file_system_sink.py | 65 + .../models/file_system_sink_py3.py | 65 + .../datafactory/models/file_system_source.py | 58 + .../models/file_system_source_py3.py | 58 + .../datafactory/models/filter_activity.py | 61 + .../datafactory/models/filter_activity_py3.py | 61 + .../datafactory/models/for_each_activity.py | 73 + .../models/for_each_activity_py3.py | 73 + 
 .../datafactory/models/format_read_setting.py | 39 +
 .../models/format_read_setting_py3.py | 39 +
 .../models/format_write_setting.py | 39 +
 .../models/format_write_setting_py3.py | 39 +
 .../datafactory/models/ftp_read_setting.py | 63 +
 .../models/ftp_read_setting_py3.py | 63 +
 .../models/ftp_server_linked_service.py | 98 ++
 .../models/ftp_server_linked_service_py3.py | 98 ++
 .../datafactory/models/ftp_server_location.py | 45 +
 .../models/ftp_server_location_py3.py | 45 +
 .../models/get_metadata_activity.py | 67 +
 .../models/get_metadata_activity_py3.py | 67 +
 .../get_ssis_object_metadata_request.py | 28 +
 .../get_ssis_object_metadata_request_py3.py | 28 +
 .../models/git_hub_access_token_request.py | 44 +
 .../git_hub_access_token_request_py3.py | 44 +
 .../models/git_hub_access_token_response.py | 28 +
 .../git_hub_access_token_response_py3.py | 28 +
 .../models/google_ad_words_linked_service.py | 119 ++
 .../google_ad_words_linked_service_py3.py | 119 ++
 .../models/google_ad_words_object_dataset.py | 72 +
 .../google_ad_words_object_dataset_py3.py | 72 +
 .../models/google_ad_words_source.py | 57 +
 .../models/google_ad_words_source_py3.py | 57 +
 .../models/google_big_query_linked_service.py | 124 ++
 .../google_big_query_linked_service_py3.py | 124 ++
 .../models/google_big_query_object_dataset.py | 72 +
 .../google_big_query_object_dataset_py3.py | 72 +
 .../models/google_big_query_source.py | 57 +
 .../models/google_big_query_source_py3.py | 57 +
 .../models/greenplum_linked_service.py | 69 +
 .../models/greenplum_linked_service_py3.py | 69 +
 .../datafactory/models/greenplum_source.py | 57 +
 .../models/greenplum_source_py3.py | 57 +
 .../models/greenplum_table_dataset.py | 72 +
 .../models/greenplum_table_dataset_py3.py | 72 +
 .../models/hbase_linked_service.py | 114 ++
 .../models/hbase_linked_service_py3.py | 114 ++
 .../models/hbase_object_dataset.py | 72 +
 .../models/hbase_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/hbase_source.py | 57 +
 .../datafactory/models/hbase_source_py3.py | 57 +
 .../models/hd_insight_hive_activity.py | 96 ++
 .../models/hd_insight_hive_activity_py3.py | 96 ++
 .../models/hd_insight_linked_service.py | 90 +
 .../models/hd_insight_linked_service_py3.py | 90 +
 .../models/hd_insight_map_reduce_activity.py | 99 ++
 .../hd_insight_map_reduce_activity_py3.py | 99 ++
 .../hd_insight_on_demand_linked_service.py | 225 +++
 ...hd_insight_on_demand_linked_service_py3.py | 225 +++
 .../models/hd_insight_pig_activity.py | 87 +
 .../models/hd_insight_pig_activity_py3.py | 87 +
 .../models/hd_insight_spark_activity.py | 100 ++
 .../models/hd_insight_spark_activity_py3.py | 100 ++
 .../models/hd_insight_streaming_activity.py | 122 ++
 .../hd_insight_streaming_activity_py3.py | 122 ++
 .../datafactory/models/hdfs_linked_service.py | 81 +
 .../models/hdfs_linked_service_py3.py | 81 +
 .../mgmt/datafactory/models/hdfs_location.py | 45 +
 .../datafactory/models/hdfs_location_py3.py | 45 +
 .../datafactory/models/hdfs_read_setting.py | 77 +
 .../models/hdfs_read_setting_py3.py | 77 +
 .../mgmt/datafactory/models/hdfs_source.py | 62 +
 .../datafactory/models/hdfs_source_py3.py | 62 +
 .../datafactory/models/hive_linked_service.py | 147 ++
 .../models/hive_linked_service_py3.py | 147 ++
 .../datafactory/models/hive_object_dataset.py | 72 +
 .../models/hive_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/hive_source.py | 57 +
 .../datafactory/models/hive_source_py3.py | 57 +
 .../mgmt/datafactory/models/http_dataset.py | 99 ++
 .../datafactory/models/http_dataset_py3.py | 99 ++
 .../datafactory/models/http_linked_service.py | 105 ++
 .../models/http_linked_service_py3.py | 105 ++
 .../datafactory/models/http_read_setting.py | 63 +
 .../models/http_read_setting_py3.py | 63 +
 .../models/http_server_location.py | 50 +
 .../models/http_server_location_py3.py | 50 +
 .../mgmt/datafactory/models/http_source.py | 60 +
 .../datafactory/models/http_source_py3.py | 60 +
 .../models/hubspot_linked_service.py | 96 ++
 .../models/hubspot_linked_service_py3.py | 96 ++
 .../models/hubspot_object_dataset.py | 72 +
 .../models/hubspot_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/hubspot_source.py | 57 +
 .../datafactory/models/hubspot_source_py3.py | 57 +
 .../models/if_condition_activity.py | 72 +
 .../models/if_condition_activity_py3.py | 72 +
 .../models/impala_linked_service.py | 117 ++
 .../models/impala_linked_service_py3.py | 117 ++
 .../models/impala_object_dataset.py | 72 +
 .../models/impala_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/impala_source.py | 57 +
 .../datafactory/models/impala_source_py3.py | 57 +
 .../datafactory/models/integration_runtime.py | 51 +
 .../models/integration_runtime_auth_keys.py | 32 +
 .../integration_runtime_auth_keys_py3.py | 32 +
 .../integration_runtime_compute_properties.py | 60 +
 ...egration_runtime_compute_properties_py3.py | 60 +
 .../integration_runtime_connection_info.py | 70 +
 ...integration_runtime_connection_info_py3.py | 70 +
 ..._runtime_custom_setup_script_properties.py | 33 +
 ...time_custom_setup_script_properties_py3.py | 33 +
 ...tegration_runtime_data_proxy_properties.py | 37 +
 ...ation_runtime_data_proxy_properties_py3.py | 37 +
 .../integration_runtime_monitoring_data.py | 33 +
 ...integration_runtime_monitoring_data_py3.py | 33 +
 .../integration_runtime_node_ip_address.py | 35 +
 ...integration_runtime_node_ip_address_py3.py | 35 +
 ...ntegration_runtime_node_monitoring_data.py | 79 +
 ...ration_runtime_node_monitoring_data_py3.py | 79 +
 .../models/integration_runtime_py3.py | 51 +
 .../models/integration_runtime_reference.py | 48 +
 .../integration_runtime_reference_py3.py | 48 +
 ...ation_runtime_regenerate_key_parameters.py | 30 +
 ...n_runtime_regenerate_key_parameters_py3.py | 30 +
 .../models/integration_runtime_resource.py | 53 +
 .../integration_runtime_resource_paged.py | 27 +
 .../integration_runtime_resource_py3.py | 53 +
 .../integration_runtime_ssis_catalog_info.py | 55 +
 ...tegration_runtime_ssis_catalog_info_py3.py | 55 +
 .../integration_runtime_ssis_properties.py | 59 +
 ...integration_runtime_ssis_properties_py3.py | 59 +
 .../models/integration_runtime_status.py | 64 +
 ...ntegration_runtime_status_list_response.py | 40 +
 ...ration_runtime_status_list_response_py3.py | 40 +
 .../models/integration_runtime_status_py3.py | 64 +
 .../integration_runtime_status_response.py | 42 +
 ...integration_runtime_status_response_py3.py | 42 +
 .../integration_runtime_vnet_properties.py | 38 +
 ...integration_runtime_vnet_properties_py3.py | 38 +
 .../datafactory/models/jira_linked_service.py | 98 ++
 .../models/jira_linked_service_py3.py | 98 ++
 .../datafactory/models/jira_object_dataset.py | 72 +
 .../models/jira_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/jira_source.py | 57 +
 .../datafactory/models/jira_source_py3.py | 57 +
 .../mgmt/datafactory/models/json_format.py | 82 +
 .../datafactory/models/json_format_py3.py | 82 +
 .../models/linked_integration_runtime.py | 58 +
 ...d_integration_runtime_key_authorization.py | 39 +
 ...tegration_runtime_key_authorization_py3.py | 39 +
 .../models/linked_integration_runtime_py3.py | 58 +
 ..._integration_runtime_rbac_authorization.py | 41 +
 ...egration_runtime_rbac_authorization_py3.py | 41 +
 .../linked_integration_runtime_request.py | 35 +
 .../linked_integration_runtime_request_py3.py | 35 +
 .../models/linked_integration_runtime_type.py | 42 +
 .../linked_integration_runtime_type_py3.py | 42 +
 .../mgmt/datafactory/models/linked_service.py | 98 ++
 .../datafactory/models/linked_service_py3.py | 98 ++
 .../models/linked_service_reference.py | 48 +
 .../models/linked_service_reference_py3.py | 48 +
 .../models/linked_service_resource.py | 53 +
 .../models/linked_service_resource_paged.py | 27 +
 .../models/linked_service_resource_py3.py | 53 +
 .../models/log_storage_settings.py | 46 +
 .../models/log_storage_settings_py3.py | 46 +
 .../datafactory/models/lookup_activity.py | 74 +
 .../datafactory/models/lookup_activity_py3.py | 74 +
 .../models/magento_linked_service.py | 85 +
 .../models/magento_linked_service_py3.py | 85 +
 .../models/magento_object_dataset.py | 72 +
 .../models/magento_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/magento_source.py | 57 +
 .../datafactory/models/magento_source_py3.py | 57 +
 .../models/managed_integration_runtime.py | 65 +
 .../managed_integration_runtime_error.py | 55 +
 .../managed_integration_runtime_error_py3.py | 55 +
 .../managed_integration_runtime_node.py | 52 +
 .../managed_integration_runtime_node_py3.py | 52 +
 ...ed_integration_runtime_operation_result.py | 65 +
 ...ntegration_runtime_operation_result_py3.py | 65 +
 .../models/managed_integration_runtime_py3.py | 65 +
 .../managed_integration_runtime_status.py | 78 +
 .../managed_integration_runtime_status_py3.py | 78 +
 .../models/maria_db_linked_service.py | 69 +
 .../models/maria_db_linked_service_py3.py | 69 +
 .../datafactory/models/maria_db_source.py | 57 +
 .../datafactory/models/maria_db_source_py3.py | 57 +
 .../models/maria_db_table_dataset.py | 72 +
 .../models/maria_db_table_dataset_py3.py | 72 +
 .../models/marketo_linked_service.py | 90 +
 .../models/marketo_linked_service_py3.py | 90 +
 .../models/marketo_object_dataset.py | 72 +
 .../models/marketo_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/marketo_source.py | 57 +
 .../datafactory/models/marketo_source_py3.py | 57 +
 .../models/mongo_db_collection_dataset.py | 73 +
 .../models/mongo_db_collection_dataset_py3.py | 73 +
 .../mongo_db_cursor_methods_properties.py | 53 +
 .../mongo_db_cursor_methods_properties_py3.py | 53 +
 .../models/mongo_db_linked_service.py | 109 ++
 .../models/mongo_db_linked_service_py3.py | 109 ++
 .../datafactory/models/mongo_db_source.py | 57 +
 .../datafactory/models/mongo_db_source_py3.py | 57 +
 .../models/mongo_db_v2_collection_dataset.py | 73 +
 .../mongo_db_v2_collection_dataset_py3.py | 73 +
 .../models/mongo_db_v2_linked_service.py | 66 +
 .../models/mongo_db_v2_linked_service_py3.py | 66 +
 .../datafactory/models/mongo_db_v2_source.py | 71 +
 .../models/mongo_db_v2_source_py3.py | 71 +
 .../models/multiple_pipeline_trigger.py | 68 +
 .../models/multiple_pipeline_trigger_py3.py | 68 +
 .../models/my_sql_linked_service.py | 70 +
 .../models/my_sql_linked_service_py3.py | 70 +
 .../models/netezza_linked_service.py | 69 +
 .../models/netezza_linked_service_py3.py | 69 +
 .../mgmt/datafactory/models/netezza_source.py | 57 +
 .../datafactory/models/netezza_source_py3.py | 57 +
 .../models/netezza_table_dataset.py | 72 +
 .../models/netezza_table_dataset_py3.py | 72 +
 .../models/odata_linked_service.py | 127 ++
 .../models/odata_linked_service_py3.py | 127 ++
 .../models/odata_resource_dataset.py | 72 +
 .../models/odata_resource_dataset_py3.py | 72 +
 .../datafactory/models/odbc_linked_service.py | 86 +
 .../models/odbc_linked_service_py3.py | 86 +
 .../mgmt/datafactory/models/odbc_sink.py | 66 +
 .../mgmt/datafactory/models/odbc_sink_py3.py | 66 +
 .../datafactory/models/office365_dataset.py | 79 +
 .../models/office365_dataset_py3.py | 79 +
 .../models/office365_linked_service.py | 83 +
 .../models/office365_linked_service_py3.py | 83 +
 .../datafactory/models/office365_source.py | 52 +
 .../models/office365_source_py3.py | 52 +
 .../mgmt/datafactory/models/operation.py | 41 +
 .../datafactory/models/operation_display.py | 41 +
 .../models/operation_display_py3.py | 41 +
 .../models/operation_log_specification.py | 37 +
 .../models/operation_log_specification_py3.py | 37 +
 .../models/operation_metric_availability.py | 33 +
 .../operation_metric_availability_py3.py | 33 +
 .../models/operation_metric_dimension.py | 37 +
 .../models/operation_metric_dimension_py3.py | 37 +
 .../models/operation_metric_specification.py | 68 +
 .../operation_metric_specification_py3.py | 68 +
 .../datafactory/models/operation_paged.py | 27 +
 .../mgmt/datafactory/models/operation_py3.py | 41 +
 .../models/operation_service_specification.py | 34 +
 .../operation_service_specification_py3.py | 34 +
 .../models/oracle_linked_service.py | 71 +
 .../models/oracle_linked_service_py3.py | 71 +
 .../models/oracle_partition_settings.py | 46 +
 .../models/oracle_partition_settings_py3.py | 46 +
 .../oracle_service_cloud_linked_service.py | 95 ++
 ...oracle_service_cloud_linked_service_py3.py | 95 ++
 .../oracle_service_cloud_object_dataset.py | 72 +
 ...oracle_service_cloud_object_dataset_py3.py | 72 +
 .../models/oracle_service_cloud_source.py | 57 +
 .../models/oracle_service_cloud_source_py3.py | 57 +
 .../mgmt/datafactory/models/oracle_sink.py | 66 +
 .../datafactory/models/oracle_sink_py3.py | 66 +
 .../mgmt/datafactory/models/oracle_source.py | 74 +
 .../datafactory/models/oracle_source_py3.py | 74 +
 .../models/oracle_table_dataset.py | 72 +
 .../models/oracle_table_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/orc_format.py | 46 +
 .../mgmt/datafactory/models/orc_format_py3.py | 46 +
 .../models/parameter_specification.py | 39 +
 .../models/parameter_specification_py3.py | 39 +
 .../datafactory/models/parquet_dataset.py | 76 +
 .../datafactory/models/parquet_dataset_py3.py | 76 +
 .../mgmt/datafactory/models/parquet_format.py | 46 +
 .../datafactory/models/parquet_format_py3.py | 46 +
 .../mgmt/datafactory/models/parquet_sink.py | 65 +
 .../datafactory/models/parquet_sink_py3.py | 65 +
 .../mgmt/datafactory/models/parquet_source.py | 56 +
 .../datafactory/models/parquet_source_py3.py | 56 +
 .../models/paypal_linked_service.py | 92 +
 .../models/paypal_linked_service_py3.py | 92 +
 .../models/paypal_object_dataset.py | 72 +
 .../models/paypal_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/paypal_source.py | 57 +
 .../datafactory/models/paypal_source_py3.py | 57 +
 .../models/phoenix_linked_service.py | 121 ++
 .../models/phoenix_linked_service_py3.py | 121 ++
 .../models/phoenix_object_dataset.py | 72 +
 .../models/phoenix_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/phoenix_source.py | 57 +
 .../datafactory/models/phoenix_source_py3.py | 57 +
 .../datafactory/models/pipeline_folder.py | 29 +
 .../datafactory/models/pipeline_folder_py3.py | 29 +
 .../datafactory/models/pipeline_reference.py | 48 +
 .../models/pipeline_reference_py3.py | 48 +
 .../datafactory/models/pipeline_resource.py | 84 +
 .../models/pipeline_resource_paged.py | 27 +
 .../models/pipeline_resource_py3.py | 84 +
 .../mgmt/datafactory/models/pipeline_run.py | 99 ++
 .../models/pipeline_run_invoked_by.py | 45 +
 .../models/pipeline_run_invoked_by_py3.py | 45 +
 .../datafactory/models/pipeline_run_py3.py | 99 ++
 .../models/pipeline_runs_query_response.py | 39 +
 .../pipeline_runs_query_response_py3.py | 39 +
 .../datafactory/models/polybase_settings.py | 53 +
 .../models/polybase_settings_py3.py | 53 +
 .../models/postgre_sql_linked_service.py | 70 +
 .../models/postgre_sql_linked_service_py3.py | 70 +
 .../models/presto_linked_service.py | 132 ++
 .../models/presto_linked_service_py3.py | 132 ++
 .../models/presto_object_dataset.py | 72 +
 .../models/presto_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/presto_source.py | 57 +
 .../datafactory/models/presto_source_py3.py | 57 +
 .../models/quick_books_linked_service.py | 100 ++
 .../models/quick_books_linked_service_py3.py | 100 ++
 .../models/quick_books_object_dataset.py | 72 +
 .../models/quick_books_object_dataset_py3.py | 72 +
 .../datafactory/models/quick_books_source.py | 57 +
 .../models/quick_books_source_py3.py | 57 +
 .../datafactory/models/recurrence_schedule.py | 50 +
 .../models/recurrence_schedule_occurrence.py | 38 +
 .../recurrence_schedule_occurrence_py3.py | 38 +
 .../models/recurrence_schedule_py3.py | 50 +
 .../redirect_incompatible_row_settings.py | 47 +
 .../redirect_incompatible_row_settings_py3.py | 47 +
 .../models/redshift_unload_settings.py | 48 +
 .../models/redshift_unload_settings_py3.py | 48 +
 .../datafactory/models/relational_source.py | 57 +
 .../models/relational_source_py3.py | 57 +
 .../models/relational_table_dataset.py | 72 +
 .../models/relational_table_dataset_py3.py | 72 +
 .../models/rerun_trigger_resource.py | 54 +
 .../models/rerun_trigger_resource_paged.py | 27 +
 .../models/rerun_trigger_resource_py3.py | 54 +
 .../models/rerun_tumbling_window_trigger.py | 78 +
 ...mbling_window_trigger_action_parameters.py | 47 +
 ...ng_window_trigger_action_parameters_py3.py | 47 +
 .../rerun_tumbling_window_trigger_py3.py | 78 +
 .../azure/mgmt/datafactory/models/resource.py | 58 +
 .../mgmt/datafactory/models/resource_py3.py | 58 +
 .../models/responsys_linked_service.py | 94 ++
 .../models/responsys_linked_service_py3.py | 94 ++
 .../models/responsys_object_dataset.py | 72 +
 .../models/responsys_object_dataset_py3.py | 72 +
 .../datafactory/models/responsys_source.py | 57 +
 .../models/responsys_source_py3.py | 57 +
 .../models/rest_resource_dataset.py | 93 +
 .../models/rest_resource_dataset_py3.py | 93 +
 .../models/rest_service_linked_service.py | 107 ++
 .../models/rest_service_linked_service_py3.py | 107 ++
 .../mgmt/datafactory/models/rest_source.py | 65 +
 .../datafactory/models/rest_source_py3.py | 65 +
 .../mgmt/datafactory/models/retry_policy.py | 38 +
 .../datafactory/models/retry_policy_py3.py | 38 +
 .../models/run_filter_parameters.py | 54 +
 .../models/run_filter_parameters_py3.py | 54 +
 .../datafactory/models/run_query_filter.py | 53 +
 .../models/run_query_filter_py3.py | 53 +
 .../datafactory/models/run_query_order_by.py | 46 +
 .../models/run_query_order_by_py3.py | 46 +
 .../models/salesforce_linked_service.py | 82 +
 .../models/salesforce_linked_service_py3.py | 82 +
 ...lesforce_marketing_cloud_linked_service.py | 91 +
 ...orce_marketing_cloud_linked_service_py3.py | 91 +
 ...lesforce_marketing_cloud_object_dataset.py | 72 +
 ...orce_marketing_cloud_object_dataset_py3.py | 72 +
 .../salesforce_marketing_cloud_source.py | 57 +
 .../salesforce_marketing_cloud_source_py3.py | 57 +
 .../models/salesforce_object_dataset.py | 72 +
 .../models/salesforce_object_dataset_py3.py | 72 +
 .../datafactory/models/salesforce_sink.py | 83 +
 .../datafactory/models/salesforce_sink_py3.py | 83 +
 .../datafactory/models/salesforce_source.py | 62 +
 .../models/salesforce_source_py3.py | 62 +
 .../models/sap_bw_linked_service.py | 88 +
 .../models/sap_bw_linked_service_py3.py | 88 +
 .../sap_cloud_for_customer_linked_service.py | 76 +
 ...p_cloud_for_customer_linked_service_py3.py | 76 +
 ...sap_cloud_for_customer_resource_dataset.py | 73 +
 ...cloud_for_customer_resource_dataset_py3.py | 73 +
 .../models/sap_cloud_for_customer_sink.py | 66 +
 .../models/sap_cloud_for_customer_sink_py3.py | 66 +
 .../models/sap_cloud_for_customer_source.py | 57 +
 .../sap_cloud_for_customer_source_py3.py | 57 +
 .../models/sap_ecc_linked_service.py | 76 +
 .../models/sap_ecc_linked_service_py3.py | 76 +
 .../models/sap_ecc_resource_dataset.py | 73 +
 .../models/sap_ecc_resource_dataset_py3.py | 73 +
 .../mgmt/datafactory/models/sap_ecc_source.py | 57 +
 .../datafactory/models/sap_ecc_source_py3.py | 57 +
 .../models/sap_hana_linked_service.py | 80 +
 .../models/sap_hana_linked_service_py3.py | 80 +
 .../models/sap_open_hub_linked_service.py | 99 ++
 .../models/sap_open_hub_linked_service_py3.py | 99 ++
 .../datafactory/models/sap_open_hub_source.py | 53 +
 .../models/sap_open_hub_source_py3.py | 53 +
 .../models/sap_open_hub_table_dataset.py | 87 +
 .../models/sap_open_hub_table_dataset_py3.py | 87 +
 .../datafactory/models/schedule_trigger.py | 64 +
 .../models/schedule_trigger_py3.py | 64 +
 .../models/schedule_trigger_recurrence.py | 54 +
 .../models/schedule_trigger_recurrence_py3.py | 54 +
 .../mgmt/datafactory/models/script_action.py | 49 +
 .../datafactory/models/script_action_py3.py | 49 +
 .../mgmt/datafactory/models/secret_base.py | 41 +
 .../datafactory/models/secret_base_py3.py | 41 +
 .../mgmt/datafactory/models/secure_string.py | 40 +
 .../datafactory/models/secure_string_py3.py | 40 +
 ...dency_tumbling_window_trigger_reference.py | 46 +
 ...y_tumbling_window_trigger_reference_py3.py | 46 +
 .../models/self_hosted_integration_runtime.py | 46 +
 .../self_hosted_integration_runtime_node.py | 139 ++
 ...elf_hosted_integration_runtime_node_py3.py | 139 ++
 .../self_hosted_integration_runtime_py3.py | 46 +
 .../self_hosted_integration_runtime_status.py | 146 ++
 ...f_hosted_integration_runtime_status_py3.py | 146 ++
 .../models/service_now_linked_service.py | 106 ++
 .../models/service_now_linked_service_py3.py | 106 ++
 .../models/service_now_object_dataset.py | 72 +
 .../models/service_now_object_dataset_py3.py | 72 +
 .../datafactory/models/service_now_source.py | 57 +
 .../models/service_now_source_py3.py | 57 +
 .../models/set_variable_activity.py | 59 +
 .../models/set_variable_activity_py3.py | 59 +
 .../mgmt/datafactory/models/sftp_location.py | 45 +
 .../datafactory/models/sftp_location_py3.py | 45 +
 .../datafactory/models/sftp_read_setting.py | 68 +
 .../models/sftp_read_setting_py3.py | 68 +
 .../models/sftp_server_linked_service.py | 119 ++
 .../models/sftp_server_linked_service_py3.py | 119 ++
 .../models/shopify_linked_service.py | 86 +
 .../models/shopify_linked_service_py3.py | 86 +
 .../models/shopify_object_dataset.py | 72 +
 .../models/shopify_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/shopify_source.py | 57 +
 .../datafactory/models/shopify_source_py3.py | 57 +
 .../models/spark_linked_service.py | 131 ++
 .../models/spark_linked_service_py3.py | 131 ++
 .../models/spark_object_dataset.py | 72 +
 .../models/spark_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/spark_source.py | 57 +
 .../datafactory/models/spark_source_py3.py | 57 +
 .../mgmt/datafactory/models/sql_dw_sink.py | 77 +
 .../datafactory/models/sql_dw_sink_py3.py | 77 +
 .../mgmt/datafactory/models/sql_dw_source.py | 70 +
 .../datafactory/models/sql_dw_source_py3.py | 70 +
 .../models/sql_server_linked_service.py | 74 +
 .../models/sql_server_linked_service_py3.py | 74 +
 .../datafactory/models/sql_server_sink.py | 87 +
 .../datafactory/models/sql_server_sink_py3.py | 87 +
 .../datafactory/models/sql_server_source.py | 73 +
 .../models/sql_server_source_py3.py | 73 +
 .../sql_server_stored_procedure_activity.py | 70 +
 ...ql_server_stored_procedure_activity_py3.py | 70 +
 .../models/sql_server_table_dataset.py | 72 +
 .../models/sql_server_table_dataset_py3.py | 72 +
 .../azure/mgmt/datafactory/models/sql_sink.py | 87 +
 .../mgmt/datafactory/models/sql_sink_py3.py | 87 +
 .../mgmt/datafactory/models/sql_source.py | 69 +
 .../mgmt/datafactory/models/sql_source_py3.py | 69 +
 .../models/square_linked_service.py | 98 ++
 .../models/square_linked_service_py3.py | 98 ++
 .../models/square_object_dataset.py | 72 +
 .../models/square_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/square_source.py | 57 +
 .../datafactory/models/square_source_py3.py | 57 +
 .../datafactory/models/ssis_environment.py | 51 +
 .../models/ssis_environment_py3.py | 51 +
 .../models/ssis_environment_reference.py | 40 +
 .../models/ssis_environment_reference_py3.py | 40 +
 .../models/ssis_execution_credential.py | 44 +
 .../models/ssis_execution_credential_py3.py | 44 +
 .../models/ssis_execution_parameter.py | 35 +
 .../models/ssis_execution_parameter_py3.py | 35 +
 .../mgmt/datafactory/models/ssis_folder.py | 43 +
 .../datafactory/models/ssis_folder_py3.py | 43 +
 .../models/ssis_object_metadata.py | 53 +
 .../ssis_object_metadata_list_response.py | 33 +
 .../ssis_object_metadata_list_response_py3.py | 33 +
 .../models/ssis_object_metadata_py3.py | 53 +
 .../ssis_object_metadata_status_response.py | 40 +
 ...sis_object_metadata_status_response_py3.py | 40 +
 .../mgmt/datafactory/models/ssis_package.py | 59 +
 .../models/ssis_package_location.py | 35 +
 .../models/ssis_package_location_py3.py | 35 +
 .../datafactory/models/ssis_package_py3.py | 59 +
 .../mgmt/datafactory/models/ssis_parameter.py | 72 +
 .../datafactory/models/ssis_parameter_py3.py | 72 +
 .../mgmt/datafactory/models/ssis_project.py | 60 +
 .../datafactory/models/ssis_project_py3.py | 60 +
 .../models/ssis_property_override.py | 40 +
 .../models/ssis_property_override_py3.py | 40 +
 .../mgmt/datafactory/models/ssis_variable.py | 52 +
 .../datafactory/models/ssis_variable_py3.py | 52 +
 .../datafactory/models/staging_settings.py | 51 +
 .../models/staging_settings_py3.py | 51 +
 .../models/stored_procedure_parameter.py | 35 +
 .../models/stored_procedure_parameter_py3.py | 35 +
 .../mgmt/datafactory/models/sub_resource.py | 50 +
 .../datafactory/models/sub_resource_py3.py | 50 +
 .../models/sybase_linked_service.py | 91 +
 .../models/sybase_linked_service_py3.py | 91 +
 .../models/teradata_linked_service.py | 80 +
 .../models/teradata_linked_service_py3.py | 80 +
 .../mgmt/datafactory/models/text_format.py | 99 ++
 .../datafactory/models/text_format_py3.py | 99 ++
 .../azure/mgmt/datafactory/models/trigger.py | 68 +
 .../models/trigger_dependency_reference.py | 46 +
 .../trigger_dependency_reference_py3.py | 46 +
 .../models/trigger_pipeline_reference.py | 32 +
 .../models/trigger_pipeline_reference_py3.py | 32 +
 .../mgmt/datafactory/models/trigger_py3.py | 68 +
 .../datafactory/models/trigger_reference.py | 44 +
 .../models/trigger_reference_py3.py | 44 +
 .../datafactory/models/trigger_resource.py | 53 +
 .../models/trigger_resource_paged.py | 27 +
 .../models/trigger_resource_py3.py | 53 +
 .../mgmt/datafactory/models/trigger_run.py | 78 +
 .../datafactory/models/trigger_run_py3.py | 78 +
 .../models/trigger_runs_query_response.py | 39 +
 .../models/trigger_runs_query_response_py3.py | 39 +
 .../models/tumbling_window_trigger.py | 112 ++
 ...ing_window_trigger_dependency_reference.py | 50 +
 ...window_trigger_dependency_reference_py3.py | 50 +
 .../models/tumbling_window_trigger_py3.py | 112 ++
 .../mgmt/datafactory/models/until_activity.py | 72 +
 .../datafactory/models/until_activity_py3.py | 72 +
 ...update_integration_runtime_node_request.py | 34 +
 ...te_integration_runtime_node_request_py3.py | 34 +
 .../update_integration_runtime_request.py | 38 +
 .../update_integration_runtime_request_py3.py | 38 +
 .../datafactory/models/user_access_policy.py | 51 +
 .../models/user_access_policy_py3.py | 51 +
 .../mgmt/datafactory/models/user_property.py | 40 +
 .../datafactory/models/user_property_py3.py | 40 +
 .../datafactory/models/validation_activity.py | 81 +
 .../models/validation_activity_py3.py | 81 +
 .../models/variable_specification.py | 39 +
 .../models/variable_specification_py3.py | 39 +
 .../models/vertica_linked_service.py | 69 +
 .../models/vertica_linked_service_py3.py | 69 +
 .../mgmt/datafactory/models/vertica_source.py | 57 +
 .../datafactory/models/vertica_source_py3.py | 57 +
 .../models/vertica_table_dataset.py | 72 +
 .../models/vertica_table_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/wait_activity.py | 56 +
 .../datafactory/models/wait_activity_py3.py | 56 +
 .../mgmt/datafactory/models/web_activity.py | 98 ++
 .../models/web_activity_authentication.py | 53 +
 .../models/web_activity_authentication_py3.py | 53 +
 .../datafactory/models/web_activity_py3.py | 98 ++
 .../models/web_anonymous_authentication.py | 41 +
 .../web_anonymous_authentication_py3.py | 41 +
 .../models/web_basic_authentication.py | 52 +
 .../models/web_basic_authentication_py3.py | 52 +
 .../web_client_certificate_authentication.py | 53 +
 ...b_client_certificate_authentication_py3.py | 53 +
 .../datafactory/models/web_hook_activity.py | 92 +
 .../models/web_hook_activity_py3.py | 92 +
 .../datafactory/models/web_linked_service.py | 59 +
 .../models/web_linked_service_py3.py | 59 +
 .../web_linked_service_type_properties.py | 50 +
 .../web_linked_service_type_properties_py3.py | 50 +
 .../mgmt/datafactory/models/web_source.py | 52 +
 .../mgmt/datafactory/models/web_source_py3.py | 52 +
 .../datafactory/models/web_table_dataset.py | 78 +
 .../models/web_table_dataset_py3.py | 78 +
 .../datafactory/models/xero_linked_service.py | 93 +
 .../models/xero_linked_service_py3.py | 93 +
 .../datafactory/models/xero_object_dataset.py | 72 +
 .../models/xero_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/xero_source.py | 57 +
 .../datafactory/models/xero_source_py3.py | 57 +
 .../datafactory/models/zoho_linked_service.py | 85 +
 .../models/zoho_linked_service_py3.py | 85 +
 .../datafactory/models/zoho_object_dataset.py | 72 +
 .../models/zoho_object_dataset_py3.py | 72 +
 .../mgmt/datafactory/models/zoho_source.py | 57 +
 .../datafactory/models/zoho_source_py3.py | 57 +
 .../mgmt/datafactory/operations/__init__.py | 42 +
 .../operations/activity_runs_operations.py | 110 ++
 .../operations/datasets_operations.py | 314 ++++
 .../operations/exposure_control_operations.py | 179 ++
 .../operations/factories_operations.py | 644 +++++++
 .../integration_runtime_nodes_operations.py | 316 ++++
 ...tion_runtime_object_metadata_operations.py | 218 +++
 .../integration_runtimes_operations.py | 1181 +++++++++++++
 .../operations/linked_services_operations.py | 314 ++++
 .../mgmt/datafactory/operations/operations.py | 98 ++
 .../operations/pipeline_runs_operations.py | 233 +++
 .../operations/pipelines_operations.py | 404 +++++
 .../operations/rerun_triggers_operations.py | 450 +++++
 .../operations/trigger_runs_operations.py | 107 ++
 .../operations/triggers_operations.py | 482 ++++++
 .../azure/mgmt/datafactory/version.py | 13 +
 922 files changed, 67494 insertions(+)
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
 create mode 100644
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py create mode 
100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py 
create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py create mode 
100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py create mode 100644 
datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py
 create mode 100644 datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py

diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
new file mode 100644
index 000000000000..db14f5d7f4f6
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/__init__.py
@@ -0,0 +1,18 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .data_factory_management_client import DataFactoryManagementClient
+from .version import VERSION
+
+__all__ = ['DataFactoryManagementClient']
+
+__version__ = VERSION
+
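The package `__init__.py` above is the entire public import surface: it re-exports the generated client and pins `__version__` to `.version.VERSION`. A minimal consumer sketch (hypothetical usage, not part of the patch; it assumes the package is installed under its release name `azure-mgmt-datafactory`):

    import azure.mgmt.datafactory as adf

    # __all__ re-exports only the client; __version__ is copied from .version
    print(adf.__version__)
    client_cls = adf.DataFactoryManagementClient
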
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
new file mode 100644
index 000000000000..e49abccce72a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/data_factory_management_client.py
@@ -0,0 +1,146 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.service_client import SDKClient
+from msrest import Serializer, Deserializer
+from msrestazure import AzureConfiguration
+from .version import VERSION
+from .operations.operations import Operations
+from .operations.factories_operations import FactoriesOperations
+from .operations.exposure_control_operations import ExposureControlOperations
+from .operations.integration_runtimes_operations import IntegrationRuntimesOperations
+from .operations.integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations
+from .operations.integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations
+from .operations.linked_services_operations import LinkedServicesOperations
+from .operations.datasets_operations import DatasetsOperations
+from .operations.pipelines_operations import PipelinesOperations
+from .operations.pipeline_runs_operations import PipelineRunsOperations
+from .operations.activity_runs_operations import ActivityRunsOperations
+from .operations.triggers_operations import TriggersOperations
+from .operations.rerun_triggers_operations import RerunTriggersOperations
+from .operations.trigger_runs_operations import TriggerRunsOperations
+from . import models
+
+
+class DataFactoryManagementClientConfiguration(AzureConfiguration):
+    """Configuration for DataFactoryManagementClient
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credentials: Credentials needed for the client to connect to Azure.
+    :type credentials: :mod:`A msrestazure Credentials
+     object`
+    :param subscription_id: The subscription identifier.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    """
+
+    def __init__(
+            self, credentials, subscription_id, base_url=None):
+
+        if credentials is None:
+            raise ValueError("Parameter 'credentials' must not be None.")
+        if subscription_id is None:
+            raise ValueError("Parameter 'subscription_id' must not be None.")
+        if not base_url:
+            base_url = 'https://management.azure.com'
+
+        super(DataFactoryManagementClientConfiguration, self).__init__(base_url)
+
+        self.add_user_agent('azure-mgmt-datafactory/{}'.format(VERSION))
+        self.add_user_agent('Azure-SDK-For-Python')
+
+        self.credentials = credentials
+        self.subscription_id = subscription_id
+
+
+class DataFactoryManagementClient(SDKClient):
+    """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.
+
+    :ivar config: Configuration for client.
+    :vartype config: DataFactoryManagementClientConfiguration
+
+    :ivar operations: Operations operations
+    :vartype operations: azure.mgmt.datafactory.operations.Operations
+    :ivar factories: Factories operations
+    :vartype factories: azure.mgmt.datafactory.operations.FactoriesOperations
+    :ivar exposure_control: ExposureControl operations
+    :vartype exposure_control: azure.mgmt.datafactory.operations.ExposureControlOperations
+    :ivar integration_runtimes: IntegrationRuntimes operations
+    :vartype integration_runtimes: azure.mgmt.datafactory.operations.IntegrationRuntimesOperations
+    :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadata operations
+    :vartype integration_runtime_object_metadata: azure.mgmt.datafactory.operations.IntegrationRuntimeObjectMetadataOperations
+    :ivar integration_runtime_nodes: IntegrationRuntimeNodes operations
+    :vartype integration_runtime_nodes: azure.mgmt.datafactory.operations.IntegrationRuntimeNodesOperations
+    :ivar linked_services: LinkedServices operations
+    :vartype linked_services: azure.mgmt.datafactory.operations.LinkedServicesOperations
+    :ivar datasets: Datasets operations
+    :vartype datasets: azure.mgmt.datafactory.operations.DatasetsOperations
+    :ivar pipelines: Pipelines operations
+    :vartype pipelines: azure.mgmt.datafactory.operations.PipelinesOperations
+    :ivar pipeline_runs: PipelineRuns operations
+    :vartype pipeline_runs: azure.mgmt.datafactory.operations.PipelineRunsOperations
+    :ivar activity_runs: ActivityRuns operations
+    :vartype activity_runs: azure.mgmt.datafactory.operations.ActivityRunsOperations
+    :ivar triggers: Triggers operations
+    :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations
+    :ivar rerun_triggers: RerunTriggers operations
+    :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations
+    :ivar trigger_runs: TriggerRuns operations
+    :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations
+
+    :param credentials: Credentials needed for the client to connect to Azure.
+    :type credentials: :mod:`A msrestazure Credentials
+     object`
+    :param subscription_id: The subscription identifier.
+    :type subscription_id: str
+    :param str base_url: Service URL
+    """
+
+    def __init__(
+            self, credentials, subscription_id, base_url=None):
+
+        self.config = DataFactoryManagementClientConfiguration(credentials, subscription_id, base_url)
+        super(DataFactoryManagementClient, self).__init__(self.config.credentials, self.config)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self.api_version = '2018-06-01'
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+
+        self.operations = Operations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.factories = FactoriesOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.exposure_control = ExposureControlOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.integration_runtimes = IntegrationRuntimesOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.integration_runtime_nodes = IntegrationRuntimeNodesOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.linked_services = LinkedServicesOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.datasets = DatasetsOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.pipelines = PipelinesOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.pipeline_runs = PipelineRunsOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.activity_runs = ActivityRunsOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.triggers = TriggersOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.rerun_triggers = RerunTriggersOperations(
+            self._client, self.config, self._serialize, self._deserialize)
+        self.trigger_runs = TriggerRunsOperations(
+            self._client, self.config, self._serialize, self._deserialize)
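The constructor above wires one attribute per operation group onto the shared `self._client` transport. A minimal instantiation sketch (hypothetical usage, not part of the patch; it assumes `ServicePrincipalCredentials` from the separate `azure-common` package as the msrestazure credentials object, and `factories.list()` as generated in `FactoriesOperations`):

    from azure.common.credentials import ServicePrincipalCredentials  # assumption: azure-common is installed
    from azure.mgmt.datafactory import DataFactoryManagementClient

    credentials = ServicePrincipalCredentials(
        client_id='<client-id>',      # placeholder values
        secret='<client-secret>',
        tenant='<tenant-id>',
    )
    client = DataFactoryManagementClient(credentials, '<subscription-id>')

    # Each operation group is reachable as an attribute set in __init__ above.
    for factory in client.factories.list():
        print(factory.name)
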
+# -------------------------------------------------------------------------- + +try: + from .resource_py3 import Resource + from .sub_resource_py3 import SubResource + from .expression_py3 import Expression + from .secure_string_py3 import SecureString + from .linked_service_reference_py3 import LinkedServiceReference + from .azure_key_vault_secret_reference_py3 import AzureKeyVaultSecretReference + from .secret_base_py3 import SecretBase + from .factory_identity_py3 import FactoryIdentity + from .factory_repo_configuration_py3 import FactoryRepoConfiguration + from .factory_py3 import Factory + from .integration_runtime_py3 import IntegrationRuntime + from .integration_runtime_resource_py3 import IntegrationRuntimeResource + from .integration_runtime_reference_py3 import IntegrationRuntimeReference + from .integration_runtime_status_py3 import IntegrationRuntimeStatus + from .integration_runtime_status_response_py3 import IntegrationRuntimeStatusResponse + from .integration_runtime_status_list_response_py3 import IntegrationRuntimeStatusListResponse + from .update_integration_runtime_request_py3 import UpdateIntegrationRuntimeRequest + from .update_integration_runtime_node_request_py3 import UpdateIntegrationRuntimeNodeRequest + from .linked_integration_runtime_request_py3 import LinkedIntegrationRuntimeRequest + from .create_linked_integration_runtime_request_py3 import CreateLinkedIntegrationRuntimeRequest + from .parameter_specification_py3 import ParameterSpecification + from .linked_service_py3 import LinkedService + from .linked_service_resource_py3 import LinkedServiceResource + from .dataset_folder_py3 import DatasetFolder + from .dataset_py3 import Dataset + from .dataset_resource_py3 import DatasetResource + from .activity_dependency_py3 import ActivityDependency + from .user_property_py3 import UserProperty + from .activity_py3 import Activity + from .variable_specification_py3 import VariableSpecification + from .pipeline_folder_py3 import PipelineFolder + from .pipeline_resource_py3 import PipelineResource + from .trigger_py3 import Trigger + from .trigger_resource_py3 import TriggerResource + from .create_run_response_py3 import CreateRunResponse + from .factory_vsts_configuration_py3 import FactoryVSTSConfiguration + from .factory_git_hub_configuration_py3 import FactoryGitHubConfiguration + from .factory_repo_update_py3 import FactoryRepoUpdate + from .git_hub_access_token_request_py3 import GitHubAccessTokenRequest + from .git_hub_access_token_response_py3 import GitHubAccessTokenResponse + from .user_access_policy_py3 import UserAccessPolicy + from .access_policy_response_py3 import AccessPolicyResponse + from .pipeline_reference_py3 import PipelineReference + from .trigger_pipeline_reference_py3 import TriggerPipelineReference + from .factory_update_parameters_py3 import FactoryUpdateParameters + from .dataset_reference_py3 import DatasetReference + from .run_query_filter_py3 import RunQueryFilter + from .run_query_order_by_py3 import RunQueryOrderBy + from .run_filter_parameters_py3 import RunFilterParameters + from .pipeline_run_invoked_by_py3 import PipelineRunInvokedBy + from .pipeline_run_py3 import PipelineRun + from .pipeline_runs_query_response_py3 import PipelineRunsQueryResponse + from .activity_run_py3 import ActivityRun + from .activity_runs_query_response_py3 import ActivityRunsQueryResponse + from .trigger_run_py3 import TriggerRun + from .trigger_runs_query_response_py3 import TriggerRunsQueryResponse + from 
.rerun_tumbling_window_trigger_action_parameters_py3 import RerunTumblingWindowTriggerActionParameters + from .rerun_tumbling_window_trigger_py3 import RerunTumblingWindowTrigger + from .rerun_trigger_resource_py3 import RerunTriggerResource + from .operation_display_py3 import OperationDisplay + from .operation_log_specification_py3 import OperationLogSpecification + from .operation_metric_availability_py3 import OperationMetricAvailability + from .operation_metric_dimension_py3 import OperationMetricDimension + from .operation_metric_specification_py3 import OperationMetricSpecification + from .operation_service_specification_py3 import OperationServiceSpecification + from .operation_py3 import Operation + from .get_ssis_object_metadata_request_py3 import GetSsisObjectMetadataRequest + from .ssis_object_metadata_status_response_py3 import SsisObjectMetadataStatusResponse + from .exposure_control_request_py3 import ExposureControlRequest + from .exposure_control_response_py3 import ExposureControlResponse + from .self_dependency_tumbling_window_trigger_reference_py3 import SelfDependencyTumblingWindowTriggerReference + from .trigger_reference_py3 import TriggerReference + from .tumbling_window_trigger_dependency_reference_py3 import TumblingWindowTriggerDependencyReference + from .trigger_dependency_reference_py3 import TriggerDependencyReference + from .dependency_reference_py3 import DependencyReference + from .retry_policy_py3 import RetryPolicy + from .tumbling_window_trigger_py3 import TumblingWindowTrigger + from .blob_events_trigger_py3 import BlobEventsTrigger + from .blob_trigger_py3 import BlobTrigger + from .recurrence_schedule_occurrence_py3 import RecurrenceScheduleOccurrence + from .recurrence_schedule_py3 import RecurrenceSchedule + from .schedule_trigger_recurrence_py3 import ScheduleTriggerRecurrence + from .schedule_trigger_py3 import ScheduleTrigger + from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + from .azure_function_linked_service_py3 import AzureFunctionLinkedService + from .azure_data_explorer_linked_service_py3 import AzureDataExplorerLinkedService + from .google_ad_words_linked_service_py3 import GoogleAdWordsLinkedService + from .oracle_service_cloud_linked_service_py3 import OracleServiceCloudLinkedService + from .dynamics_ax_linked_service_py3 import DynamicsAXLinkedService + from .responsys_linked_service_py3 import ResponsysLinkedService + from .azure_databricks_linked_service_py3 import AzureDatabricksLinkedService + from .azure_data_lake_analytics_linked_service_py3 import AzureDataLakeAnalyticsLinkedService + from .script_action_py3 import ScriptAction + from .hd_insight_on_demand_linked_service_py3 import HDInsightOnDemandLinkedService + from .salesforce_marketing_cloud_linked_service_py3 import SalesforceMarketingCloudLinkedService + from .netezza_linked_service_py3 import NetezzaLinkedService + from .vertica_linked_service_py3 import VerticaLinkedService + from .zoho_linked_service_py3 import ZohoLinkedService + from .xero_linked_service_py3 import XeroLinkedService + from .square_linked_service_py3 import SquareLinkedService + from .spark_linked_service_py3 import SparkLinkedService + from .shopify_linked_service_py3 import ShopifyLinkedService + from .service_now_linked_service_py3 import ServiceNowLinkedService + from .quick_books_linked_service_py3 import QuickBooksLinkedService + from .presto_linked_service_py3 import PrestoLinkedService + from .phoenix_linked_service_py3 import PhoenixLinkedService + from 
.paypal_linked_service_py3 import PaypalLinkedService + from .marketo_linked_service_py3 import MarketoLinkedService + from .maria_db_linked_service_py3 import MariaDBLinkedService + from .magento_linked_service_py3 import MagentoLinkedService + from .jira_linked_service_py3 import JiraLinkedService + from .impala_linked_service_py3 import ImpalaLinkedService + from .hubspot_linked_service_py3 import HubspotLinkedService + from .hive_linked_service_py3 import HiveLinkedService + from .hbase_linked_service_py3 import HBaseLinkedService + from .greenplum_linked_service_py3 import GreenplumLinkedService + from .google_big_query_linked_service_py3 import GoogleBigQueryLinkedService + from .eloqua_linked_service_py3 import EloquaLinkedService + from .drill_linked_service_py3 import DrillLinkedService + from .couchbase_linked_service_py3 import CouchbaseLinkedService + from .concur_linked_service_py3 import ConcurLinkedService + from .azure_postgre_sql_linked_service_py3 import AzurePostgreSqlLinkedService + from .amazon_mws_linked_service_py3 import AmazonMWSLinkedService + from .sap_hana_linked_service_py3 import SapHanaLinkedService + from .sap_bw_linked_service_py3 import SapBWLinkedService + from .sftp_server_linked_service_py3 import SftpServerLinkedService + from .ftp_server_linked_service_py3 import FtpServerLinkedService + from .http_linked_service_py3 import HttpLinkedService + from .azure_search_linked_service_py3 import AzureSearchLinkedService + from .custom_data_source_linked_service_py3 import CustomDataSourceLinkedService + from .amazon_redshift_linked_service_py3 import AmazonRedshiftLinkedService + from .amazon_s3_linked_service_py3 import AmazonS3LinkedService + from .rest_service_linked_service_py3 import RestServiceLinkedService + from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService + from .sap_ecc_linked_service_py3 import SapEccLinkedService + from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService + from .salesforce_linked_service_py3 import SalesforceLinkedService + from .office365_linked_service_py3 import Office365LinkedService + from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService + from .azure_data_lake_store_linked_service_py3 import AzureDataLakeStoreLinkedService + from .cosmos_db_mongo_db_api_linked_service_py3 import CosmosDbMongoDbApiLinkedService + from .mongo_db_v2_linked_service_py3 import MongoDbV2LinkedService + from .mongo_db_linked_service_py3 import MongoDbLinkedService + from .cassandra_linked_service_py3 import CassandraLinkedService + from .web_client_certificate_authentication_py3 import WebClientCertificateAuthentication + from .web_basic_authentication_py3 import WebBasicAuthentication + from .web_anonymous_authentication_py3 import WebAnonymousAuthentication + from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + from .web_linked_service_py3 import WebLinkedService + from .odata_linked_service_py3 import ODataLinkedService + from .hdfs_linked_service_py3 import HdfsLinkedService + from .odbc_linked_service_py3 import OdbcLinkedService + from .azure_ml_linked_service_py3 import AzureMLLinkedService + from .teradata_linked_service_py3 import TeradataLinkedService + from .db2_linked_service_py3 import Db2LinkedService + from .sybase_linked_service_py3 import SybaseLinkedService + from .postgre_sql_linked_service_py3 import PostgreSqlLinkedService + from .my_sql_linked_service_py3 import MySqlLinkedService + from .azure_my_sql_linked_service_py3 import 
AzureMySqlLinkedService + from .oracle_linked_service_py3 import OracleLinkedService + from .file_server_linked_service_py3 import FileServerLinkedService + from .hd_insight_linked_service_py3 import HDInsightLinkedService + from .dynamics_linked_service_py3 import DynamicsLinkedService + from .cosmos_db_linked_service_py3 import CosmosDbLinkedService + from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService + from .azure_batch_linked_service_py3 import AzureBatchLinkedService + from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService + from .sql_server_linked_service_py3 import SqlServerLinkedService + from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService + from .azure_table_storage_linked_service_py3 import AzureTableStorageLinkedService + from .azure_blob_storage_linked_service_py3 import AzureBlobStorageLinkedService + from .azure_storage_linked_service_py3 import AzureStorageLinkedService + from .google_ad_words_object_dataset_py3 import GoogleAdWordsObjectDataset + from .azure_data_explorer_table_dataset_py3 import AzureDataExplorerTableDataset + from .oracle_service_cloud_object_dataset_py3 import OracleServiceCloudObjectDataset + from .dynamics_ax_resource_dataset_py3 import DynamicsAXResourceDataset + from .responsys_object_dataset_py3 import ResponsysObjectDataset + from .salesforce_marketing_cloud_object_dataset_py3 import SalesforceMarketingCloudObjectDataset + from .vertica_table_dataset_py3 import VerticaTableDataset + from .netezza_table_dataset_py3 import NetezzaTableDataset + from .zoho_object_dataset_py3 import ZohoObjectDataset + from .xero_object_dataset_py3 import XeroObjectDataset + from .square_object_dataset_py3 import SquareObjectDataset + from .spark_object_dataset_py3 import SparkObjectDataset + from .shopify_object_dataset_py3 import ShopifyObjectDataset + from .service_now_object_dataset_py3 import ServiceNowObjectDataset + from .quick_books_object_dataset_py3 import QuickBooksObjectDataset + from .presto_object_dataset_py3 import PrestoObjectDataset + from .phoenix_object_dataset_py3 import PhoenixObjectDataset + from .paypal_object_dataset_py3 import PaypalObjectDataset + from .marketo_object_dataset_py3 import MarketoObjectDataset + from .maria_db_table_dataset_py3 import MariaDBTableDataset + from .magento_object_dataset_py3 import MagentoObjectDataset + from .jira_object_dataset_py3 import JiraObjectDataset + from .impala_object_dataset_py3 import ImpalaObjectDataset + from .hubspot_object_dataset_py3 import HubspotObjectDataset + from .hive_object_dataset_py3 import HiveObjectDataset + from .hbase_object_dataset_py3 import HBaseObjectDataset + from .greenplum_table_dataset_py3 import GreenplumTableDataset + from .google_big_query_object_dataset_py3 import GoogleBigQueryObjectDataset + from .eloqua_object_dataset_py3 import EloquaObjectDataset + from .drill_table_dataset_py3 import DrillTableDataset + from .couchbase_table_dataset_py3 import CouchbaseTableDataset + from .concur_object_dataset_py3 import ConcurObjectDataset + from .azure_postgre_sql_table_dataset_py3 import AzurePostgreSqlTableDataset + from .amazon_mws_object_dataset_py3 import AmazonMWSObjectDataset + from .dataset_zip_deflate_compression_py3 import DatasetZipDeflateCompression + from .dataset_deflate_compression_py3 import DatasetDeflateCompression + from .dataset_gzip_compression_py3 import DatasetGZipCompression + from .dataset_bzip2_compression_py3 import DatasetBZip2Compression + from .dataset_compression_py3 import 
DatasetCompression + from .parquet_format_py3 import ParquetFormat + from .orc_format_py3 import OrcFormat + from .avro_format_py3 import AvroFormat + from .json_format_py3 import JsonFormat + from .text_format_py3 import TextFormat + from .dataset_storage_format_py3 import DatasetStorageFormat + from .http_dataset_py3 import HttpDataset + from .azure_search_index_dataset_py3 import AzureSearchIndexDataset + from .web_table_dataset_py3 import WebTableDataset + from .rest_resource_dataset_py3 import RestResourceDataset + from .sql_server_table_dataset_py3 import SqlServerTableDataset + from .sap_open_hub_table_dataset_py3 import SapOpenHubTableDataset + from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset + from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset + from .salesforce_object_dataset_py3 import SalesforceObjectDataset + from .relational_table_dataset_py3 import RelationalTableDataset + from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset + from .oracle_table_dataset_py3 import OracleTableDataset + from .odata_resource_dataset_py3 import ODataResourceDataset + from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset + from .mongo_db_v2_collection_dataset_py3 import MongoDbV2CollectionDataset + from .mongo_db_collection_dataset_py3 import MongoDbCollectionDataset + from .file_share_dataset_py3 import FileShareDataset + from .office365_dataset_py3 import Office365Dataset + from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset + from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset + from .dynamics_entity_dataset_py3 import DynamicsEntityDataset + from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset + from .custom_dataset_py3 import CustomDataset + from .cassandra_table_dataset_py3 import CassandraTableDataset + from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset + from .azure_sql_table_dataset_py3 import AzureSqlTableDataset + from .azure_table_dataset_py3 import AzureTableDataset + from .azure_blob_dataset_py3 import AzureBlobDataset + from .hdfs_location_py3 import HdfsLocation + from .http_server_location_py3 import HttpServerLocation + from .sftp_location_py3 import SftpLocation + from .ftp_server_location_py3 import FtpServerLocation + from .file_server_location_py3 import FileServerLocation + from .amazon_s3_location_py3 import AmazonS3Location + from .azure_data_lake_store_location_py3 import AzureDataLakeStoreLocation + from .azure_blob_fs_location_py3 import AzureBlobFSLocation + from .azure_blob_storage_location_py3 import AzureBlobStorageLocation + from .dataset_location_py3 import DatasetLocation + from .delimited_text_dataset_py3 import DelimitedTextDataset + from .parquet_dataset_py3 import ParquetDataset + from .amazon_s3_dataset_py3 import AmazonS3Dataset + from .activity_policy_py3 import ActivityPolicy + from .azure_function_activity_py3 import AzureFunctionActivity + from .databricks_spark_python_activity_py3 import DatabricksSparkPythonActivity + from .databricks_spark_jar_activity_py3 import DatabricksSparkJarActivity + from .databricks_notebook_activity_py3 import DatabricksNotebookActivity + from .data_lake_analytics_usql_activity_py3 import DataLakeAnalyticsUSQLActivity + from .azure_ml_update_resource_activity_py3 import AzureMLUpdateResourceActivity + from .azure_ml_web_service_file_py3 import AzureMLWebServiceFile + from .azure_ml_batch_execution_activity_py3 import AzureMLBatchExecutionActivity + 
from .get_metadata_activity_py3 import GetMetadataActivity + from .web_activity_authentication_py3 import WebActivityAuthentication + from .web_activity_py3 import WebActivity + from .redshift_unload_settings_py3 import RedshiftUnloadSettings + from .amazon_redshift_source_py3 import AmazonRedshiftSource + from .google_ad_words_source_py3 import GoogleAdWordsSource + from .oracle_service_cloud_source_py3 import OracleServiceCloudSource + from .dynamics_ax_source_py3 import DynamicsAXSource + from .responsys_source_py3 import ResponsysSource + from .salesforce_marketing_cloud_source_py3 import SalesforceMarketingCloudSource + from .vertica_source_py3 import VerticaSource + from .netezza_source_py3 import NetezzaSource + from .zoho_source_py3 import ZohoSource + from .xero_source_py3 import XeroSource + from .square_source_py3 import SquareSource + from .spark_source_py3 import SparkSource + from .shopify_source_py3 import ShopifySource + from .service_now_source_py3 import ServiceNowSource + from .quick_books_source_py3 import QuickBooksSource + from .presto_source_py3 import PrestoSource + from .phoenix_source_py3 import PhoenixSource + from .paypal_source_py3 import PaypalSource + from .marketo_source_py3 import MarketoSource + from .maria_db_source_py3 import MariaDBSource + from .magento_source_py3 import MagentoSource + from .jira_source_py3 import JiraSource + from .impala_source_py3 import ImpalaSource + from .hubspot_source_py3 import HubspotSource + from .hive_source_py3 import HiveSource + from .hbase_source_py3 import HBaseSource + from .greenplum_source_py3 import GreenplumSource + from .google_big_query_source_py3 import GoogleBigQuerySource + from .eloqua_source_py3 import EloquaSource + from .drill_source_py3 import DrillSource + from .couchbase_source_py3 import CouchbaseSource + from .concur_source_py3 import ConcurSource + from .azure_postgre_sql_source_py3 import AzurePostgreSqlSource + from .amazon_mws_source_py3 import AmazonMWSSource + from .http_source_py3 import HttpSource + from .azure_blob_fs_source_py3 import AzureBlobFSSource + from .azure_data_lake_store_source_py3 import AzureDataLakeStoreSource + from .office365_source_py3 import Office365Source + from .mongo_db_cursor_methods_properties_py3 import MongoDbCursorMethodsProperties + from .cosmos_db_mongo_db_api_source_py3 import CosmosDbMongoDbApiSource + from .mongo_db_v2_source_py3 import MongoDbV2Source + from .mongo_db_source_py3 import MongoDbSource + from .cassandra_source_py3 import CassandraSource + from .web_source_py3 import WebSource + from .oracle_partition_settings_py3 import OraclePartitionSettings + from .oracle_source_py3 import OracleSource + from .azure_data_explorer_source_py3 import AzureDataExplorerSource + from .azure_my_sql_source_py3 import AzureMySqlSource + from .distcp_settings_py3 import DistcpSettings + from .hdfs_source_py3 import HdfsSource + from .file_system_source_py3 import FileSystemSource + from .sql_dw_source_py3 import SqlDWSource + from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .azure_sql_source_py3 import AzureSqlSource + from .sql_server_source_py3 import SqlServerSource + from .sql_source_py3 import SqlSource + from .rest_source_py3 import RestSource + from .sap_open_hub_source_py3 import SapOpenHubSource + from .sap_ecc_source_py3 import SapEccSource + from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource + from .salesforce_source_py3 import SalesforceSource + from .relational_source_py3 import RelationalSource + from 
.dynamics_source_py3 import DynamicsSource + from .document_db_collection_source_py3 import DocumentDbCollectionSource + from .blob_source_py3 import BlobSource + from .azure_table_source_py3 import AzureTableSource + from .format_read_setting_py3 import FormatReadSetting + from .delimited_text_read_setting_py3 import DelimitedTextReadSetting + from .hdfs_read_setting_py3 import HdfsReadSetting + from .http_read_setting_py3 import HttpReadSetting + from .sftp_read_setting_py3 import SftpReadSetting + from .ftp_read_setting_py3 import FtpReadSetting + from .file_server_read_setting_py3 import FileServerReadSetting + from .amazon_s3_read_setting_py3 import AmazonS3ReadSetting + from .azure_data_lake_store_read_setting_py3 import AzureDataLakeStoreReadSetting + from .azure_blob_fs_read_setting_py3 import AzureBlobFSReadSetting + from .azure_blob_storage_read_setting_py3 import AzureBlobStorageReadSetting + from .connector_read_setting_py3 import ConnectorReadSetting + from .delimited_text_source_py3 import DelimitedTextSource + from .parquet_source_py3 import ParquetSource + from .copy_source_py3 import CopySource + from .lookup_activity_py3 import LookupActivity + from .log_storage_settings_py3 import LogStorageSettings + from .delete_activity_py3 import DeleteActivity + from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity + from .custom_activity_reference_object_py3 import CustomActivityReferenceObject + from .custom_activity_py3 import CustomActivity + from .ssis_property_override_py3 import SSISPropertyOverride + from .ssis_execution_parameter_py3 import SSISExecutionParameter + from .ssis_execution_credential_py3 import SSISExecutionCredential + from .ssis_package_location_py3 import SSISPackageLocation + from .execute_ssis_package_activity_py3 import ExecuteSSISPackageActivity + from .hd_insight_spark_activity_py3 import HDInsightSparkActivity + from .hd_insight_streaming_activity_py3 import HDInsightStreamingActivity + from .hd_insight_map_reduce_activity_py3 import HDInsightMapReduceActivity + from .hd_insight_pig_activity_py3 import HDInsightPigActivity + from .hd_insight_hive_activity_py3 import HDInsightHiveActivity + from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings + from .staging_settings_py3 import StagingSettings + from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink + from .salesforce_sink_py3 import SalesforceSink + from .azure_data_explorer_sink_py3 import AzureDataExplorerSink + from .dynamics_sink_py3 import DynamicsSink + from .odbc_sink_py3 import OdbcSink + from .azure_search_index_sink_py3 import AzureSearchIndexSink + from .azure_blob_fs_sink_py3 import AzureBlobFSSink + from .azure_data_lake_store_sink_py3 import AzureDataLakeStoreSink + from .oracle_sink_py3 import OracleSink + from .polybase_settings_py3 import PolybaseSettings + from .sql_dw_sink_py3 import SqlDWSink + from .azure_sql_sink_py3 import AzureSqlSink + from .sql_server_sink_py3 import SqlServerSink + from .sql_sink_py3 import SqlSink + from .document_db_collection_sink_py3 import DocumentDbCollectionSink + from .file_system_sink_py3 import FileSystemSink + from .blob_sink_py3 import BlobSink + from .file_server_write_setting_py3 import FileServerWriteSetting + from .azure_data_lake_store_write_setting_py3 import AzureDataLakeStoreWriteSetting + from .azure_blob_fs_write_setting_py3 import AzureBlobFSWriteSetting + from .azure_blob_storage_write_setting_py3 import AzureBlobStorageWriteSetting + from 
.connector_write_setting_py3 import ConnectorWriteSetting + from .parquet_sink_py3 import ParquetSink + from .azure_table_sink_py3 import AzureTableSink + from .azure_queue_sink_py3 import AzureQueueSink + from .sap_cloud_for_customer_sink_py3 import SapCloudForCustomerSink + from .format_write_setting_py3 import FormatWriteSetting + from .delimited_text_write_setting_py3 import DelimitedTextWriteSetting + from .delimited_text_sink_py3 import DelimitedTextSink + from .copy_sink_py3 import CopySink + from .copy_activity_py3 import CopyActivity + from .execution_activity_py3 import ExecutionActivity + from .web_hook_activity_py3 import WebHookActivity + from .append_variable_activity_py3 import AppendVariableActivity + from .set_variable_activity_py3 import SetVariableActivity + from .filter_activity_py3 import FilterActivity + from .validation_activity_py3 import ValidationActivity + from .until_activity_py3 import UntilActivity + from .wait_activity_py3 import WaitActivity + from .for_each_activity_py3 import ForEachActivity + from .if_condition_activity_py3 import IfConditionActivity + from .execute_pipeline_activity_py3 import ExecutePipelineActivity + from .control_activity_py3 import ControlActivity + from .linked_integration_runtime_py3 import LinkedIntegrationRuntime + from .self_hosted_integration_runtime_node_py3 import SelfHostedIntegrationRuntimeNode + from .self_hosted_integration_runtime_status_py3 import SelfHostedIntegrationRuntimeStatus + from .managed_integration_runtime_operation_result_py3 import ManagedIntegrationRuntimeOperationResult + from .managed_integration_runtime_error_py3 import ManagedIntegrationRuntimeError + from .managed_integration_runtime_node_py3 import ManagedIntegrationRuntimeNode + from .managed_integration_runtime_status_py3 import ManagedIntegrationRuntimeStatus + from .linked_integration_runtime_rbac_authorization_py3 import LinkedIntegrationRuntimeRbacAuthorization + from .linked_integration_runtime_key_authorization_py3 import LinkedIntegrationRuntimeKeyAuthorization + from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + from .self_hosted_integration_runtime_py3 import SelfHostedIntegrationRuntime + from .entity_reference_py3 import EntityReference + from .integration_runtime_data_proxy_properties_py3 import IntegrationRuntimeDataProxyProperties + from .integration_runtime_custom_setup_script_properties_py3 import IntegrationRuntimeCustomSetupScriptProperties + from .integration_runtime_ssis_catalog_info_py3 import IntegrationRuntimeSsisCatalogInfo + from .integration_runtime_ssis_properties_py3 import IntegrationRuntimeSsisProperties + from .integration_runtime_vnet_properties_py3 import IntegrationRuntimeVNetProperties + from .integration_runtime_compute_properties_py3 import IntegrationRuntimeComputeProperties + from .managed_integration_runtime_py3 import ManagedIntegrationRuntime + from .integration_runtime_node_ip_address_py3 import IntegrationRuntimeNodeIpAddress + from .ssis_variable_py3 import SsisVariable + from .ssis_environment_py3 import SsisEnvironment + from .ssis_parameter_py3 import SsisParameter + from .ssis_package_py3 import SsisPackage + from .ssis_environment_reference_py3 import SsisEnvironmentReference + from .ssis_project_py3 import SsisProject + from .ssis_folder_py3 import SsisFolder + from .ssis_object_metadata_py3 import SsisObjectMetadata + from .ssis_object_metadata_list_response_py3 import SsisObjectMetadataListResponse + from .integration_runtime_node_monitoring_data_py3 import 
IntegrationRuntimeNodeMonitoringData + from .integration_runtime_monitoring_data_py3 import IntegrationRuntimeMonitoringData + from .integration_runtime_auth_keys_py3 import IntegrationRuntimeAuthKeys + from .integration_runtime_regenerate_key_parameters_py3 import IntegrationRuntimeRegenerateKeyParameters + from .integration_runtime_connection_info_py3 import IntegrationRuntimeConnectionInfo +except (SyntaxError, ImportError): + from .resource import Resource + from .sub_resource import SubResource + from .expression import Expression + from .secure_string import SecureString + from .linked_service_reference import LinkedServiceReference + from .azure_key_vault_secret_reference import AzureKeyVaultSecretReference + from .secret_base import SecretBase + from .factory_identity import FactoryIdentity + from .factory_repo_configuration import FactoryRepoConfiguration + from .factory import Factory + from .integration_runtime import IntegrationRuntime + from .integration_runtime_resource import IntegrationRuntimeResource + from .integration_runtime_reference import IntegrationRuntimeReference + from .integration_runtime_status import IntegrationRuntimeStatus + from .integration_runtime_status_response import IntegrationRuntimeStatusResponse + from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse + from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest + from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest + from .linked_integration_runtime_request import LinkedIntegrationRuntimeRequest + from .create_linked_integration_runtime_request import CreateLinkedIntegrationRuntimeRequest + from .parameter_specification import ParameterSpecification + from .linked_service import LinkedService + from .linked_service_resource import LinkedServiceResource + from .dataset_folder import DatasetFolder + from .dataset import Dataset + from .dataset_resource import DatasetResource + from .activity_dependency import ActivityDependency + from .user_property import UserProperty + from .activity import Activity + from .variable_specification import VariableSpecification + from .pipeline_folder import PipelineFolder + from .pipeline_resource import PipelineResource + from .trigger import Trigger + from .trigger_resource import TriggerResource + from .create_run_response import CreateRunResponse + from .factory_vsts_configuration import FactoryVSTSConfiguration + from .factory_git_hub_configuration import FactoryGitHubConfiguration + from .factory_repo_update import FactoryRepoUpdate + from .git_hub_access_token_request import GitHubAccessTokenRequest + from .git_hub_access_token_response import GitHubAccessTokenResponse + from .user_access_policy import UserAccessPolicy + from .access_policy_response import AccessPolicyResponse + from .pipeline_reference import PipelineReference + from .trigger_pipeline_reference import TriggerPipelineReference + from .factory_update_parameters import FactoryUpdateParameters + from .dataset_reference import DatasetReference + from .run_query_filter import RunQueryFilter + from .run_query_order_by import RunQueryOrderBy + from .run_filter_parameters import RunFilterParameters + from .pipeline_run_invoked_by import PipelineRunInvokedBy + from .pipeline_run import PipelineRun + from .pipeline_runs_query_response import PipelineRunsQueryResponse + from .activity_run import ActivityRun + from .activity_runs_query_response import ActivityRunsQueryResponse + from .trigger_run import TriggerRun 
+    from .trigger_runs_query_response import TriggerRunsQueryResponse
+    from .rerun_tumbling_window_trigger_action_parameters import RerunTumblingWindowTriggerActionParameters
+    from .rerun_tumbling_window_trigger import RerunTumblingWindowTrigger
+    from .rerun_trigger_resource import RerunTriggerResource
+    from .operation_display import OperationDisplay
+    from .operation_log_specification import OperationLogSpecification
+    from .operation_metric_availability import OperationMetricAvailability
+    from .operation_metric_dimension import OperationMetricDimension
+    from .operation_metric_specification import OperationMetricSpecification
+    from .operation_service_specification import OperationServiceSpecification
+    from .operation import Operation
+    from .get_ssis_object_metadata_request import GetSsisObjectMetadataRequest
+    from .ssis_object_metadata_status_response import SsisObjectMetadataStatusResponse
+    from .exposure_control_request import ExposureControlRequest
+    from .exposure_control_response import ExposureControlResponse
+    from .self_dependency_tumbling_window_trigger_reference import SelfDependencyTumblingWindowTriggerReference
+    from .trigger_reference import TriggerReference
+    from .tumbling_window_trigger_dependency_reference import TumblingWindowTriggerDependencyReference
+    from .trigger_dependency_reference import TriggerDependencyReference
+    from .dependency_reference import DependencyReference
+    from .retry_policy import RetryPolicy
+    from .tumbling_window_trigger import TumblingWindowTrigger
+    from .blob_events_trigger import BlobEventsTrigger
+    from .blob_trigger import BlobTrigger
+    from .recurrence_schedule_occurrence import RecurrenceScheduleOccurrence
+    from .recurrence_schedule import RecurrenceSchedule
+    from .schedule_trigger_recurrence import ScheduleTriggerRecurrence
+    from .schedule_trigger import ScheduleTrigger
+    from .multiple_pipeline_trigger import MultiplePipelineTrigger
+    from .azure_function_linked_service import AzureFunctionLinkedService
+    from .azure_data_explorer_linked_service import AzureDataExplorerLinkedService
+    from .google_ad_words_linked_service import GoogleAdWordsLinkedService
+    from .oracle_service_cloud_linked_service import OracleServiceCloudLinkedService
+    from .dynamics_ax_linked_service import DynamicsAXLinkedService
+    from .responsys_linked_service import ResponsysLinkedService
+    from .azure_databricks_linked_service import AzureDatabricksLinkedService
+    from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
+    from .script_action import ScriptAction
+    from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
+    from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService
+    from .netezza_linked_service import NetezzaLinkedService
+    from .vertica_linked_service import VerticaLinkedService
+    from .zoho_linked_service import ZohoLinkedService
+    from .xero_linked_service import XeroLinkedService
+    from .square_linked_service import SquareLinkedService
+    from .spark_linked_service import SparkLinkedService
+    from .shopify_linked_service import ShopifyLinkedService
+    from .service_now_linked_service import ServiceNowLinkedService
+    from .quick_books_linked_service import QuickBooksLinkedService
+    from .presto_linked_service import PrestoLinkedService
+    from .phoenix_linked_service import PhoenixLinkedService
+    from .paypal_linked_service import PaypalLinkedService
+    from .marketo_linked_service import MarketoLinkedService
+    from .maria_db_linked_service import MariaDBLinkedService
+    from .magento_linked_service import MagentoLinkedService
+    from .jira_linked_service import JiraLinkedService
+    from .impala_linked_service import ImpalaLinkedService
+    from .hubspot_linked_service import HubspotLinkedService
+    from .hive_linked_service import HiveLinkedService
+    from .hbase_linked_service import HBaseLinkedService
+    from .greenplum_linked_service import GreenplumLinkedService
+    from .google_big_query_linked_service import GoogleBigQueryLinkedService
+    from .eloqua_linked_service import EloquaLinkedService
+    from .drill_linked_service import DrillLinkedService
+    from .couchbase_linked_service import CouchbaseLinkedService
+    from .concur_linked_service import ConcurLinkedService
+    from .azure_postgre_sql_linked_service import AzurePostgreSqlLinkedService
+    from .amazon_mws_linked_service import AmazonMWSLinkedService
+    from .sap_hana_linked_service import SapHanaLinkedService
+    from .sap_bw_linked_service import SapBWLinkedService
+    from .sftp_server_linked_service import SftpServerLinkedService
+    from .ftp_server_linked_service import FtpServerLinkedService
+    from .http_linked_service import HttpLinkedService
+    from .azure_search_linked_service import AzureSearchLinkedService
+    from .custom_data_source_linked_service import CustomDataSourceLinkedService
+    from .amazon_redshift_linked_service import AmazonRedshiftLinkedService
+    from .amazon_s3_linked_service import AmazonS3LinkedService
+    from .rest_service_linked_service import RestServiceLinkedService
+    from .sap_open_hub_linked_service import SapOpenHubLinkedService
+    from .sap_ecc_linked_service import SapEccLinkedService
+    from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService
+    from .salesforce_linked_service import SalesforceLinkedService
+    from .office365_linked_service import Office365LinkedService
+    from .azure_blob_fs_linked_service import AzureBlobFSLinkedService
+    from .azure_data_lake_store_linked_service import AzureDataLakeStoreLinkedService
+    from .cosmos_db_mongo_db_api_linked_service import CosmosDbMongoDbApiLinkedService
+    from .mongo_db_v2_linked_service import MongoDbV2LinkedService
+    from .mongo_db_linked_service import MongoDbLinkedService
+    from .cassandra_linked_service import CassandraLinkedService
+    from .web_client_certificate_authentication import WebClientCertificateAuthentication
+    from .web_basic_authentication import WebBasicAuthentication
+    from .web_anonymous_authentication import WebAnonymousAuthentication
+    from .web_linked_service_type_properties import WebLinkedServiceTypeProperties
+    from .web_linked_service import WebLinkedService
+    from .odata_linked_service import ODataLinkedService
+    from .hdfs_linked_service import HdfsLinkedService
+    from .odbc_linked_service import OdbcLinkedService
+    from .azure_ml_linked_service import AzureMLLinkedService
+    from .teradata_linked_service import TeradataLinkedService
+    from .db2_linked_service import Db2LinkedService
+    from .sybase_linked_service import SybaseLinkedService
+    from .postgre_sql_linked_service import PostgreSqlLinkedService
+    from .my_sql_linked_service import MySqlLinkedService
+    from .azure_my_sql_linked_service import AzureMySqlLinkedService
+    from .oracle_linked_service import OracleLinkedService
+    from .file_server_linked_service import FileServerLinkedService
+    from .hd_insight_linked_service import HDInsightLinkedService
+    from .dynamics_linked_service import DynamicsLinkedService
+    from .cosmos_db_linked_service import CosmosDbLinkedService
+    from .azure_key_vault_linked_service import AzureKeyVaultLinkedService
+    from .azure_batch_linked_service import AzureBatchLinkedService
+    from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService
+    from .sql_server_linked_service import SqlServerLinkedService
+    from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
+    from .azure_table_storage_linked_service import AzureTableStorageLinkedService
+    from .azure_blob_storage_linked_service import AzureBlobStorageLinkedService
+    from .azure_storage_linked_service import AzureStorageLinkedService
+    from .google_ad_words_object_dataset import GoogleAdWordsObjectDataset
+    from .azure_data_explorer_table_dataset import AzureDataExplorerTableDataset
+    from .oracle_service_cloud_object_dataset import OracleServiceCloudObjectDataset
+    from .dynamics_ax_resource_dataset import DynamicsAXResourceDataset
+    from .responsys_object_dataset import ResponsysObjectDataset
+    from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset
+    from .vertica_table_dataset import VerticaTableDataset
+    from .netezza_table_dataset import NetezzaTableDataset
+    from .zoho_object_dataset import ZohoObjectDataset
+    from .xero_object_dataset import XeroObjectDataset
+    from .square_object_dataset import SquareObjectDataset
+    from .spark_object_dataset import SparkObjectDataset
+    from .shopify_object_dataset import ShopifyObjectDataset
+    from .service_now_object_dataset import ServiceNowObjectDataset
+    from .quick_books_object_dataset import QuickBooksObjectDataset
+    from .presto_object_dataset import PrestoObjectDataset
+    from .phoenix_object_dataset import PhoenixObjectDataset
+    from .paypal_object_dataset import PaypalObjectDataset
+    from .marketo_object_dataset import MarketoObjectDataset
+    from .maria_db_table_dataset import MariaDBTableDataset
+    from .magento_object_dataset import MagentoObjectDataset
+    from .jira_object_dataset import JiraObjectDataset
+    from .impala_object_dataset import ImpalaObjectDataset
+    from .hubspot_object_dataset import HubspotObjectDataset
+    from .hive_object_dataset import HiveObjectDataset
+    from .hbase_object_dataset import HBaseObjectDataset
+    from .greenplum_table_dataset import GreenplumTableDataset
+    from .google_big_query_object_dataset import GoogleBigQueryObjectDataset
+    from .eloqua_object_dataset import EloquaObjectDataset
+    from .drill_table_dataset import DrillTableDataset
+    from .couchbase_table_dataset import CouchbaseTableDataset
+    from .concur_object_dataset import ConcurObjectDataset
+    from .azure_postgre_sql_table_dataset import AzurePostgreSqlTableDataset
+    from .amazon_mws_object_dataset import AmazonMWSObjectDataset
+    from .dataset_zip_deflate_compression import DatasetZipDeflateCompression
+    from .dataset_deflate_compression import DatasetDeflateCompression
+    from .dataset_gzip_compression import DatasetGZipCompression
+    from .dataset_bzip2_compression import DatasetBZip2Compression
+    from .dataset_compression import DatasetCompression
+    from .parquet_format import ParquetFormat
+    from .orc_format import OrcFormat
+    from .avro_format import AvroFormat
+    from .json_format import JsonFormat
+    from .text_format import TextFormat
+    from .dataset_storage_format import DatasetStorageFormat
+    from .http_dataset import HttpDataset
+    from .azure_search_index_dataset import AzureSearchIndexDataset
+    from .web_table_dataset import WebTableDataset
+    from .rest_resource_dataset import RestResourceDataset
+    from .sql_server_table_dataset import SqlServerTableDataset
+    from .sap_open_hub_table_dataset import SapOpenHubTableDataset
+    from .sap_ecc_resource_dataset import SapEccResourceDataset
+    from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset
+    from .salesforce_object_dataset import SalesforceObjectDataset
+    from .relational_table_dataset import RelationalTableDataset
+    from .azure_my_sql_table_dataset import AzureMySqlTableDataset
+    from .oracle_table_dataset import OracleTableDataset
+    from .odata_resource_dataset import ODataResourceDataset
+    from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset
+    from .mongo_db_v2_collection_dataset import MongoDbV2CollectionDataset
+    from .mongo_db_collection_dataset import MongoDbCollectionDataset
+    from .file_share_dataset import FileShareDataset
+    from .office365_dataset import Office365Dataset
+    from .azure_blob_fs_dataset import AzureBlobFSDataset
+    from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset
+    from .dynamics_entity_dataset import DynamicsEntityDataset
+    from .document_db_collection_dataset import DocumentDbCollectionDataset
+    from .custom_dataset import CustomDataset
+    from .cassandra_table_dataset import CassandraTableDataset
+    from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset
+    from .azure_sql_table_dataset import AzureSqlTableDataset
+    from .azure_table_dataset import AzureTableDataset
+    from .azure_blob_dataset import AzureBlobDataset
+    from .hdfs_location import HdfsLocation
+    from .http_server_location import HttpServerLocation
+    from .sftp_location import SftpLocation
+    from .ftp_server_location import FtpServerLocation
+    from .file_server_location import FileServerLocation
+    from .amazon_s3_location import AmazonS3Location
+    from .azure_data_lake_store_location import AzureDataLakeStoreLocation
+    from .azure_blob_fs_location import AzureBlobFSLocation
+    from .azure_blob_storage_location import AzureBlobStorageLocation
+    from .dataset_location import DatasetLocation
+    from .delimited_text_dataset import DelimitedTextDataset
+    from .parquet_dataset import ParquetDataset
+    from .amazon_s3_dataset import AmazonS3Dataset
+    from .activity_policy import ActivityPolicy
+    from .azure_function_activity import AzureFunctionActivity
+    from .databricks_spark_python_activity import DatabricksSparkPythonActivity
+    from .databricks_spark_jar_activity import DatabricksSparkJarActivity
+    from .databricks_notebook_activity import DatabricksNotebookActivity
+    from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
+    from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
+    from .azure_ml_web_service_file import AzureMLWebServiceFile
+    from .azure_ml_batch_execution_activity import AzureMLBatchExecutionActivity
+    from .get_metadata_activity import GetMetadataActivity
+    from .web_activity_authentication import WebActivityAuthentication
+    from .web_activity import WebActivity
+    from .redshift_unload_settings import RedshiftUnloadSettings
+    from .amazon_redshift_source import AmazonRedshiftSource
+    from .google_ad_words_source import GoogleAdWordsSource
+    from .oracle_service_cloud_source import OracleServiceCloudSource
+    from .dynamics_ax_source import DynamicsAXSource
+    from .responsys_source import ResponsysSource
+    from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
+    from .vertica_source import VerticaSource
+    from .netezza_source import NetezzaSource
+    from .zoho_source import ZohoSource
+    from .xero_source import XeroSource
+    from .square_source import SquareSource
+    from .spark_source import SparkSource
+    from .shopify_source import ShopifySource
+    from .service_now_source import ServiceNowSource
+    from .quick_books_source import QuickBooksSource
+    from .presto_source import PrestoSource
+    from .phoenix_source import PhoenixSource
+    from .paypal_source import PaypalSource
+    from .marketo_source import MarketoSource
+    from .maria_db_source import MariaDBSource
+    from .magento_source import MagentoSource
+    from .jira_source import JiraSource
+    from .impala_source import ImpalaSource
+    from .hubspot_source import HubspotSource
+    from .hive_source import HiveSource
+    from .hbase_source import HBaseSource
+    from .greenplum_source import GreenplumSource
+    from .google_big_query_source import GoogleBigQuerySource
+    from .eloqua_source import EloquaSource
+    from .drill_source import DrillSource
+    from .couchbase_source import CouchbaseSource
+    from .concur_source import ConcurSource
+    from .azure_postgre_sql_source import AzurePostgreSqlSource
+    from .amazon_mws_source import AmazonMWSSource
+    from .http_source import HttpSource
+    from .azure_blob_fs_source import AzureBlobFSSource
+    from .azure_data_lake_store_source import AzureDataLakeStoreSource
+    from .office365_source import Office365Source
+    from .mongo_db_cursor_methods_properties import MongoDbCursorMethodsProperties
+    from .cosmos_db_mongo_db_api_source import CosmosDbMongoDbApiSource
+    from .mongo_db_v2_source import MongoDbV2Source
+    from .mongo_db_source import MongoDbSource
+    from .cassandra_source import CassandraSource
+    from .web_source import WebSource
+    from .oracle_partition_settings import OraclePartitionSettings
+    from .oracle_source import OracleSource
+    from .azure_data_explorer_source import AzureDataExplorerSource
+    from .azure_my_sql_source import AzureMySqlSource
+    from .distcp_settings import DistcpSettings
+    from .hdfs_source import HdfsSource
+    from .file_system_source import FileSystemSource
+    from .sql_dw_source import SqlDWSource
+    from .stored_procedure_parameter import StoredProcedureParameter
+    from .azure_sql_source import AzureSqlSource
+    from .sql_server_source import SqlServerSource
+    from .sql_source import SqlSource
+    from .rest_source import RestSource
+    from .sap_open_hub_source import SapOpenHubSource
+    from .sap_ecc_source import SapEccSource
+    from .sap_cloud_for_customer_source import SapCloudForCustomerSource
+    from .salesforce_source import SalesforceSource
+    from .relational_source import RelationalSource
+    from .dynamics_source import DynamicsSource
+    from .document_db_collection_source import DocumentDbCollectionSource
+    from .blob_source import BlobSource
+    from .azure_table_source import AzureTableSource
+    from .format_read_setting import FormatReadSetting
+    from .delimited_text_read_setting import DelimitedTextReadSetting
+    from .hdfs_read_setting import HdfsReadSetting
+    from .http_read_setting import HttpReadSetting
+    from .sftp_read_setting import SftpReadSetting
+    from .ftp_read_setting import FtpReadSetting
+    from .file_server_read_setting import FileServerReadSetting
+    from .amazon_s3_read_setting import AmazonS3ReadSetting
+    from .azure_data_lake_store_read_setting import AzureDataLakeStoreReadSetting
+    from .azure_blob_fs_read_setting import AzureBlobFSReadSetting
+    from .azure_blob_storage_read_setting import AzureBlobStorageReadSetting
+    from .connector_read_setting import ConnectorReadSetting
+    from .delimited_text_source import DelimitedTextSource
+    from .parquet_source import ParquetSource
+    from .copy_source import CopySource
+    from .lookup_activity import LookupActivity
+    from .log_storage_settings import LogStorageSettings
+    from .delete_activity import DeleteActivity
+    from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity
+    from .custom_activity_reference_object import CustomActivityReferenceObject
+    from .custom_activity import CustomActivity
+    from .ssis_property_override import SSISPropertyOverride
+    from .ssis_execution_parameter import SSISExecutionParameter
+    from .ssis_execution_credential import SSISExecutionCredential
+    from .ssis_package_location import SSISPackageLocation
+    from .execute_ssis_package_activity import ExecuteSSISPackageActivity
+    from .hd_insight_spark_activity import HDInsightSparkActivity
+    from .hd_insight_streaming_activity import HDInsightStreamingActivity
+    from .hd_insight_map_reduce_activity import HDInsightMapReduceActivity
+    from .hd_insight_pig_activity import HDInsightPigActivity
+    from .hd_insight_hive_activity import HDInsightHiveActivity
+    from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings
+    from .staging_settings import StagingSettings
+    from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink
+    from .salesforce_sink import SalesforceSink
+    from .azure_data_explorer_sink import AzureDataExplorerSink
+    from .dynamics_sink import DynamicsSink
+    from .odbc_sink import OdbcSink
+    from .azure_search_index_sink import AzureSearchIndexSink
+    from .azure_blob_fs_sink import AzureBlobFSSink
+    from .azure_data_lake_store_sink import AzureDataLakeStoreSink
+    from .oracle_sink import OracleSink
+    from .polybase_settings import PolybaseSettings
+    from .sql_dw_sink import SqlDWSink
+    from .azure_sql_sink import AzureSqlSink
+    from .sql_server_sink import SqlServerSink
+    from .sql_sink import SqlSink
+    from .document_db_collection_sink import DocumentDbCollectionSink
+    from .file_system_sink import FileSystemSink
+    from .blob_sink import BlobSink
+    from .file_server_write_setting import FileServerWriteSetting
+    from .azure_data_lake_store_write_setting import AzureDataLakeStoreWriteSetting
+    from .azure_blob_fs_write_setting import AzureBlobFSWriteSetting
+    from .azure_blob_storage_write_setting import AzureBlobStorageWriteSetting
+    from .connector_write_setting import ConnectorWriteSetting
+    from .parquet_sink import ParquetSink
+    from .azure_table_sink import AzureTableSink
+    from .azure_queue_sink import AzureQueueSink
+    from .sap_cloud_for_customer_sink import SapCloudForCustomerSink
+    from .format_write_setting import FormatWriteSetting
+    from .delimited_text_write_setting import DelimitedTextWriteSetting
+    from .delimited_text_sink import DelimitedTextSink
+    from .copy_sink import CopySink
+    from .copy_activity import CopyActivity
+    from .execution_activity import ExecutionActivity
+    from .web_hook_activity import WebHookActivity
+    from .append_variable_activity import AppendVariableActivity
+    from .set_variable_activity import SetVariableActivity
+    from .filter_activity import FilterActivity
+    from .validation_activity import ValidationActivity
+    from .until_activity import UntilActivity
+    from .wait_activity import WaitActivity
+    from .for_each_activity import ForEachActivity
+    from .if_condition_activity import IfConditionActivity
+    from .execute_pipeline_activity import ExecutePipelineActivity
+    from .control_activity import ControlActivity
+    from .linked_integration_runtime import LinkedIntegrationRuntime
+    from .self_hosted_integration_runtime_node import SelfHostedIntegrationRuntimeNode
+    from .self_hosted_integration_runtime_status import SelfHostedIntegrationRuntimeStatus
+    from .managed_integration_runtime_operation_result import ManagedIntegrationRuntimeOperationResult
+    from .managed_integration_runtime_error import ManagedIntegrationRuntimeError
+    from .managed_integration_runtime_node import ManagedIntegrationRuntimeNode
+    from .managed_integration_runtime_status import ManagedIntegrationRuntimeStatus
+    from .linked_integration_runtime_rbac_authorization import LinkedIntegrationRuntimeRbacAuthorization
+    from .linked_integration_runtime_key_authorization import LinkedIntegrationRuntimeKeyAuthorization
+    from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
+    from .self_hosted_integration_runtime import SelfHostedIntegrationRuntime
+    from .entity_reference import EntityReference
+    from .integration_runtime_data_proxy_properties import IntegrationRuntimeDataProxyProperties
+    from .integration_runtime_custom_setup_script_properties import IntegrationRuntimeCustomSetupScriptProperties
+    from .integration_runtime_ssis_catalog_info import IntegrationRuntimeSsisCatalogInfo
+    from .integration_runtime_ssis_properties import IntegrationRuntimeSsisProperties
+    from .integration_runtime_vnet_properties import IntegrationRuntimeVNetProperties
+    from .integration_runtime_compute_properties import IntegrationRuntimeComputeProperties
+    from .managed_integration_runtime import ManagedIntegrationRuntime
+    from .integration_runtime_node_ip_address import IntegrationRuntimeNodeIpAddress
+    from .ssis_variable import SsisVariable
+    from .ssis_environment import SsisEnvironment
+    from .ssis_parameter import SsisParameter
+    from .ssis_package import SsisPackage
+    from .ssis_environment_reference import SsisEnvironmentReference
+    from .ssis_project import SsisProject
+    from .ssis_folder import SsisFolder
+    from .ssis_object_metadata import SsisObjectMetadata
+    from .ssis_object_metadata_list_response import SsisObjectMetadataListResponse
+    from .integration_runtime_node_monitoring_data import IntegrationRuntimeNodeMonitoringData
+    from .integration_runtime_monitoring_data import IntegrationRuntimeMonitoringData
+    from .integration_runtime_auth_keys import IntegrationRuntimeAuthKeys
+    from .integration_runtime_regenerate_key_parameters import IntegrationRuntimeRegenerateKeyParameters
+    from .integration_runtime_connection_info import IntegrationRuntimeConnectionInfo
+from .operation_paged import OperationPaged
+from .factory_paged import FactoryPaged
+from .integration_runtime_resource_paged import IntegrationRuntimeResourcePaged
+from .linked_service_resource_paged import LinkedServiceResourcePaged
+from .dataset_resource_paged import DatasetResourcePaged
+from .pipeline_resource_paged import PipelineResourcePaged
+from .trigger_resource_paged import TriggerResourcePaged
+from .rerun_trigger_resource_paged import RerunTriggerResourcePaged
+from .data_factory_management_client_enums import (
+    IntegrationRuntimeState,
+    IntegrationRuntimeAutoUpdate,
+    ParameterType,
+    DependencyCondition,
+    VariableType,
+    TriggerRuntimeState,
+    RunQueryFilterOperand,
+    RunQueryFilterOperator,
+    RunQueryOrderByField,
+    RunQueryOrder,
+    TriggerRunStatus,
+    TumblingWindowFrequency,
+    BlobEventTypes,
+    DayOfWeek,
+    DaysOfWeek,
+    RecurrenceFrequency,
+    GoogleAdWordsAuthenticationType,
+    SparkServerType,
+    SparkThriftTransportProtocol,
+    SparkAuthenticationType,
+    ServiceNowAuthenticationType,
+    PrestoAuthenticationType,
+    PhoenixAuthenticationType,
+    ImpalaAuthenticationType,
+    HiveServerType,
+    HiveThriftTransportProtocol,
+    HiveAuthenticationType,
+    HBaseAuthenticationType,
+    GoogleBigQueryAuthenticationType,
+    SapHanaAuthenticationType,
+    SftpAuthenticationType,
+    FtpAuthenticationType,
+    HttpAuthenticationType,
+    RestServiceAuthenticationType,
+    MongoDbAuthenticationType,
+    ODataAuthenticationType,
+    ODataAadServicePrincipalCredentialType,
+    TeradataAuthenticationType,
+    Db2AuthenticationType,
+    SybaseAuthenticationType,
+    AzureFunctionActivityMethod,
+    WebActivityMethod,
+    StoredProcedureParameterType,
+    HDInsightActivityDebugInfoOption,
+    PolybaseSettingsRejectType,
+    WebHookActivityMethod,
+    IntegrationRuntimeType,
+    SelfHostedIntegrationRuntimeNodeStatus,
+    IntegrationRuntimeUpdateResult,
+    IntegrationRuntimeInternalChannelEncryptionMode,
+    ManagedIntegrationRuntimeNodeStatus,
+    IntegrationRuntimeEntityReferenceType,
+    IntegrationRuntimeSsisCatalogPricingTier,
+    IntegrationRuntimeLicenseType,
+    IntegrationRuntimeEdition,
+    SsisObjectMetadataType,
+    IntegrationRuntimeAuthKeyName,
+)
+
+__all__ = [
+    'Resource',
+    'SubResource',
+    'Expression',
+    'SecureString',
+    'LinkedServiceReference',
+    'AzureKeyVaultSecretReference',
+    'SecretBase',
+    'FactoryIdentity',
+    'FactoryRepoConfiguration',
+    'Factory',
+    'IntegrationRuntime',
+    'IntegrationRuntimeResource',
+    'IntegrationRuntimeReference',
+    'IntegrationRuntimeStatus',
+    'IntegrationRuntimeStatusResponse',
+    'IntegrationRuntimeStatusListResponse',
+    'UpdateIntegrationRuntimeRequest',
+    'UpdateIntegrationRuntimeNodeRequest',
+    'LinkedIntegrationRuntimeRequest',
+    'CreateLinkedIntegrationRuntimeRequest',
+    'ParameterSpecification',
+    'LinkedService',
+    'LinkedServiceResource',
+    'DatasetFolder',
+    'Dataset',
+    'DatasetResource',
+    'ActivityDependency',
+    'UserProperty',
+    'Activity',
+    'VariableSpecification',
+    'PipelineFolder',
+    'PipelineResource',
+    'Trigger',
+    'TriggerResource',
+    'CreateRunResponse',
+    'FactoryVSTSConfiguration',
+    'FactoryGitHubConfiguration',
+    'FactoryRepoUpdate',
+    'GitHubAccessTokenRequest',
+    'GitHubAccessTokenResponse',
+    'UserAccessPolicy',
+    'AccessPolicyResponse',
+    'PipelineReference',
+    'TriggerPipelineReference',
+    'FactoryUpdateParameters',
+    'DatasetReference',
+    'RunQueryFilter',
+    'RunQueryOrderBy',
+    'RunFilterParameters',
+    'PipelineRunInvokedBy',
+    'PipelineRun',
+    'PipelineRunsQueryResponse',
+    'ActivityRun',
+    'ActivityRunsQueryResponse',
+    'TriggerRun',
+    'TriggerRunsQueryResponse',
+    'RerunTumblingWindowTriggerActionParameters',
+    'RerunTumblingWindowTrigger',
+    'RerunTriggerResource',
+    'OperationDisplay',
+    'OperationLogSpecification',
+    'OperationMetricAvailability',
+    'OperationMetricDimension',
+    'OperationMetricSpecification',
+    'OperationServiceSpecification',
+    'Operation',
+    'GetSsisObjectMetadataRequest',
+    'SsisObjectMetadataStatusResponse',
+    'ExposureControlRequest',
+    'ExposureControlResponse',
+    'SelfDependencyTumblingWindowTriggerReference',
+    'TriggerReference',
+    'TumblingWindowTriggerDependencyReference',
+    'TriggerDependencyReference',
+    'DependencyReference',
+    'RetryPolicy',
+    'TumblingWindowTrigger',
+    'BlobEventsTrigger',
+    'BlobTrigger',
+    'RecurrenceScheduleOccurrence',
+    'RecurrenceSchedule',
+    'ScheduleTriggerRecurrence',
+    'ScheduleTrigger',
+    'MultiplePipelineTrigger',
+    'AzureFunctionLinkedService',
+    'AzureDataExplorerLinkedService',
+    'GoogleAdWordsLinkedService',
+    'OracleServiceCloudLinkedService',
+    'DynamicsAXLinkedService',
+    'ResponsysLinkedService',
+    'AzureDatabricksLinkedService',
+    'AzureDataLakeAnalyticsLinkedService',
+    'ScriptAction',
+    'HDInsightOnDemandLinkedService',
+    'SalesforceMarketingCloudLinkedService',
+    'NetezzaLinkedService',
+    'VerticaLinkedService',
+    'ZohoLinkedService',
+    'XeroLinkedService',
+    'SquareLinkedService',
+    'SparkLinkedService',
+    'ShopifyLinkedService',
+    'ServiceNowLinkedService',
+    'QuickBooksLinkedService',
+    'PrestoLinkedService',
+    'PhoenixLinkedService',
+    'PaypalLinkedService',
+    'MarketoLinkedService',
+    'MariaDBLinkedService',
+    'MagentoLinkedService',
+    'JiraLinkedService',
+    'ImpalaLinkedService',
+    'HubspotLinkedService',
+    'HiveLinkedService',
+    'HBaseLinkedService',
+    'GreenplumLinkedService',
+    'GoogleBigQueryLinkedService',
+    'EloquaLinkedService',
+    'DrillLinkedService',
+    'CouchbaseLinkedService',
+    'ConcurLinkedService',
+    'AzurePostgreSqlLinkedService',
+    'AmazonMWSLinkedService',
+    'SapHanaLinkedService',
+    'SapBWLinkedService',
+    'SftpServerLinkedService',
+    'FtpServerLinkedService',
+    'HttpLinkedService',
+    'AzureSearchLinkedService',
+    'CustomDataSourceLinkedService',
+    'AmazonRedshiftLinkedService',
+    'AmazonS3LinkedService',
+    'RestServiceLinkedService',
+    'SapOpenHubLinkedService',
+    'SapEccLinkedService',
+    'SapCloudForCustomerLinkedService',
+    'SalesforceLinkedService',
+    'Office365LinkedService',
+    'AzureBlobFSLinkedService',
+    'AzureDataLakeStoreLinkedService',
+    'CosmosDbMongoDbApiLinkedService',
+    'MongoDbV2LinkedService',
+    'MongoDbLinkedService',
+    'CassandraLinkedService',
+    'WebClientCertificateAuthentication',
+    'WebBasicAuthentication',
+    'WebAnonymousAuthentication',
+    'WebLinkedServiceTypeProperties',
+    'WebLinkedService',
+    'ODataLinkedService',
+    'HdfsLinkedService',
+    'OdbcLinkedService',
+    'AzureMLLinkedService',
+    'TeradataLinkedService',
+    'Db2LinkedService',
+    'SybaseLinkedService',
+    'PostgreSqlLinkedService',
+    'MySqlLinkedService',
+    'AzureMySqlLinkedService',
+    'OracleLinkedService',
+    'FileServerLinkedService',
+    'HDInsightLinkedService',
+    'DynamicsLinkedService',
+    'CosmosDbLinkedService',
+    'AzureKeyVaultLinkedService',
+    'AzureBatchLinkedService',
+    'AzureSqlDatabaseLinkedService',
+    'SqlServerLinkedService',
+    'AzureSqlDWLinkedService',
+    'AzureTableStorageLinkedService',
+    'AzureBlobStorageLinkedService',
+    'AzureStorageLinkedService',
+    'GoogleAdWordsObjectDataset',
+    'AzureDataExplorerTableDataset',
+    'OracleServiceCloudObjectDataset',
+    'DynamicsAXResourceDataset',
+    'ResponsysObjectDataset',
+    'SalesforceMarketingCloudObjectDataset',
+    'VerticaTableDataset',
+    'NetezzaTableDataset',
+    'ZohoObjectDataset',
+    'XeroObjectDataset',
+    'SquareObjectDataset',
+    'SparkObjectDataset',
+    'ShopifyObjectDataset',
+    'ServiceNowObjectDataset',
+    'QuickBooksObjectDataset',
+    'PrestoObjectDataset',
+    'PhoenixObjectDataset',
+    'PaypalObjectDataset',
+    'MarketoObjectDataset',
+    'MariaDBTableDataset',
+    'MagentoObjectDataset',
+    'JiraObjectDataset',
+    'ImpalaObjectDataset',
+    'HubspotObjectDataset',
+    'HiveObjectDataset',
+    'HBaseObjectDataset',
+    'GreenplumTableDataset',
+    'GoogleBigQueryObjectDataset',
+    'EloquaObjectDataset',
+    'DrillTableDataset',
+    'CouchbaseTableDataset',
+    'ConcurObjectDataset',
+    'AzurePostgreSqlTableDataset',
+    'AmazonMWSObjectDataset',
+    'DatasetZipDeflateCompression',
+    'DatasetDeflateCompression',
+    'DatasetGZipCompression',
+    'DatasetBZip2Compression',
+    'DatasetCompression',
+    'ParquetFormat',
+    'OrcFormat',
+    'AvroFormat',
+    'JsonFormat',
+    'TextFormat',
+    'DatasetStorageFormat',
+    'HttpDataset',
+    'AzureSearchIndexDataset',
+    'WebTableDataset',
+    'RestResourceDataset',
+    'SqlServerTableDataset',
+    'SapOpenHubTableDataset',
+    'SapEccResourceDataset',
+    'SapCloudForCustomerResourceDataset',
+    'SalesforceObjectDataset',
+    'RelationalTableDataset',
+    'AzureMySqlTableDataset',
+    'OracleTableDataset',
+    'ODataResourceDataset',
+    'CosmosDbMongoDbApiCollectionDataset',
+    'MongoDbV2CollectionDataset',
+    'MongoDbCollectionDataset',
+    'FileShareDataset',
+    'Office365Dataset',
+    'AzureBlobFSDataset',
+    'AzureDataLakeStoreDataset',
+    'DynamicsEntityDataset',
+    'DocumentDbCollectionDataset',
+    'CustomDataset',
+    'CassandraTableDataset',
+    'AzureSqlDWTableDataset',
+    'AzureSqlTableDataset',
+    'AzureTableDataset',
+    'AzureBlobDataset',
+    'HdfsLocation',
+    'HttpServerLocation',
+    'SftpLocation',
+    'FtpServerLocation',
+    'FileServerLocation',
+    'AmazonS3Location',
+    'AzureDataLakeStoreLocation',
+    'AzureBlobFSLocation',
+    'AzureBlobStorageLocation',
+    'DatasetLocation',
+    'DelimitedTextDataset',
+    'ParquetDataset',
+    'AmazonS3Dataset',
+    'ActivityPolicy',
+    'AzureFunctionActivity',
+    'DatabricksSparkPythonActivity',
+    'DatabricksSparkJarActivity',
+    'DatabricksNotebookActivity',
+    'DataLakeAnalyticsUSQLActivity',
+    'AzureMLUpdateResourceActivity',
+    'AzureMLWebServiceFile',
+    'AzureMLBatchExecutionActivity',
+    'GetMetadataActivity',
+    'WebActivityAuthentication',
+    'WebActivity',
+    'RedshiftUnloadSettings',
+    'AmazonRedshiftSource',
+    'GoogleAdWordsSource',
+    'OracleServiceCloudSource',
+    'DynamicsAXSource',
+    'ResponsysSource',
+    'SalesforceMarketingCloudSource',
+    'VerticaSource',
+    'NetezzaSource',
+    'ZohoSource',
+    'XeroSource',
+    'SquareSource',
+    'SparkSource',
+    'ShopifySource',
+    'ServiceNowSource',
+    'QuickBooksSource',
+    'PrestoSource',
+    'PhoenixSource',
+    'PaypalSource',
+    'MarketoSource',
+    'MariaDBSource',
+    'MagentoSource',
+    'JiraSource',
+    'ImpalaSource',
+    'HubspotSource',
+    'HiveSource',
+    'HBaseSource',
+    'GreenplumSource',
+    'GoogleBigQuerySource',
+    'EloquaSource',
+    'DrillSource',
+    'CouchbaseSource',
+    'ConcurSource',
+    'AzurePostgreSqlSource',
+    'AmazonMWSSource',
+    'HttpSource',
+    'AzureBlobFSSource',
+    'AzureDataLakeStoreSource',
+    'Office365Source',
+    'MongoDbCursorMethodsProperties',
+    'CosmosDbMongoDbApiSource',
+    'MongoDbV2Source',
+    'MongoDbSource',
+    'CassandraSource',
+    'WebSource',
+    'OraclePartitionSettings',
+    'OracleSource',
+    'AzureDataExplorerSource',
+    'AzureMySqlSource',
+    'DistcpSettings',
+    'HdfsSource',
+    'FileSystemSource',
+    'SqlDWSource',
+    'StoredProcedureParameter',
+    'AzureSqlSource',
+    'SqlServerSource',
+    'SqlSource',
+    'RestSource',
+    'SapOpenHubSource',
+    'SapEccSource',
+    'SapCloudForCustomerSource',
+    'SalesforceSource',
+    'RelationalSource',
+    'DynamicsSource',
+    'DocumentDbCollectionSource',
+    'BlobSource',
+    'AzureTableSource',
+    'FormatReadSetting',
+    'DelimitedTextReadSetting',
+    'HdfsReadSetting',
+    'HttpReadSetting',
+    'SftpReadSetting',
+    'FtpReadSetting',
+    'FileServerReadSetting',
+    'AmazonS3ReadSetting',
+    'AzureDataLakeStoreReadSetting',
+    'AzureBlobFSReadSetting',
+    'AzureBlobStorageReadSetting',
+    'ConnectorReadSetting',
+    'DelimitedTextSource',
+    'ParquetSource',
+    'CopySource',
+    'LookupActivity',
+    'LogStorageSettings',
+    'DeleteActivity',
+    'SqlServerStoredProcedureActivity',
+    'CustomActivityReferenceObject',
+    'CustomActivity',
+    'SSISPropertyOverride',
+    'SSISExecutionParameter',
+    'SSISExecutionCredential',
+    'SSISPackageLocation',
+    'ExecuteSSISPackageActivity',
+    'HDInsightSparkActivity',
+    'HDInsightStreamingActivity',
+    'HDInsightMapReduceActivity',
+    'HDInsightPigActivity',
+    'HDInsightHiveActivity',
+    'RedirectIncompatibleRowSettings',
+    'StagingSettings',
+    'CosmosDbMongoDbApiSink',
+    'SalesforceSink',
+    'AzureDataExplorerSink',
+    'DynamicsSink',
+    'OdbcSink',
+    'AzureSearchIndexSink',
+    'AzureBlobFSSink',
+    'AzureDataLakeStoreSink',
+    'OracleSink',
+    'PolybaseSettings',
+    'SqlDWSink',
+    'AzureSqlSink',
+    'SqlServerSink',
+    'SqlSink',
+    'DocumentDbCollectionSink',
+    'FileSystemSink',
+    'BlobSink',
+    'FileServerWriteSetting',
+    'AzureDataLakeStoreWriteSetting',
+    'AzureBlobFSWriteSetting',
+    'AzureBlobStorageWriteSetting',
+    'ConnectorWriteSetting',
+    'ParquetSink',
+    'AzureTableSink',
+    'AzureQueueSink',
+    'SapCloudForCustomerSink',
+    'FormatWriteSetting',
+    'DelimitedTextWriteSetting',
+    'DelimitedTextSink',
+    'CopySink',
+    'CopyActivity',
+    'ExecutionActivity',
+    'WebHookActivity',
+    'AppendVariableActivity',
+    'SetVariableActivity',
+    'FilterActivity',
+    'ValidationActivity',
+    'UntilActivity',
+    'WaitActivity',
+    'ForEachActivity',
+    'IfConditionActivity',
+    'ExecutePipelineActivity',
+    'ControlActivity',
+    'LinkedIntegrationRuntime',
+    'SelfHostedIntegrationRuntimeNode',
+    'SelfHostedIntegrationRuntimeStatus',
+    'ManagedIntegrationRuntimeOperationResult',
+    'ManagedIntegrationRuntimeError',
+    'ManagedIntegrationRuntimeNode',
+    'ManagedIntegrationRuntimeStatus',
+    'LinkedIntegrationRuntimeRbacAuthorization',
+    'LinkedIntegrationRuntimeKeyAuthorization',
+    'LinkedIntegrationRuntimeType',
+    'SelfHostedIntegrationRuntime',
+    'EntityReference',
+    'IntegrationRuntimeDataProxyProperties',
+    'IntegrationRuntimeCustomSetupScriptProperties',
+    'IntegrationRuntimeSsisCatalogInfo',
+    'IntegrationRuntimeSsisProperties',
+    'IntegrationRuntimeVNetProperties',
+    'IntegrationRuntimeComputeProperties',
+    'ManagedIntegrationRuntime',
+    'IntegrationRuntimeNodeIpAddress',
+    'SsisVariable',
+    'SsisEnvironment',
+    'SsisParameter',
+    'SsisPackage',
+    'SsisEnvironmentReference',
+    'SsisProject',
+    'SsisFolder',
+    'SsisObjectMetadata',
+    'SsisObjectMetadataListResponse',
+    'IntegrationRuntimeNodeMonitoringData',
+    'IntegrationRuntimeMonitoringData',
+    'IntegrationRuntimeAuthKeys',
+    'IntegrationRuntimeRegenerateKeyParameters',
+    'IntegrationRuntimeConnectionInfo',
+    'OperationPaged',
+    'FactoryPaged',
+    'IntegrationRuntimeResourcePaged',
+    'LinkedServiceResourcePaged',
+    'DatasetResourcePaged',
+    'PipelineResourcePaged',
+    'TriggerResourcePaged',
+    'RerunTriggerResourcePaged',
+    'IntegrationRuntimeState',
+    'IntegrationRuntimeAutoUpdate',
+    'ParameterType',
+    'DependencyCondition',
+    'VariableType',
+    'TriggerRuntimeState',
+    'RunQueryFilterOperand',
+    'RunQueryFilterOperator',
+    'RunQueryOrderByField',
+    'RunQueryOrder',
+    'TriggerRunStatus',
+    'TumblingWindowFrequency',
+    'BlobEventTypes',
+    'DayOfWeek',
+    'DaysOfWeek',
+    'RecurrenceFrequency',
+    'GoogleAdWordsAuthenticationType',
+    'SparkServerType',
+    'SparkThriftTransportProtocol',
+    'SparkAuthenticationType',
+    'ServiceNowAuthenticationType',
+    'PrestoAuthenticationType',
+    'PhoenixAuthenticationType',
+    'ImpalaAuthenticationType',
+    'HiveServerType',
+    'HiveThriftTransportProtocol',
+    'HiveAuthenticationType',
+    'HBaseAuthenticationType',
+    'GoogleBigQueryAuthenticationType',
+    'SapHanaAuthenticationType',
+    'SftpAuthenticationType',
+    'FtpAuthenticationType',
+    'HttpAuthenticationType',
+    'RestServiceAuthenticationType',
+    'MongoDbAuthenticationType',
+    'ODataAuthenticationType',
+    'ODataAadServicePrincipalCredentialType',
+    'TeradataAuthenticationType',
+    'Db2AuthenticationType',
+    'SybaseAuthenticationType',
+    'AzureFunctionActivityMethod',
+    'WebActivityMethod',
+    'StoredProcedureParameterType',
+    'HDInsightActivityDebugInfoOption',
+    'PolybaseSettingsRejectType',
+    'WebHookActivityMethod',
+    'IntegrationRuntimeType',
+    'SelfHostedIntegrationRuntimeNodeStatus',
+    'IntegrationRuntimeUpdateResult',
+    'IntegrationRuntimeInternalChannelEncryptionMode',
+    'ManagedIntegrationRuntimeNodeStatus',
+    'IntegrationRuntimeEntityReferenceType',
+    'IntegrationRuntimeSsisCatalogPricingTier',
+    'IntegrationRuntimeLicenseType',
+    'IntegrationRuntimeEdition',
+    'SsisObjectMetadataType',
+    'IntegrationRuntimeAuthKeyName',
+]
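The try/except at the top of this `__init__.py` prefers the annotated `_py3` model modules and falls back to the Python 2 variants on SyntaxError or ImportError, then re-exports everything from the flat `azure.mgmt.datafactory.models` namespace. A minimal sketch of consuming the new location-based dataset models from that namespace; the resource names and the `column_delimiter` value are illustrative assumptions, not part of the patch:

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLocation,
        DelimitedTextDataset,
        LinkedServiceReference,
    )

    # The new DatasetLocation subclasses feed the `location` property of the
    # file-based datasets introduced in this patch.
    blob_location = AzureBlobStorageLocation(
        container='input-container',   # illustrative container name
        folder_path='raw/csv',
    )
    dataset = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
        location=blob_location,
        column_delimiter=',',
    )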
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
new file mode 100644
index 000000000000..033d0fd9591f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response.py
@@ -0,0 +1,36 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class AccessPolicyResponse(Model):
+    """Get Data Plane read only token response definition.
+
+    :param policy: The user access policy.
+    :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
+    :param access_token: Data Plane read only access token.
+    :type access_token: str
+    :param data_plane_url: Data Plane service base URL.
+    :type data_plane_url: str
+    """
+
+    _attribute_map = {
+        'policy': {'key': 'policy', 'type': 'UserAccessPolicy'},
+        'access_token': {'key': 'accessToken', 'type': 'str'},
+        'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AccessPolicyResponse, self).__init__(**kwargs)
+        self.policy = kwargs.get('policy', None)
+        self.access_token = kwargs.get('access_token', None)
+        self.data_plane_url = kwargs.get('data_plane_url', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
new file mode 100644
index 000000000000..2932f547ff26
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/access_policy_response_py3.py
@@ -0,0 +1,36 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class AccessPolicyResponse(Model):
+    """Get Data Plane read only token response definition.
+
+    :param policy: The user access policy.
+    :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
+    :param access_token: Data Plane read only access token.
+    :type access_token: str
+    :param data_plane_url: Data Plane service base URL.
+    :type data_plane_url: str
+    """
+
+    _attribute_map = {
+        'policy': {'key': 'policy', 'type': 'UserAccessPolicy'},
+        'access_token': {'key': 'accessToken', 'type': 'str'},
+        'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
+    }
+
+    def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None:
+        super(AccessPolicyResponse, self).__init__(**kwargs)
+        self.policy = policy
+        self.access_token = access_token
+        self.data_plane_url = data_plane_url
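The file above is the `_py3` twin of the preceding module: same `_attribute_map`, but a keyword-only, annotated constructor instead of `**kwargs`. Either variant accepts the same keyword arguments; a short sketch with placeholder values:

    from azure.mgmt.datafactory.models import AccessPolicyResponse, UserAccessPolicy

    # Keyword arguments work against either variant; positional arguments are
    # rejected by the _py3 constructor because of the bare `*`.
    resp = AccessPolicyResponse(
        policy=UserAccessPolicy(permissions='r'),
        access_token='<token>',                    # placeholder value
        data_plane_url='https://example.invalid',  # placeholder value
    )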
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
new file mode 100644
index 000000000000..72d920f1d04c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class Activity(Model):
+    """A pipeline activity.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: ExecutionActivity, ControlActivity
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'}
+    }
+
+    def __init__(self, **kwargs):
+        super(Activity, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.depends_on = kwargs.get('depends_on', None)
+        self.user_properties = kwargs.get('user_properties', None)
+        self.type = None
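The `_subtype_map` in this class is the discriminator table msrest uses for polymorphic deserialization: the wire value of `type` selects the concrete subclass, recursing through ExecutionActivity/ControlActivity and their own subtype maps. A sketch of that mechanism, assuming msrest's `Model.from_dict` classmethod resolves nested discriminators as in current releases; illustrative, not part of the patch:

    from azure.mgmt.datafactory.models import Activity

    # Minimal wire payload; 'Wait' is registered under ControlActivity's map.
    payload = {'name': 'WaitStep', 'type': 'Wait'}
    activity = Activity.from_dict(payload)
    # Expected to resolve Container -> ControlActivity -> WaitActivity.
    print(type(activity).__name__)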
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
new file mode 100644
index 000000000000..a15b34acc24f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityDependency(Model):
+    """Activity dependency information.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param activity: Required. Activity name.
+    :type activity: str
+    :param dependency_conditions: Required. Match-Condition for the
+     dependency.
+    :type dependency_conditions: list[str or
+     ~azure.mgmt.datafactory.models.DependencyCondition]
+    """
+
+    _validation = {
+        'activity': {'required': True},
+        'dependency_conditions': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'activity': {'key': 'activity', 'type': 'str'},
+        'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ActivityDependency, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.activity = kwargs.get('activity', None)
+        self.dependency_conditions = kwargs.get('dependency_conditions', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
new file mode 100644
index 000000000000..2883a81a0adc
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_dependency_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityDependency(Model):
+    """Activity dependency information.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param activity: Required. Activity name.
+    :type activity: str
+    :param dependency_conditions: Required. Match-Condition for the
+     dependency.
+    :type dependency_conditions: list[str or
+     ~azure.mgmt.datafactory.models.DependencyCondition]
+    """
+
+    _validation = {
+        'activity': {'required': True},
+        'dependency_conditions': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'activity': {'key': 'activity', 'type': 'str'},
+        'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
+    }
+
+    def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None:
+        super(ActivityDependency, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.activity = activity
+        self.dependency_conditions = dependency_conditions
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
new file mode 100644
index 000000000000..4475cdbd9bea
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityPolicy(Model):
+    """Execution policy for an activity.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param timeout: Specifies the timeout for the activity to run. The
+     default timeout is 7 days. Type: string (or Expression with resultType
+     string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param retry: Maximum ordinary retry attempts. Default is 0. Type:
+     integer (or Expression with resultType integer), minimum: 0.
+    :type retry: object
+    :param retry_interval_in_seconds: Interval between each retry attempt (in
+     seconds). The default is 30 sec.
+    :type retry_interval_in_seconds: int
+    :param secure_input: When set to true, Input from activity is considered
+     as secure and will not be logged to monitoring.
+    :type secure_input: bool
+    :param secure_output: When set to true, Output from activity is
+     considered as secure and will not be logged to monitoring.
+    :type secure_output: bool
+    """
+
+    _validation = {
+        'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'timeout': {'key': 'timeout', 'type': 'object'},
+        'retry': {'key': 'retry', 'type': 'object'},
+        'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
+        'secure_input': {'key': 'secureInput', 'type': 'bool'},
+        'secure_output': {'key': 'secureOutput', 'type': 'bool'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ActivityPolicy, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.timeout = kwargs.get('timeout', None)
+        self.retry = kwargs.get('retry', None)
+        self.retry_interval_in_seconds = kwargs.get('retry_interval_in_seconds', None)
+        self.secure_input = kwargs.get('secure_input', None)
+        self.secure_output = kwargs.get('secure_output', None)
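A sketch of how this policy is typically populated; the values are illustrative, and `retry_interval_in_seconds` has to stay inside the generated validation bounds (30 to 86400) or msrest raises a validation error when the model is serialized:

    from azure.mgmt.datafactory.models import ActivityPolicy

    policy = ActivityPolicy(
        timeout='0.01:00:00',           # 1 hour, matching the documented pattern
        retry=3,                        # up to three retries
        retry_interval_in_seconds=60,   # within the 30-86400 validation window
        secure_output=True,             # keep activity output out of monitoring logs
    )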
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
new file mode 100644
index 000000000000..52d469679974
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy_py3.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityPolicy(Model):
+    """Execution policy for an activity.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param timeout: Specifies the timeout for the activity to run. The
+     default timeout is 7 days. Type: string (or Expression with resultType
+     string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param retry: Maximum ordinary retry attempts. Default is 0. Type:
+     integer (or Expression with resultType integer), minimum: 0.
+    :type retry: object
+    :param retry_interval_in_seconds: Interval between each retry attempt (in
+     seconds). The default is 30 sec.
+    :type retry_interval_in_seconds: int
+    :param secure_input: When set to true, Input from activity is considered
+     as secure and will not be logged to monitoring.
+    :type secure_input: bool
+    :param secure_output: When set to true, Output from activity is
+     considered as secure and will not be logged to monitoring.
+    :type secure_output: bool
+    """
+
+    _validation = {
+        'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'timeout': {'key': 'timeout', 'type': 'object'},
+        'retry': {'key': 'retry', 'type': 'object'},
+        'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
+        'secure_input': {'key': 'secureInput', 'type': 'bool'},
+        'secure_output': {'key': 'secureOutput', 'type': 'bool'},
+    }
+
+    def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None:
+        super(ActivityPolicy, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.timeout = timeout
+        self.retry = retry
+        self.retry_interval_in_seconds = retry_interval_in_seconds
+        self.secure_input = secure_input
+        self.secure_output = secure_output
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
new file mode 100644
index 000000000000..b5997c9352e1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class Activity(Model):
+    """A pipeline activity.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: ExecutionActivity, ControlActivity
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'}
+    }
+
+    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
+        super(Activity, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.name = name
+        self.description = description
+        self.depends_on = depends_on
+        self.user_properties = user_properties
+        self.type = None
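Since `type` is a server-filled constant, callers construct the concrete subclasses, and only `name` is mandatory here. A sketch wiring two activities together through `depends_on`; the activity names and the choice of WaitActivity are illustrative:

    from azure.mgmt.datafactory.models import ActivityDependency, WaitActivity

    first = WaitActivity(name='WaitABit', wait_time_in_seconds=30)
    second = WaitActivity(
        name='ThenContinue',
        wait_time_in_seconds=5,
        depends_on=[ActivityDependency(activity='WaitABit',
                                       dependency_conditions=['Succeeded'])],
    )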
+ :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py new file mode 100644 index 000000000000..488e822de957 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_run_py3.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ActivityRun(Model): + """Information about an activity run in a pipeline. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar pipeline_name: The name of the pipeline. + :vartype pipeline_name: str + :ivar pipeline_run_id: The id of the pipeline run. + :vartype pipeline_run_id: str + :ivar activity_name: The name of the activity. + :vartype activity_name: str + :ivar activity_type: The type of the activity. + :vartype activity_type: str + :ivar activity_run_id: The id of the activity run. 
+ :vartype activity_run_id: str + :ivar linked_service_name: The name of the compute linked service. + :vartype linked_service_name: str + :ivar status: The status of the activity run. + :vartype status: str + :ivar activity_run_start: The start time of the activity run in 'ISO 8601' + format. + :vartype activity_run_start: datetime + :ivar activity_run_end: The end time of the activity run in 'ISO 8601' + format. + :vartype activity_run_end: datetime + :ivar duration_in_ms: The duration of the activity run. + :vartype duration_in_ms: int + :ivar input: The input for the activity. + :vartype input: object + :ivar output: The output for the activity. + :vartype output: object + :ivar error: The error if any from the activity run. + :vartype error: object + """ + + _validation = { + 'pipeline_name': {'readonly': True}, + 'pipeline_run_id': {'readonly': True}, + 'activity_name': {'readonly': True}, + 'activity_type': {'readonly': True}, + 'activity_run_id': {'readonly': True}, + 'linked_service_name': {'readonly': True}, + 'status': {'readonly': True}, + 'activity_run_start': {'readonly': True}, + 'activity_run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'input': {'readonly': True}, + 'output': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'}, + 'activity_name': {'key': 'activityName', 'type': 'str'}, + 'activity_type': {'key': 'activityType', 'type': 'str'}, + 'activity_run_id': {'key': 'activityRunId', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'}, + 'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'input': {'key': 'input', 'type': 'object'}, + 'output': {'key': 'output', 'type': 'object'}, + 'error': {'key': 'error', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ActivityRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.pipeline_name = None + self.pipeline_run_id = None + self.activity_name = None + self.activity_type = None + self.activity_run_id = None + self.linked_service_name = None + self.status = None + self.activity_run_start = None + self.activity_run_end = None + self.duration_in_ms = None + self.input = None + self.output = None + self.error = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py new file mode 100644 index 000000000000..2fcd25a5ced2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityRunsQueryResponse(Model):
+    """A list of activity runs.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of activity runs.
+    :type value: list[~azure.mgmt.datafactory.models.ActivityRun]
+    :param continuation_token: The continuation token for getting the next
+     page of results, if any remaining results exist, null otherwise.
+    :type continuation_token: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ActivityRun]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ActivityRunsQueryResponse, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
new file mode 100644
index 000000000000..ee3eae141635
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_runs_query_response_py3.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ActivityRunsQueryResponse(Model):
+    """A list of activity runs.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of activity runs.
+    :type value: list[~azure.mgmt.datafactory.models.ActivityRun]
+    :param continuation_token: The continuation token for getting the next
+     page of results, if any remaining results exist, null otherwise.
+    :type continuation_token: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ActivityRun]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None:
+        super(ActivityRunsQueryResponse, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
new file mode 100644
index 000000000000..b1e5ed533bba
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
@@ -0,0 +1,106 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com) + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key id used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.marketplace_id = kwargs.get('marketplace_id', None) + self.seller_id = kwargs.get('seller_id', None) + self.mws_auth_token = kwargs.get('mws_auth_token', None) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_key = kwargs.get('secret_key', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonMWS' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py new file mode 100644 index 000000000000..a8db63933154 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service_py3.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonMWSLinkedService(LinkedService): + """Amazon Marketplace Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Amazon MWS server, (i.e. + mws.amazonservices.com) + :type endpoint: object + :param marketplace_id: Required. The Amazon Marketplace ID you want to + retrieve data from. To retrieve data from multiple Marketplace IDs, + separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + :type marketplace_id: object + :param seller_id: Required. The Amazon seller ID. + :type seller_id: object + :param mws_auth_token: The Amazon MWS authentication token. + :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_key_id: Required. The access key id used to access data. + :type access_key_id: object + :param secret_key: The secret key used to access data. + :type secret_key: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'marketplace_id': {'required': True}, + 'seller_id': {'required': True}, + 'access_key_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'}, + 'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'}, + 'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.marketplace_id = marketplace_id + self.seller_id = seller_id + self.mws_auth_token = mws_auth_token + self.access_key_id = access_key_id + self.secret_key = secret_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'AmazonMWS' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py new file mode 100644 index 000000000000..9885f5c77d8c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. 
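+
+    Editor's sketch: a minimal construction example for this generated model;
+    the reference name and table name below are hypothetical, not part of the
+    generated API::
+
+        from azure.mgmt.datafactory.models import (
+            AmazonMWSObjectDataset, LinkedServiceReference)
+
+        # Point the dataset at a previously defined AmazonMWS linked service.
+        dataset = AmazonMWSObjectDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='ExampleAmazonMWSLinkedService'),
+            table_name='Orders')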
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AmazonMWSObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py new file mode 100644 index 000000000000..015ed9401c15 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonMWSObjectDataset(Dataset): + """Amazon Marketplace Web Service dataset. + + All required parameters must be populated in order to send to Azure. 
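+
+    Editor's sketch: the dotted ``typeProperties.*`` keys in the attribute
+    map nest on the wire; an illustrative serialization, with hypothetical
+    names::
+
+        from azure.mgmt.datafactory.models import (
+            AmazonMWSObjectDataset, LinkedServiceReference)
+
+        body = AmazonMWSObjectDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='ExampleAmazonMWSLinkedService'),
+            table_name='Orders').serialize()
+        # body['type'] is 'AmazonMWSObject', and the table name lands under
+        # body['typeProperties']['tableName'] because of the dotted key.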
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AmazonMWSObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py new file mode 100644 index 000000000000..f9d034e610d4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonMWSSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AmazonMWSSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py new file mode 100644 index 000000000000..9ef7f5b30244 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AmazonMWSSource(CopySource): + """A copy activity Amazon Marketplace Web Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AmazonMWSSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py new file mode 100644 index 000000000000..4272b28c13f5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. 
Type: string (or Expression with resultType string). + :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.database = kwargs.get('database', None) + self.port = kwargs.get('port', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonRedshift' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py new file mode 100644 index 000000000000..3b84583c6c86 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonRedshiftLinkedService(LinkedService): + """Linked service for Amazon Redshift. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The name of the Amazon Redshift server. Type: + string (or Expression with resultType string). + :type server: object + :param username: The username of the Amazon Redshift source. Type: string + (or Expression with resultType string). + :type username: object + :param password: The password of the Amazon Redshift source. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. The database name of the Amazon Redshift + source. Type: string (or Expression with resultType string). + :type database: object + :param port: The TCP port number that the Amazon Redshift server uses to + listen for client connections. The default value is 5439. Type: integer + (or Expression with resultType integer). + :type port: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.username = username + self.password = password + self.database = database + self.port = port + self.encrypted_credential = encrypted_credential + self.type = 'AmazonRedshift' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py new file mode 100644 index 000000000000..d4fdfa4aa2ba --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AmazonRedshiftSource(CopySource): + """A copy activity source for Amazon Redshift Source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, **kwargs): + super(AmazonRedshiftSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.redshift_unload_settings = kwargs.get('redshift_unload_settings', None) + self.type = 'AmazonRedshiftSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py new file mode 100644 index 000000000000..9b34b2ef5b97 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_source_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AmazonRedshiftSource(CopySource): + """A copy activity source for Amazon Redshift Source. 
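+
+    Editor's sketch: a minimal source that unloads through interim S3; the
+    reference name, bucket, and query below are hypothetical::
+
+        from azure.mgmt.datafactory.models import (
+            AmazonRedshiftSource, LinkedServiceReference,
+            RedshiftUnloadSettings)
+
+        # Data is unloaded from Redshift into the interim S3 bucket, then
+        # copied on to the sink.
+        source = AmazonRedshiftSource(
+            query='select * from public.sales',
+            redshift_unload_settings=RedshiftUnloadSettings(
+                s3_linked_service_name=LinkedServiceReference(
+                    reference_name='ExampleAmazonS3LinkedService'),
+                bucket_name='example-interim-bucket'))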
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param redshift_unload_settings: The Amazon S3 settings needed for the + interim Amazon S3 when copying from Amazon Redshift with unload. With + this, data from Amazon Redshift source will be unloaded into S3 first and + then copied into the targeted sink from the interim S3. + :type redshift_unload_settings: + ~azure.mgmt.datafactory.models.RedshiftUnloadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.redshift_unload_settings = redshift_unload_settings + self.type = 'AmazonRedshiftSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py new file mode 100644 index 000000000000..e91a5ba26131 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. 
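+
+    Editor's sketch: a minimal dataset pointing at a single S3 object; the
+    reference name, bucket, and key below are hypothetical::
+
+        from azure.mgmt.datafactory.models import (
+            AmazonS3Dataset, LinkedServiceReference)
+
+        # bucket_name is required; key narrows the dataset to one object.
+        s3_dataset = AmazonS3Dataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='ExampleAmazonS3LinkedService'),
+            bucket_name='example-bucket',
+            key='raw/2019/05/events.csv')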
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Dataset, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.key = kwargs.get('key', None) + self.prefix = kwargs.get('prefix', None) + self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AmazonS3Object' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py new file mode 100644 index 000000000000..d84ae48b2a46 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: + string (or Expression with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression + with resultType string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or + Expression with resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression + with resultType string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 + object. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.bucket_name = bucket_name + self.key = key + self.prefix = prefix + self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AmazonS3Object' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py new file mode 100644 index 000000000000..250518c1a7ec --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. 
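Reviewer note: the `typeProperties.*` keys in the `_attribute_map` above tell the msrest serializer to nest those attributes under a `typeProperties` object on the wire. Roughly, and purely as an illustration inferred from `_attribute_map` (the `referenceName` shape of the linked service reference is an assumption):

# Approximate wire shape implied by the _attribute_map above (illustration only).
payload = {
    'type': 'AmazonS3Object',
    'linkedServiceName': {'referenceName': 'S3LinkedService',
                          'type': 'LinkedServiceReference'},  # shape assumed
    'typeProperties': {
        'bucketName': 'my-bucket',   # from key 'typeProperties.bucketName'
        'key': 'raw/events.csv',     # from key 'typeProperties.key'
    },
}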
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3LinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonS3' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py new file mode 100644 index 000000000000..8d136bb71fc0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
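Reviewer note: a sketch of wiring up the kwargs-based AmazonS3LinkedService above. SecureString (a concrete SecretBase subtype) and its `value` keyword are assumptions; this diff only shows that `secret_access_key` accepts a SecretBase.

# Hypothetical usage sketch; SecureString and its 'value' kwarg are assumed.
from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

s3_linked_service = AmazonS3LinkedService(
    access_key_id='AKIAEXAMPLE',
    secret_access_key=SecureString(value='<access-key>'),
)
assert s3_linked_service.type == 'AmazonS3'  # constant filled by __init__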
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AmazonS3LinkedService(LinkedService): + """Linked service for Amazon S3. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 Identity + and Access Management (IAM) user. Type: string (or Expression with + resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 Identity + and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + S3 Connector. This is an optional property; change it only if you want to + try a different service endpoint or want to switch between https and http. + Type: string (or Expression with resultType string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + self.type = 'AmazonS3' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py new file mode 100644 index 000000000000..74c77a16f0f2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3Location, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py new file mode 100644 index 000000000000..36afce341ada --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_location_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AmazonS3Location(DatasetLocation): + """The location of amazon S3 dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of amazon S3. Type: string (or + Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of amazon S3. Type: string (or + Expression with resultType string). 
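Reviewer note: a small sketch of the AmazonS3Location model just defined (kwargs variant). All keywords come from the constructor above; only the concrete discriminator string passed as `type` is an assumption, since this class leaves `type` caller-supplied.

# Hypothetical usage sketch for the generated AmazonS3Location.
from azure.mgmt.datafactory.models import AmazonS3Location

location = AmazonS3Location(
    type='AmazonS3Location',   # required per _validation; exact value assumed
    bucket_name='my-bucket',
    folder_path='raw/2019',
    file_name='events.csv',
    version='3',               # optional S3 object version
)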
+ :type version: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ 'bucket_name': {'key': 'bucketName', 'type': 'object'},
+ 'version': {'key': 'version', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None:
+ super(AmazonS3Location, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+ self.bucket_name = bucket_name
+ self.version = version
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
new file mode 100644
index 000000000000..4de7e0ebb7b9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class AmazonS3ReadSetting(ConnectorReadSetting):
+ """Amazon S3 read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param prefix: The prefix filter for the S3 object name. Type: string (or
+ Expression with resultType string).
+ :type prefix: object
+ :param enable_partition_discovery: Indicates whether to enable partition
+ discovery.
+ :type enable_partition_discovery: bool
+ :param modified_datetime_start: The start of file's modified datetime.
+ Type: string (or Expression with resultType string).
+ :type modified_datetime_start: object
+ :param modified_datetime_end: The end of file's modified datetime. Type:
+ string (or Expression with resultType string).
+ :type modified_datetime_end: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'prefix': {'key': 'prefix', 'type': 'object'},
+ 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+ 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+ 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AmazonS3ReadSetting, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+ self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.prefix = kwargs.get('prefix', None)
+ self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+ self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+ self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
new file mode 100644
index 000000000000..deda331ea561
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_read_setting_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class AmazonS3ReadSetting(ConnectorReadSetting):
+ """Amazon S3 read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param prefix: The prefix filter for the S3 object name. Type: string (or
+ Expression with resultType string).
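Reviewer note: a sketch of how the kwargs-based AmazonS3ReadSetting above might be populated. All keywords are taken from the constructor in this diff; the concrete `type` discriminator string is an assumption, since the class leaves it caller-supplied.

# Hypothetical usage sketch for the generated AmazonS3ReadSetting.
from azure.mgmt.datafactory.models import AmazonS3ReadSetting

read_settings = AmazonS3ReadSetting(
    type='AmazonS3ReadSetting',          # required per _validation; exact value assumed
    recursive=True,                      # read files under the folder path recursively
    wildcard_folder_path='raw/2019/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-05-01T00:00:00Z',
)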
+ :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3ReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py new file mode 100644 index 000000000000..36a25e959061 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
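Reviewer note: a minimal sketch of the kwargs-based AppendVariableActivity defined above, using only keywords shown in this diff (the expression value is an illustrative placeholder).

# Hypothetical usage sketch for the generated AppendVariableActivity.
from azure.mgmt.datafactory.models import AppendVariableActivity

append_item = AppendVariableActivity(
    name='AppendProcessedFile',            # required per _validation
    variable_name='processedFiles',        # target variable of type Array
    value="@activity('Copy').output",      # static value or Expression
)
assert append_item.type == 'AppendVariable'  # constant filled by __init__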
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AppendVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) + self.type = 'AppendVariable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py new file mode 100644 index 000000000000..4526a6e4a45e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/append_variable_activity_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class AppendVariableActivity(ControlActivity): + """Append value for a Variable of type Array. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Name of the variable whose value needs to be + appended to. + :type variable_name: str + :param value: Value to be appended. 
Could be a static value or Expression + :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: + super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.variable_name = variable_name + self.value = value + self.type = 'AppendVariable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py new file mode 100644 index 000000000000..f0346a76080c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AvroFormat, self).__init__(**kwargs) + self.type = 'AvroFormat' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py new file mode 100644 index 000000000000..35d459c4b2a6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/avro_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
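Reviewer note: a sketch of attaching the AvroFormat model above to a dataset's `format` property. AzureBlobDataset appears later in this patch; the `reference_name` keyword on LinkedServiceReference is an assumption.

# Hypothetical usage sketch: an Avro-formatted blob dataset.
from azure.mgmt.datafactory.models import (
    AvroFormat,
    AzureBlobDataset,
    LinkedServiceReference,
)

avro_blob = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),  # keyword assumed
    folder_path='container/avro',
    format=AvroFormat(),   # serializer/deserializer left at their defaults
)
assert avro_blob.format.type == 'AvroFormat'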
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class AvroFormat(DatasetStorageFormat): + """The data stored in Avro format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'AvroFormat' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py new file mode 100644 index 000000000000..986023308e23 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). 
+ :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBatchLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.access_key = kwargs.get('access_key', None) + self.batch_uri = kwargs.get('batch_uri', None) + self.pool_name = kwargs.get('pool_name', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBatch' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py new file mode 100644 index 000000000000..e7d33dfb342a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBatchLinkedService(LinkedService): + """Azure Batch linked service. + + All required parameters must be populated in order to send to Azure. 
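Reviewer note: a sketch of the kwargs-based AzureBatchLinkedService above, showing its four required type properties. SecureString and the `reference_name` keyword on LinkedServiceReference are assumptions not shown in this diff.

# Hypothetical usage sketch for the generated AzureBatchLinkedService.
from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService,
    LinkedServiceReference,
    SecureString,  # assumed concrete SecretBase subtype
)

batch_ls = AzureBatchLinkedService(
    account_name='mybatchaccount',                                    # required
    batch_uri='https://mybatchaccount.westus2.batch.azure.com',       # required
    pool_name='mypool',                                               # required
    linked_service_name=LinkedServiceReference(reference_name='StorageLinkedService'),  # required
    access_key=SecureString(value='<batch-key>'),
)
assert batch_ls.type == 'AzureBatch'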
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Batch account name. Type: string + (or Expression with resultType string). + :type account_name: object + :param access_key: The Azure Batch account access key. + :type access_key: ~azure.mgmt.datafactory.models.SecretBase + :param batch_uri: Required. The Azure Batch URI. Type: string (or + Expression with resultType string). + :type batch_uri: object + :param pool_name: Required. The Azure Batch pool name. Type: string (or + Expression with resultType string). + :type pool_name: object + :param linked_service_name: Required. The Azure Storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'batch_uri': {'required': True}, + 'pool_name': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, + 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, + 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.access_key = access_key + self.batch_uri = batch_uri + self.pool_name = pool_name + self.linked_service_name = linked_service_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureBatch' diff --git 
a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py new file mode 100644 index 000000000000..01814cf8f9a9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
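Reviewer note: a sketch of a compressed AzureBlobDataset. DatasetGZipCompression is taken from this patch's file listing, but its no-argument constructor and the `reference_name` keyword are assumptions.

# Hypothetical usage sketch: a gzip-compressed blob dataset.
from azure.mgmt.datafactory.models import (
    AzureBlobDataset,
    DatasetGZipCompression,   # constructor shape assumed
    LinkedServiceReference,
)

blob_dataset = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),
    folder_path='container/logs',
    file_name='log.txt.gz',
    compression=DatasetGZipCompression(),  # serialized under typeProperties.compression
)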
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.table_root_location = kwargs.get('table_root_location', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlob' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py new file mode 100644 index 000000000000..706c39deb289 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_dataset_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Blob storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or + Expression with resultType string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression + with resultType string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified + datetime. Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.table_root_location = 
table_root_location + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + self.type = 'AzureBlob' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py new file mode 100644 index 000000000000..0ef62ff7122f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureBlobFSFile' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py new file mode 100644 index 000000000000..82136a683fd3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_dataset_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
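Reviewer note: a sketch of the kwargs-based AzureBlobFSDataset (ADLS Gen2) defined above, using only keywords shown in this diff apart from the assumed `reference_name` keyword.

# Hypothetical usage sketch for the generated AzureBlobFSDataset.
from azure.mgmt.datafactory.models import AzureBlobFSDataset, LinkedServiceReference

adls_gen2_dataset = AzureBlobFSDataset(
    linked_service_name=LinkedServiceReference(reference_name='AdlsGen2LinkedService'),  # required
    folder_path='filesystem/raw',
    file_name='events.json',
)
assert adls_gen2_dataset.type == 'AzureBlobFSFile'  # constant filled by __init__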
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. + Type: string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureBlobFSFile' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py new file mode 100644 index 000000000000..262ce976227b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. 
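For orientation, a minimal usage sketch of the AzureBlobFSDataset model defined above (this sketch is not part of the generated patch; the linked-service name and paths are placeholders, and it assumes the generated package is importable as azure.mgmt.datafactory.models):

    from azure.mgmt.datafactory.models import (
        AzureBlobFSDataset,
        LinkedServiceReference,
    )

    # Reference an existing linked service by name (placeholder name).
    ls_ref = LinkedServiceReference(reference_name='ls_adls_gen2')

    # folder_path and file_name accept plain strings or ADF expression objects.
    dataset = AzureBlobFSDataset(
        linked_service_name=ls_ref,
        folder_path='raw/events',
        file_name='events.json',
    )

    # The constructor pins the discriminator to the server constant.
    assert dataset.type == 'AzureBlobFSFile'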
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.account_key = kwargs.get('account_key', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobFS' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py new file mode 100644 index 000000000000..f0d555078bf7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobFSLinkedService(LinkedService): + """Azure Data Lake Storage Gen2 linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 + service. Type: string (or Expression with resultType string). + :type url: object + :param account_key: Account key for the Azure Data Lake Storage Gen2 + service. 
Type: string (or Expression with resultType string). + :type account_key: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. Type: + string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Storage Gen2 account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.account_key = account_key + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobFS' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py new file mode 100644 index 000000000000..c21525bbac4c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
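A usage sketch for the AzureBlobFSLinkedService model above, assuming service-principal authentication (all identifiers are placeholders, and SecureString is the generated secret model from the same package):

    from azure.mgmt.datafactory.models import (
        AzureBlobFSLinkedService,
        SecureString,
    )

    linked_service = AzureBlobFSLinkedService(
        url='https://myaccount.dfs.core.windows.net',  # required endpoint
        service_principal_id='<application-id>',
        service_principal_key=SecureString(value='<application-key>'),
        tenant='<tenant-id>',
    )

    assert linked_service.type == 'AzureBlobFS'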
+# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). + :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSLocation, self).__init__(**kwargs) + self.file_system = kwargs.get('file_system', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py new file mode 100644 index 000000000000..afbae52fdeb0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobFSLocation(DatasetLocation): + """The location of azure blobFS dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param file_system: Specify the fileSystem of azure blobFS. Type: string + (or Expression with resultType string). 
+ :type file_system: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'file_system': {'key': 'fileSystem', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: + super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.file_system = file_system diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py new file mode 100644 index 000000000000..11490a288417 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobFSReadSetting(ConnectorReadSetting): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
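A sketch of constructing the AzureBlobFSLocation model above. Note that in this version of the generated models the location discriminator is an explicit required argument rather than a server-filled constant; the 'AzureBlobFSLocation' string and all path values below are assumptions, not taken from this patch:

    from azure.mgmt.datafactory.models import AzureBlobFSLocation

    location = AzureBlobFSLocation(
        type='AzureBlobFSLocation',      # assumed discriminator value
        file_system='myfilesystem',
        folder_path='landing/2019',
        file_name='part-0001.csv',
    )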
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py new file mode 100644 index 000000000000..28f3b4f7ceb4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobFSReadSetting(ConnectorReadSetting): + """Azure blobFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobFSReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py new file mode 100644 index 000000000000..a47b173c6581 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'AzureBlobFSSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py new file mode 100644 index 000000000000..e2b28bf30a8c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureBlobFSSink(CopySink): + """A copy activity Azure Data Lake Storage Gen2 sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
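A sketch of the read-settings model defined above, showing a recursive wildcard read (the discriminator string is an assumption based on the class name, and the other values are placeholders):

    from azure.mgmt.datafactory.models import AzureBlobFSReadSetting

    read_settings = AzureBlobFSReadSetting(
        type='AzureBlobFSReadSetting',   # assumed discriminator value
        recursive=True,
        wildcard_folder_path='telemetry/*',
        wildcard_file_name='*.csv',
        enable_partition_discovery=True,
        max_concurrent_connections=4,
    )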
+ :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'AzureBlobFSSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py new file mode 100644 index 000000000000..0252ffd5ba8f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. 
Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSSource, self).__init__(**kwargs) + self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None) + self.skip_header_line_count = kwargs.get('skip_header_line_count', None) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureBlobFSSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py new file mode 100644 index 000000000000..5b512c1f334f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_source_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureBlobFSSource(CopySource): + """A copy activity Azure BlobFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'AzureBlobFSSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py new file mode 100644 index 000000000000..d5b2d850da58 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobFSWriteSetting, self).__init__(**kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py new file mode 100644 index 000000000000..62196ff73838 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_fs_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobFSWriteSetting(ConnectorWriteSetting): + """Azure blobFS write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py new file mode 100644 index 000000000000..5246e02ab9b4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
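Putting the source, sink, and write-setting models above together, a sketch of how they would feed a copy activity (activity and dataset names plus the copy-behavior strings are placeholders; CopyActivity and DatasetReference come from the same generated package):

    from azure.mgmt.datafactory.models import (
        AzureBlobFSSink,
        AzureBlobFSSource,
        AzureBlobFSWriteSetting,
        CopyActivity,
        DatasetReference,
    )

    source = AzureBlobFSSource(recursive=True, skip_header_line_count=1)
    sink = AzureBlobFSSink(copy_behavior='PreserveHierarchy')

    # Write settings are a separate model in this API surface.
    write_settings = AzureBlobFSWriteSetting(
        type='AzureBlobFSWriteSetting',  # assumed discriminator value
        copy_behavior='FlattenHierarchy',
    )

    copy = CopyActivity(
        name='CopyToAdlsGen2',
        inputs=[DatasetReference(reference_name='InputDataset')],
        outputs=[DatasetReference(reference_name='OutputDataset')],
        source=source,
        sink=sink,
    )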
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually + exclusive with connectionString, serviceEndpoint property. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage + resource. It is mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.service_endpoint = kwargs.get('service_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureBlobStorage' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py new file mode 100644 index 000000000000..ba0a511532b4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_linked_service_py3.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureBlobStorageLinkedService(LinkedService): + """The azure blob storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri, serviceEndpoint property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Blob Storage resource. It is mutually + exclusive with connectionString, serviceEndpoint property. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas + uri. + :type sas_token: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_endpoint: Blob service endpoint of the Azure Blob Storage + resource. It is mutually exclusive with connectionString, sasUri property. + :type service_endpoint: str + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Data Warehouse. Type: string (or Expression + with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Data Warehouse. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential: str=None, **kwargs) -> None: + super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.service_endpoint = service_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureBlobStorage' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py new file mode 100644 index 000000000000..1efbbeaec352 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. 
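For the blob-storage linked service above, a sketch using the connection-string option. Per the mutual-exclusion notes in the docstring, exactly one of connection_string, sas_uri, or service_endpoint should be set; the secret value below is a placeholder:

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLinkedService,
        SecureString,
    )

    # connection_string is typed as object and accepts a string,
    # SecureString, or AzureKeyVaultSecretReference.
    blob_ls = AzureBlobStorageLinkedService(
        connection_string=SecureString(
            value='DefaultEndpointsProtocol=https;'
                  'AccountName=<account>;AccountKey=<key>',
        ),
    )

    assert blob_ls.type == 'AzureBlobStorage'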
+ :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). + :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageLocation, self).__init__(**kwargs) + self.container = kwargs.get('container', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py new file mode 100644 index 000000000000..63b122573039 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_location_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class AzureBlobStorageLocation(DatasetLocation): + """The location of azure blob dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param container: Specify the container of azure blob. Type: string (or + Expression with resultType string). 
+ :type container: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'container': {'key': 'container', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None: + super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.container = container diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py new file mode 100644 index 000000000000..ee07a3576f29 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py new file mode 100644 index 000000000000..ee07a3576f29 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of the file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of the file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py new file mode 100644 index 000000000000..3e3d35774a46 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class AzureBlobStorageReadSetting(ConnectorReadSetting): + """Azure blob read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of the file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of the file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureBlobStorageReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end
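A read setting narrows down which blobs a copy activity actually reads. Here is a minimal sketch of the AzureBlobStorageReadSetting above, with an assumed type string and sample values; note that enable_partition_discovery is a plain bool while the other fields may also carry expressions:

    from azure.mgmt.datafactory.models import AzureBlobStorageReadSetting

    read_settings = AzureBlobStorageReadSetting(
        type='AzureBlobStorageReadSetting',  # assumed discriminator value
        recursive=True,                      # walk the folder tree
        wildcard_folder_path='input/2019/*',
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-05-01T00:00:00Z',
        modified_datetime_end='2019-05-29T00:00:00Z',
    )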
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py new file mode 100644 index 000000000000..a6499dfda798 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureBlobStorageWriteSetting, self).__init__(**kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py new file mode 100644 index 000000000000..9abb68c06055 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_blob_storage_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureBlobStorageWriteSetting(ConnectorWriteSetting): + """Azure blob write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobStorageWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
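The write settings only add copy behavior on top of the shared connector fields. A minimal sketch for AzureBlobStorageWriteSetting; the type string and the 'PreserveHierarchy' value are assumptions (the patch types copy_behavior as a bare 'object'):

    from azure.mgmt.datafactory.models import AzureBlobStorageWriteSetting

    write_settings = AzureBlobStorageWriteSetting(
        type='AzureBlobStorageWriteSetting',  # assumed discriminator value
        max_concurrent_connections=4,
        copy_behavior='PreserveHierarchy',    # assumed sample value
    )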
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py new file mode 100644 index 000000000000..5e5a9f7560c6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or + Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.database = kwargs.get('database', None) + self.tenant = kwargs.get('tenant', None) + self.type = 'AzureDataExplorer' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py new file mode 100644 index 000000000000..3cd8ab9c3c19 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of Azure Data Explorer (the + engine's endpoint). URL will be in the format + https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or + Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal + used to authenticate against Azure Data Explorer. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal + used to authenticate against Kusto. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant + self.type = 'AzureDataExplorer'
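All five type properties of the Azure Data Explorer linked service are required, while 'type' itself is a constant pinned by the constructor. A minimal sketch; SecureString is assumed to be the usual SecretBase implementation in this models package, and every value is a placeholder:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerLinkedService,
        SecureString,  # assumed SecretBase implementation
    )

    adx_ls = AzureDataExplorerLinkedService(
        endpoint='https://mycluster.westus2.kusto.windows.net',
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='placeholder-key'),
        database='telemetry',
        tenant='contoso.onmicrosoft.com',
    )
    # No 'type' argument is passed: __init__ sets self.type = 'AzureDataExplorer'.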
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py new file mode 100644 index 000000000000..5c204ab769e4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created CSV mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a JSON format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSink, self).__init__(**kwargs) + self.ingestion_mapping_name = kwargs.get('ingestion_mapping_name', None) + self.ingestion_mapping_as_json = kwargs.get('ingestion_mapping_as_json', None) + self.flush_immediately = kwargs.get('flush_immediately', None) + self.type = 'AzureDataExplorerSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py new file mode 100644 index 000000000000..e5cb67bc79b8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_sink_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param ingestion_mapping_name: A name of a pre-created CSV mapping that + was defined on the target Kusto table. Type: string. + :type ingestion_mapping_name: object + :param ingestion_mapping_as_json: An explicit column mapping description + provided in a JSON format. Type: string. + :type ingestion_mapping_as_json: object + :param flush_immediately: If set to true, any aggregation will be skipped. + Default is false. Type: boolean. + :type flush_immediately: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, + 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, + 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ingestion_mapping_name = ingestion_mapping_name + self.ingestion_mapping_as_json = ingestion_mapping_as_json + self.flush_immediately = flush_immediately + self.type = 'AzureDataExplorerSink'
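The sink inherits the generic batching and retry knobs from CopySink; the ADX-specific fields select how rows are mapped into the Kusto table. A minimal sketch with assumed values:

    from azure.mgmt.datafactory.models import AzureDataExplorerSink

    sink = AzureDataExplorerSink(
        ingestion_mapping_name='events_csv_mapping',  # pre-created mapping on the target table
        flush_immediately=True,                       # skip server-side aggregation
        write_batch_timeout='00:05:00',               # matches the documented timespan pattern
    )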
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py new file mode 100644 index 000000000000..2caaa517efd5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.no_truncation = kwargs.get('no_truncation', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'AzureDataExplorerSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py new file mode 100644 index 000000000000..55a6bc78ee04 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataExplorerSource(CopySource): + """A copy activity Azure Data Explorer (Kusto) source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Required. Database query. Should be a Kusto Query Language + (KQL) query. Type: string (or Expression with resultType string). + :type query: object + :param no_truncation: The name of the Boolean option that controls whether + truncation is applied to result-sets that go beyond a certain row-count + limit. + :type no_truncation: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.no_truncation = no_truncation + self.query_timeout = query_timeout + self.type = 'AzureDataExplorerSource'
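Only the KQL query is required on the source. A minimal sketch with an assumed query and timeout:

    from azure.mgmt.datafactory.models import AzureDataExplorerSource

    source = AzureDataExplorerSource(
        query='Events | where Timestamp > ago(1d) | take 1000',  # KQL text
        query_timeout='00:10:00',  # hh:mm:ss, per the documented pattern
        no_truncation=True,        # lift the default row-count limit on results
    )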
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py new file mode 100644 index 000000000000..594d22171f48 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataExplorerTableDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) + self.type = 'AzureDataExplorerTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py new file mode 100644 index 000000000000..d36b0f39c2fe --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_explorer_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataExplorerTableDataset(Dataset): + """The Azure Data Explorer (Kusto) dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The table name of the Azure Data Explorer database. Type: + string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None: + super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.type = 'AzureDataExplorerTable'
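Datasets point at a linked service by reference rather than embedding it. A minimal sketch for the table dataset above; LinkedServiceReference comes from the same models package, and the reference name and table are assumed:

    from azure.mgmt.datafactory.models import (
        AzureDataExplorerTableDataset,
        LinkedServiceReference,
    )

    adx_dataset = AzureDataExplorerTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureDataExplorerLS'),
        table='Events',  # serialized under 'typeProperties.table'
    )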
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py new file mode 100644 index 000000000000..0381e1b1de65 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Data Lake Analytics account name. + Type: string (or Expression with resultType string). + :type account_name: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Analytics account. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the application used to + authenticate against the Azure Data Lake Analytics account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group + name (if different from Data Factory account). Type: string (or Expression + with resultType string). + :type resource_group_name: object + :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type: string + (or Expression with resultType string). + :type data_lake_analytics_uri: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeAnalyticsLinkedService, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.data_lake_analytics_uri = kwargs.get('data_lake_analytics_uri', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDataLakeAnalytics'
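For Data Lake Analytics, only account_name and tenant are required at the model level; the service principal fields stay optional. A minimal sketch with assumed values:

    from azure.mgmt.datafactory.models import (
        AzureDataLakeAnalyticsLinkedService,
        SecureString,  # assumed SecretBase implementation
    )

    adla_ls = AzureDataLakeAnalyticsLinkedService(
        account_name='myadlaaccount',
        tenant='contoso.onmicrosoft.com',
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='placeholder-key'),
        resource_group_name='analytics-rg',  # only needed if it differs from the factory's
    )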
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py new file mode 100644 index 000000000000..93250e2cef76 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataLakeAnalyticsLinkedService(LinkedService): + """Azure Data Lake Analytics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param account_name: Required. The Azure Data Lake Analytics account name. + Type: string (or Expression with resultType string). + :type account_name: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Analytics account. Type: string + (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the application used to + authenticate against the Azure Data Lake Analytics account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: Required. The name or ID of the tenant to which the service + principal belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param subscription_id: Data Lake Analytics account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Analytics account resource group + name (if different from Data Factory account). Type: string (or Expression + with resultType string). + :type resource_group_name: object + :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type: string + (or Expression with resultType string). + :type data_lake_analytics_uri: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.account_name = account_name + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.data_lake_analytics_uri = data_lake_analytics_uri + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeAnalytics' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py new file mode 100644 index 000000000000..de15057f78ed --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset.
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in + the Azure Data Lake Store. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + self.type = 'AzureDataLakeStoreFile' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py new file mode 100644 index 000000000000..d2df0ffebe7e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_dataset_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: + string (or Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in + the Azure Data Lake Store. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: + super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + self.type = 'AzureDataLakeStoreFile'
+ :type type: str + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Store account. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Store account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name + (if different from Data Factory account). Type: string (or Expression with + resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'data_lake_store_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreLinkedService, self).__init__(**kwargs) + self.data_lake_store_uri = kwargs.get('data_lake_store_uri', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.account_name = kwargs.get('account_name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group_name = kwargs.get('resource_group_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDataLakeStore' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py 
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py new file mode 100644 index 000000000000..7b8ab293c0cf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDataLakeStoreLinkedService(LinkedService): + """Azure Data Lake Store linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param data_lake_store_uri: Required. Data Lake Store service URI. Type: + string (or Expression with resultType string). + :type data_lake_store_uri: object + :param service_principal_id: The ID of the application used to + authenticate against the Azure Data Lake Store account. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The Key of the application used to + authenticate against the Azure Data Lake Store account. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param account_name: Data Lake Store account name. Type: string (or + Expression with resultType string). + :type account_name: object + :param subscription_id: Data Lake Store account subscription ID (if + different from Data Factory account). Type: string (or Expression with + resultType string). + :type subscription_id: object + :param resource_group_name: Data Lake Store account resource group name + (if different from Data Factory account). Type: string (or Expression with + resultType string). + :type resource_group_name: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'data_lake_store_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, + 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, + 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.data_lake_store_uri = data_lake_store_uri + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.account_name = account_name + self.subscription_id = subscription_id + self.resource_group_name = resource_group_name + self.encrypted_credential = encrypted_credential + self.type = 'AzureDataLakeStore' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py new file mode 100644 index 000000000000..a4bf521a2005 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class AzureDataLakeStoreLocation(DatasetLocation): + """The location of azure data lake store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. 
Type: string (or
+     Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreLocation, self).__init__(**kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
new file mode 100644
index 000000000000..e7955731fc31
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_location_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class AzureDataLakeStoreLocation(DatasetLocation):
+    """The location of an Azure Data Lake Store dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+        super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py
new file mode 100644
index 000000000000..0f0dfe7f7c58
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
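+#
+# Example (illustrative sketch only, not part of the generated code): read
+# settings that recurse a folder and match files by wildcard. The 'type'
+# discriminator string and the wildcard value are assumptions.
+#
+#   from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSetting
+#
+#   read_settings = AzureDataLakeStoreReadSetting(
+#       type='AzureDataLakeStoreReadSetting',
+#       recursive=True,
+#       wildcard_file_name='*.csv')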
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class AzureDataLakeStoreReadSetting(ConnectorReadSetting):
+    """Azure Data Lake Store read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: ADLS wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of the file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of the file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureDataLakeStoreReadSetting, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
+        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py
new file mode 100644
index 000000000000..b9159463d681
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_read_setting_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class AzureDataLakeStoreReadSetting(ConnectorReadSetting):
+    """Azure Data Lake Store read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: ADLS wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param enable_partition_discovery: Indicates whether to enable partition
+     discovery.
+    :type enable_partition_discovery: bool
+    :param modified_datetime_start: The start of the file's modified datetime.
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureDataLakeStoreReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py new file mode 100644 index 000000000000..e882698c2ca6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. + :type enable_adls_single_file_parallel: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.enable_adls_single_file_parallel = kwargs.get('enable_adls_single_file_parallel', None) + self.type = 'AzureDataLakeStoreSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py new file mode 100644 index 000000000000..0f96cea725e2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_sink_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureDataLakeStoreSink(CopySink): + """A copy activity Azure Data Lake Store sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param enable_adls_single_file_parallel: Single File Parallel. + :type enable_adls_single_file_parallel: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.enable_adls_single_file_parallel = enable_adls_single_file_parallel + self.type = 'AzureDataLakeStoreSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py new file mode 100644 index 000000000000..9d2046049a30 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.type = 'AzureDataLakeStoreSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py new file mode 100644 index 000000000000..e1d883972220 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureDataLakeStoreSource(CopySource): + """A copy activity Azure Data Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.type = 'AzureDataLakeStoreSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py new file mode 100644 index 000000000000..d7875f545e77 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDataLakeStoreWriteSetting, self).__init__(**kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py new file mode 100644 index 000000000000..e05ddcbaeaac --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_data_lake_store_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class AzureDataLakeStoreWriteSetting(ConnectorWriteSetting): + """Azure data lake store write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py new file mode 100644 index 000000000000..6cc4c12674cb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
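+#
+# Example (illustrative sketch only, not part of the generated code): a
+# linked service that provisions a new job cluster per activity run. The
+# domain, token, and cluster settings are hypothetical placeholders.
+#
+#   from azure.mgmt.datafactory.models import (
+#       AzureDatabricksLinkedService, SecureString)
+#
+#   databricks = AzureDatabricksLinkedService(
+#       domain='https://eastus.azuredatabricks.net',
+#       access_token=SecureString(value='<access-token>'),
+#       new_cluster_version='5.3.x-scala2.11',
+#       new_cluster_num_of_worker='1:4',
+#       new_cluster_node_type='Standard_DS3_v2')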
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. .azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer + to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string (or Expression with resultType string). + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param existing_cluster_id: The id of an existing cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type existing_cluster_id: object + :param new_cluster_version: The Spark version of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: Number of worker nodes that new cluster + should have. A string formatted Int32, like '1' means numOfWorker is 1 or + '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or + Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node types of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark + configuration key-value pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark + environment variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.access_token = kwargs.get('access_token', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDatabricks' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py new file mode 100644 index 000000000000..6299dac1e3f2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service_py3.py @@ -0,0 +1,126 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. .azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer + to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string (or Expression with resultType string). + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param existing_cluster_id: The id of an existing cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type existing_cluster_id: object + :param new_cluster_version: The Spark version of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: Number of worker nodes that new cluster + should have. A string formatted Int32, like '1' means numOfWorker is 1 or + '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or + Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node types of new cluster. Type: string + (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark + configuration key-value pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark + environment variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_driver_node_type: The driver node type for the new + cluster. Type: string (or Expression with resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for + the new cluster. Type: array of strings (or Expression with resultType + array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new + cluster. Type: boolean (or Expression with resultType boolean). 
+ :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.domain = domain + self.access_token = access_token + self.existing_cluster_id = existing_cluster_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential + self.type = 'AzureDatabricks' diff --git 
a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py
new file mode 100644
index 000000000000..68b02e5f771f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class AzureFunctionActivity(ExecutionActivity):
+    """Azure Function activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param method: Required. REST API method for target endpoint. Possible
+     values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE'
+    :type method: str or
+     ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod
+    :param function_name: Required. Name of the Function that the Azure
+     Function Activity will call. Type: string (or Expression with resultType
+     string).
+    :type function_name: object
+    :param headers: Represents the headers that will be sent to the request.
+     For example, to set the language and type on a request: "headers" : {
+     "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
+     string (or Expression with resultType string).
+    :type headers: object
+    :param body: Represents the payload that will be sent to the endpoint.
+     Required for POST/PUT method, not allowed for GET method. Type: string (or
+     Expression with resultType string).
+ :type body: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFunctionActivity, self).__init__(**kwargs) + self.method = kwargs.get('method', None) + self.function_name = kwargs.get('function_name', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.type = 'AzureFunctionActivity' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py new file mode 100644 index 000000000000..95bb1ca260e7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_activity_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureFunctionActivity(ExecutionActivity): + """Azure Function activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. REST API method for the target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' + :type method: str or + ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod + :param function_name: Required. Name of the Function that the Azure + Function Activity will call.
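A minimal usage sketch for the activity defined above (the activity, function, and linked-service names are hypothetical; the POST body follows the docstring's rule that POST/PUT calls require a payload):

from azure.mgmt.datafactory.models import AzureFunctionActivity, LinkedServiceReference

# Hypothetical names throughout; 'POST' is one of the documented method values.
activity = AzureFunctionActivity(
    name='CallHttpTrigger',
    method='POST',
    function_name='HttpTriggerFn',
    headers={'Content-Type': 'application/json'},
    body='{"greeting": "hello"}',
    linked_service_name=LinkedServiceReference(reference_name='MyAzureFunctionLS'),
)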
Type: string (or Expression with resultType + string). + :type function_name: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method. Type: string (or + Expression with resultType string). + :type body: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'function_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + } + + def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: + super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.function_name = function_name + self.headers = headers + self.body = body + self.type = 'AzureFunctionActivity' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py new file mode 100644 index 000000000000..2ed5b870a778 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference.
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param function_app_url: Required. The endpoint of the Azure Function App. + URL will be in the format https://<accountName>.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'function_app_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFunctionLinkedService, self).__init__(**kwargs) + self.function_app_url = kwargs.get('function_app_url', None) + self.function_key = kwargs.get('function_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureFunction' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py new file mode 100644 index 000000000000..a1bfdbe8b6c1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_function_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureFunctionLinkedService(LinkedService): + """Azure Function linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference.
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param function_app_url: Required. The endpoint of the Azure Function App. + URL will be in the format https://<accountName>.azurewebsites.net. + :type function_app_url: object + :param function_key: Function or Host key for Azure Function App. + :type function_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'function_app_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, + 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.function_app_url = function_app_url + self.function_key = function_key + self.encrypted_credential = encrypted_credential + self.type = 'AzureFunction' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py new file mode 100644 index 000000000000..768f0d83ae93 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. + + All required parameters must be populated in order to send to Azure.
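A construction sketch for the Azure Function linked service defined above (the function app URL and key value are hypothetical; SecureString is assumed here as the simplest SecretBase implementation in this models package):

from azure.mgmt.datafactory.models import AzureFunctionLinkedService, SecureString

# Hypothetical app name and key; function_key accepts any SecretBase.
function_ls = AzureFunctionLinkedService(
    function_app_url='https://myfuncapp.azurewebsites.net',
    function_key=SecureString(value='<function-or-host-key>'),
)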
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param base_url: Required. The base URL of the Azure Key Vault. e.g. + https://myakv.vault.azure.net. Type: string (or Expression with resultType + string). + :type base_url: object + """ + + _validation = { + 'type': {'required': True}, + 'base_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureKeyVaultLinkedService, self).__init__(**kwargs) + self.base_url = kwargs.get('base_url', None) + self.type = 'AzureKeyVault' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py new file mode 100644 index 000000000000..50f4a58a5a1b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_linked_service_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureKeyVaultLinkedService(LinkedService): + """Azure Key Vault linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param base_url: Required. The base URL of the Azure Key Vault. e.g.
+ https://myakv.vault.azure.net. Type: string (or Expression with resultType + string). + :type base_url: object + """ + + _validation = { + 'type': {'required': True}, + 'base_url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, + } + + def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.base_url = base_url + self.type = 'AzureKeyVault' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py new file mode 100644 index 000000000000..28d3e7d31cee --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base import SecretBase + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The + default value is the latest version of the secret. Type: string (or + Expression with resultType string).
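As a sketch, the Key Vault linked service defined above needs only the vault's base URL (the vault name is hypothetical):

from azure.mgmt.datafactory.models import AzureKeyVaultLinkedService

# The URL format follows the docstring's example.
kv_ls = AzureKeyVaultLinkedService(base_url='https://myakv.vault.azure.net')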
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = kwargs.get('store', None) + self.secret_name = kwargs.get('secret_name', None) + self.secret_version = kwargs.get('secret_version', None) + self.type = 'AzureKeyVaultSecret' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py new file mode 100644 index 000000000000..c5fe4c7afbd4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_key_vault_secret_reference_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base_py3 import SecretBase + + +class AzureKeyVaultSecretReference(SecretBase): + """Azure Key Vault secret reference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param store: Required. The Azure Key Vault linked service reference. + :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference + :param secret_name: Required. The name of the secret in Azure Key Vault. + Type: string (or Expression with resultType string). + :type secret_name: object + :param secret_version: The version of the secret in Azure Key Vault. The + default value is the latest version of the secret. Type: string (or + Expression with resultType string). 
+ :type secret_version: object + """ + + _validation = { + 'type': {'required': True}, + 'store': {'required': True}, + 'secret_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, + 'secret_name': {'key': 'secretName', 'type': 'object'}, + 'secret_version': {'key': 'secretVersion', 'type': 'object'}, + } + + def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: + super(AzureKeyVaultSecretReference, self).__init__(**kwargs) + self.store = store + self.secret_name = secret_name + self.secret_version = secret_version + self.type = 'AzureKeyVaultSecret' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py new file mode 100644 index 000000000000..f6c7c75a1299 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param global_parameters: Key/Value pairs to be passed to the Azure ML + Batch Execution Service endpoint. Keys must match the names of web service + parameters defined in the published Azure ML web service. Values will be + passed in the GlobalParameters property of the Azure ML batch execution + request. + :type global_parameters: dict[str, object] + :param web_service_outputs: Key/Value pairs, mapping the names of Azure ML + endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying + the output Blob locations. This information will be passed in the + WebServiceOutputs property of the Azure ML batch execution request.
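A sketch of the secret reference defined above, pointing at a previously registered Key Vault linked service (both names are hypothetical):

from azure.mgmt.datafactory.models import AzureKeyVaultSecretReference, LinkedServiceReference

secret = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
    secret_name='db-password',
    # secret_version omitted: per the docstring, the latest version is used.
)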
+ :type web_service_outputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :param web_service_inputs: Key/Value pairs, mapping the names of Azure ML + endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying + the input Blob locations. This information will be passed in the + WebServiceInputs property of the Azure ML batch execution request. + :type web_service_inputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + } + + def __init__(self, **kwargs): + super(AzureMLBatchExecutionActivity, self).__init__(**kwargs) + self.global_parameters = kwargs.get('global_parameters', None) + self.web_service_outputs = kwargs.get('web_service_outputs', None) + self.web_service_inputs = kwargs.get('web_service_inputs', None) + self.type = 'AzureMLBatchExecution' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py new file mode 100644 index 000000000000..e273c0b38128 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_batch_execution_activity_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureMLBatchExecutionActivity(ExecutionActivity): + """Azure ML Batch Execution activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference.
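A minimal sketch wiring up the batch execution activity's inputs (all names are hypothetical; AzureMLWebServiceFile is defined later in this patch):

from azure.mgmt.datafactory.models import (
    AzureMLBatchExecutionActivity, AzureMLWebServiceFile, LinkedServiceReference)

activity = AzureMLBatchExecutionActivity(
    name='ScoreBatch',
    linked_service_name=LinkedServiceReference(reference_name='MyAzureMLLS'),
    # Keys must match the web service parameter/input names (see docstring).
    global_parameters={'Threshold': 0.8},
    web_service_inputs={
        'input1': AzureMLWebServiceFile(
            file_path='mycontainer/input.csv',
            linked_service_name=LinkedServiceReference(reference_name='MyBlobLS')),
    },
)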
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param global_parameters: Key/Value pairs to be passed to the Azure ML + Batch Execution Service endpoint. Keys must match the names of web service + parameters defined in the published Azure ML web service. Values will be + passed in the GlobalParameters property of the Azure ML batch execution + request. + :type global_parameters: dict[str, object] + :param web_service_outputs: Key/Value pairs, mapping the names of Azure ML + endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying + the output Blob locations. This information will be passed in the + WebServiceOutputs property of the Azure ML batch execution request. + :type web_service_outputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + :param web_service_inputs: Key/Value pairs, mapping the names of Azure ML + endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying + the input Blob locations. This information will be passed in the + WebServiceInputs property of the Azure ML batch execution request. + :type web_service_inputs: dict[str, + ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, + 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, + 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: + super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.global_parameters = global_parameters + self.web_service_outputs = web_service_outputs + self.web_service_inputs = web_service_inputs + self.type = 'AzureMLBatchExecution' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py new file mode 100644 index 000000000000..08dfec98a6bf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information.
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureMLLinkedService(LinkedService): + """Azure ML Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. + :type api_key: ~azure.mgmt.datafactory.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure + ML Web Service endpoint. Type: string (or Expression with resultType + string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
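A construction sketch for this linked service (the endpoint URL and key are hypothetical placeholders; api_key accepts any SecretBase, assumed here to be SecureString):

from azure.mgmt.datafactory.models import AzureMLLinkedService, SecureString

ml_ls = AzureMLLinkedService(
    ml_endpoint='https://<region>.services.azureml.net/workspaces/<ws>/services/<svc>/jobs',
    api_key=SecureString(value='<api-key>'),
)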
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMLLinkedService, self).__init__(**kwargs) + self.ml_endpoint = kwargs.get('ml_endpoint', None) + self.api_key = kwargs.get('api_key', None) + self.update_resource_endpoint = kwargs.get('update_resource_endpoint', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureML' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py new file mode 100644 index 000000000000..c77a692adc03 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_linked_service_py3.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMLLinkedService(LinkedService): + """Azure ML Web Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param ml_endpoint: Required.
The Batch Execution REST URL for an Azure ML + Web Service endpoint. Type: string (or Expression with resultType string). + :type ml_endpoint: object + :param api_key: Required. The API key for accessing the Azure ML model + endpoint. + :type api_key: ~azure.mgmt.datafactory.models.SecretBase + :param update_resource_endpoint: The Update Resource REST URL for an Azure + ML Web Service endpoint. Type: string (or Expression with resultType + string). + :type update_resource_endpoint: object + :param service_principal_id: The ID of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against the ARM-based updateResourceEndpoint of an Azure ML + web service. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'ml_endpoint': {'required': True}, + 'api_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, + 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, + 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.ml_endpoint = ml_endpoint + self.api_key = api_key + self.update_resource_endpoint = update_resource_endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureML' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py new file mode 
100644 index 000000000000..c47a2d81648e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required. Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with + resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. + :type trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in + trainedModelLinkedService to represent the .ilearner file that will be + uploaded by the update operation. Type: string (or Expression with + resultType string).
+ :type trained_model_file_path: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMLUpdateResourceActivity, self).__init__(**kwargs) + self.trained_model_name = kwargs.get('trained_model_name', None) + self.trained_model_linked_service_name = kwargs.get('trained_model_linked_service_name', None) + self.trained_model_file_path = kwargs.get('trained_model_file_path', None) + self.type = 'AzureMLUpdateResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py new file mode 100644 index 000000000000..50a5932f0bf0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_update_resource_activity_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class AzureMLUpdateResourceActivity(ExecutionActivity): + """Azure ML Update Resource management activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param trained_model_name: Required.
Name of the Trained Model module in + the Web Service experiment to be updated. Type: string (or Expression with + resultType string). + :type trained_model_name: object + :param trained_model_linked_service_name: Required. Name of Azure Storage + linked service holding the .ilearner file that will be uploaded by the + update operation. + :type trained_model_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param trained_model_file_path: Required. The relative file path in + trainedModelLinkedService to represent the .ilearner file that will be + uploaded by the update operation. Type: string (or Expression with + resultType string). + :type trained_model_file_path: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'trained_model_name': {'required': True}, + 'trained_model_linked_service_name': {'required': True}, + 'trained_model_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'}, + 'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'}, + } + + def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.trained_model_name = trained_model_name + self.trained_model_linked_service_name = trained_model_linked_service_name + self.trained_model_file_path = trained_model_file_path + self.type = 'AzureMLUpdateResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py new file mode 100644 index 000000000000..682b24fed830 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AzureMLWebServiceFile(Model): + """Azure ML WebService Input/Output file. 
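A sketch of the update-resource activity defined above (names are hypothetical; the .ilearner path illustrates the trainedModelFilePath described in the docstring):

from azure.mgmt.datafactory.models import AzureMLUpdateResourceActivity, LinkedServiceReference

activity = AzureMLUpdateResourceActivity(
    name='UpdateTrainedModel',
    trained_model_name='Trained Model',
    trained_model_linked_service_name=LinkedServiceReference(reference_name='MyStorageLS'),
    trained_model_file_path='azuremlmodels/retrained.ilearner',
)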
+ + All required parameters must be populated in order to send to Azure. + + :param file_path: Required. The relative file path, including container + name, in the Azure Blob Storage specified by the LinkedService. Type: + string (or Expression with resultType string). + :type file_path: object + :param linked_service_name: Required. Reference to an Azure Storage + LinkedService, where the Azure ML WebService Input/Output file is located. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, **kwargs): + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = kwargs.get('file_path', None) + self.linked_service_name = kwargs.get('linked_service_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py new file mode 100644 index 000000000000..abe75d9d9bf2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_ml_web_service_file_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class AzureMLWebServiceFile(Model): + """Azure ML WebService Input/Output file. + + All required parameters must be populated in order to send to Azure. + + :param file_path: Required. The relative file path, including container + name, in the Azure Blob Storage specified by the LinkedService. Type: + string (or Expression with resultType string). + :type file_path: object + :param linked_service_name: Required. Reference to an Azure Storage + LinkedService, where the Azure ML WebService Input/Output file is located.
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'file_path': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'file_path': {'key': 'filePath', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, file_path, linked_service_name, **kwargs) -> None: + super(AzureMLWebServiceFile, self).__init__(**kwargs) + self.file_path = file_path + self.linked_service_name = linked_service_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py new file mode 100644 index 000000000000..aedbdbb73eb5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
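Worth noting for all of these generated models: _attribute_map keys such as 'typeProperties.baseUrl' tell the msrest serializer to nest that attribute under a typeProperties object on the wire. A sketch, assuming msrest's Model.serialize() behaves as in other generated Azure SDKs:

from azure.mgmt.datafactory.models import AzureKeyVaultLinkedService

payload = AzureKeyVaultLinkedService(base_url='https://myakv.vault.azure.net').serialize()
# Expected shape:
# {'type': 'AzureKeyVault',
#  'typeProperties': {'baseUrl': 'https://myakv.vault.azure.net'}}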
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureMySql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py new file mode 100644 index 000000000000..57692275f564 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureMySqlLinkedService(LinkedService): + """Azure MySQL database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzureMySql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py new file mode 100644 index 000000000000..823336432567 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
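To illustrate the AzureMySqlLinkedService model completed above: a hedged sketch that pulls the password from Key Vault via AzureKeyVaultSecretReference (server, database, and linked service names are placeholders; the secret reference's store/secret_name signature is my reading of the same generated models):

```python
# Minimal sketch (placeholder values): Azure MySQL linked service with the
# password resolved from Key Vault at runtime.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    AzureMySqlLinkedService,
    LinkedServiceReference,
)

mysql_ls = AzureMySqlLinkedService(
    connection_string='Server=myserver.mysql.database.azure.com;Database=mydb;UID=myuser',
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLinkedService'),
        secret_name='mysql-password',
    ),
)
```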
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzureMySqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py new file mode 100644 index 000000000000..7030738d2615 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureMySqlSource(CopySource): + """A copy activity Azure MySQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzureMySqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py new file mode 100644 index 000000000000..8f5d43478089 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). 
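A sketch exercising the AzureMySqlSource model above. The retry wait follows the timespan pattern quoted in the docstring; the serialize() call assumes msrest's Model.serialize() helper, which, if I recall its behavior correctly, also emits the constant type discriminator:

```python
# Minimal sketch: a MySQL copy source with a literal query and retry settings.
from azure.mgmt.datafactory.models import AzureMySqlSource

mysql_source = AzureMySqlSource(
    query='SELECT id, name FROM customers',
    source_retry_count=3,
    source_retry_wait='00:00:30',  # matches the documented timespan pattern
)

# Assumption: msrest serializes the polymorphic discriminator for us,
# producing something like {'type': 'AzureMySqlSource', 'query': ...}.
print(mysql_source.serialize())
```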
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureMySqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureMySqlTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py new file mode 100644 index 000000000000..7bd7eb6f17f8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_my_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureMySqlTableDataset(Dataset): + """The Azure MySQL database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Azure MySQL database table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureMySqlTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py new file mode 100644 index 000000000000..92359d6d6a10 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
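The AzureMySqlTableDataset above types table_name as a bare object; per the "string (or Expression with resultType string)" convention in these docstrings, that means either a literal or a Data Factory expression payload fits. A sketch of both forms (names are placeholders):

```python
# Minimal sketch: literal vs. expression-valued table_name.
from azure.mgmt.datafactory.models import (
    AzureMySqlTableDataset,
    LinkedServiceReference,
)

ls_ref = LinkedServiceReference(reference_name='AzureMySqlLinkedService')

literal_dataset = AzureMySqlTableDataset(
    linked_service_name=ls_ref,
    table_name='customers',
)

parameterized_dataset = AzureMySqlTableDataset(
    linked_service_name=ls_ref,
    # The standard ADF expression envelope, resolved at run time.
    table_name={'value': '@dataset().tableName', 'type': 'Expression'},
)
```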
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzurePostgreSql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py new file mode 100644 index 000000000000..47f8f17980f8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzurePostgreSqlLinkedService(LinkedService): + """Azure PostgreSQL linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzurePostgreSql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py new file mode 100644 index 000000000000..e0cd62fd8028 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
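One design difference worth noting in the AzurePostgreSqlLinkedService above: unlike AzureMySqlLinkedService, its _validation table does not mark connection_string as required, so the constructor accepts it as an optional keyword. A minimal sketch with a placeholder ODBC-style string:

```python
# Minimal sketch (placeholder server/database): connection_string is optional
# here, matching the _validation table above.
from azure.mgmt.datafactory.models import AzurePostgreSqlLinkedService

pg_ls = AzurePostgreSqlLinkedService(
    connection_string='host=myserver.postgres.database.azure.com;port=5432;database=mydb;uid=myuser',
)
```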
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'AzurePostgreSqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py new file mode 100644 index 000000000000..0362b0dca390 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzurePostgreSqlSource(CopySource): + """A copy activity Azure PostgreSQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'AzurePostgreSqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py new file mode 100644 index 000000000000..8960acc0df75 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
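To show where a copy source like the AzurePostgreSqlSource above actually plugs in, here is a hedged sketch wiring it into a CopyActivity (CopyActivity, BlobSink, and DatasetReference come from the same models package; the activity and dataset names are placeholders, and the constructor signature is my reading of the generated code rather than anything in this hunk):

```python
# Minimal sketch (placeholder names): a copy activity reading from PostgreSQL
# and writing to blob storage.
from azure.mgmt.datafactory.models import (
    AzurePostgreSqlSource,
    BlobSink,
    CopyActivity,
    DatasetReference,
)

copy_orders = CopyActivity(
    name='CopyFromPostgres',
    source=AzurePostgreSqlSource(query='SELECT * FROM public.orders'),
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='PostgresOrders')],
    outputs=[DatasetReference(reference_name='OrdersBlob')],
)
```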
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzurePostgreSqlTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py new file mode 100644 index 000000000000..fddf0720c565 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_postgre_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzurePostgreSqlTableDataset(Dataset): + """Azure PostgreSQL dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzurePostgreSqlTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py new file mode 100644 index 000000000000..9f3a63db4978 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
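The _validation tables in these models are enforced by msrest rather than by the constructors. Assuming msrest's Model.validate() helper (an assumption on my part; it returns a list of validation errors), an AzurePostgreSqlTableDataset built without its required linked service should be flagged:

```python
# Minimal sketch: exercising the _validation table via msrest's validate().
from azure.mgmt.datafactory.models import AzurePostgreSqlTableDataset

incomplete = AzurePostgreSqlTableDataset(linked_service_name=None)
errors = incomplete.validate()
print(errors)  # expect an entry flagging the missing linked_service_name
```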
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AzureQueueSink, self).__init__(**kwargs) + self.type = 'AzureQueueSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py new file mode 100644 index 000000000000..db2fb60ddb1e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_queue_sink_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureQueueSink(CopySink): + """A copy activity Azure Queue sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureQueueSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py new file mode 100644 index 000000000000..1239bbad78fc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). 
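AzureQueueSink, completed above, contributes no properties of its own beyond the type discriminator; everything it exposes is inherited from CopySink. A sketch exercising those inherited knobs (values are illustrative):

```python
# Minimal sketch: all tuning properties come from the CopySink base class.
from azure.mgmt.datafactory.models import AzureQueueSink

queue_sink = AzureQueueSink(
    write_batch_size=100,
    write_batch_timeout='00:05:00',  # timespan pattern from the docstring
    max_concurrent_connections=4,
)
```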
+ :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchIndexDataset, self).__init__(**kwargs) + self.index_name = kwargs.get('index_name', None) + self.type = 'AzureSearchIndex' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py new file mode 100644 index 000000000000..da5e92dd2edd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSearchIndexDataset(Dataset): + """The Azure Search Index. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param index_name: Required. The name of the Azure Search Index. Type: + string (or Expression with resultType string). 
+ :type index_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'index_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'index_name': {'key': 'typeProperties.indexName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.index_name = index_name + self.type = 'AzureSearchIndex' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py new file mode 100644 index 000000000000..af2505be7a5c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
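For the AzureSearchIndexDataset completed above, index_name joins linked_service_name in the _validation table, so both must be supplied. A sketch with placeholder names:

```python
# Minimal sketch (placeholder names): index_name is required on top of the
# linked service reference every Dataset needs.
from azure.mgmt.datafactory.models import (
    AzureSearchIndexDataset,
    LinkedServiceReference,
)

index_dataset = AzureSearchIndexDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureSearchLinkedService'),
    index_name='products-index',
)
```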
+ :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchIndexSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'AzureSearchIndexSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py new file mode 100644 index 000000000000..9e57f2f1feb3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_index_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSearchIndexSink(CopySink): + """A copy activity Azure Search Index sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specify the write behavior when upserting documents + into Azure Search Index. 
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'AzureSearchIndexSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py new file mode 100644 index 000000000000..782799cd5b28 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). + :type url: object + :param key: Admin Key for Azure Search service + :type key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
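The AzureSearchIndexSink above leaves write_behavior typed as a bare object. To my knowledge the service accepts 'Merge' and 'Upload' here; treat that as an assumption rather than something this patch establishes:

```python
# Minimal sketch: 'Merge' (vs. 'Upload') selects upsert-style writes; the
# accepted values are an assumption, since the field is untyped in this model.
from azure.mgmt.datafactory.models import AzureSearchIndexSink

search_sink = AzureSearchIndexSink(write_behavior='Merge')
```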
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSearchLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.key = kwargs.get('key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSearch' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py new file mode 100644 index 000000000000..8589c3aead91 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_search_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSearchLinkedService(LinkedService): + """Linked service for Windows Azure Search Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. URL for Azure Search service. Type: string (or + Expression with resultType string). + :type url: object + :param key: Admin Key for Azure Search service + :type key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
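A minimal sketch of the AzureSearchLinkedService model defined above, assuming SecureString (a SecretBase implementation exported by these models) for the admin key; endpoint and key values are placeholders:

    # Hypothetical values throughout; the admin key is wrapped in a SecretBase type.
    from azure.mgmt.datafactory.models import AzureSearchLinkedService, SecureString

    search_ls = AzureSearchLinkedService(
        url='https://<service>.search.windows.net',  # placeholder endpoint
        key=SecureString(value='<admin-key>'),       # placeholder admin key
    )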
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.key = key + self.encrypted_credential = encrypted_credential + self.type = 'AzureSearch' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py new file mode 100644 index 000000000000..0da66637a04f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. 
Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlDatabase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py new file mode 100644 index 000000000000..dbcf6c88b134 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_database_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlDatabaseLinkedService(LinkedService): + """Microsoft Azure SQL Database linked service. + + All required parameters must be populated in order to send to Azure. 
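A minimal sketch of the AzureSqlDatabaseLinkedService model, wrapping a placeholder connection string in SecureString, one of the types the docstring allows:

    from azure.mgmt.datafactory.models import AzureSqlDatabaseLinkedService, SecureString

    sql_ls = AzureSqlDatabaseLinkedService(
        connection_string=SecureString(
            value='Server=tcp:<server>.database.windows.net;Database=<db>;'  # placeholder
        ),
    )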
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Database. Type: string (or Expression with + resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Database. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDatabase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py new file mode 100644 index 000000000000..cc7c9d58d19f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlDWLinkedService(LinkedService): + """Azure SQL Data Warehouse linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. 
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param password: The Azure key vault secret reference of password in
+ connection string.
+ :type password:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param service_principal_id: The ID of the service principal used to
+ authenticate against Azure SQL Data Warehouse. Type: string (or Expression
+ with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to
+ authenticate against Azure SQL Data Warehouse.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: The name or ID of the tenant to which the service principal
+ belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(AzureSqlDWLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.password = kwargs.get('password', None)
+ self.service_principal_id = kwargs.get('service_principal_id', None)
+ self.service_principal_key = kwargs.get('service_principal_key', None)
+ self.tenant = kwargs.get('tenant', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'AzureSqlDW'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
new file mode 100644
index 000000000000..5c75f3904b37
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_linked_service_py3.py
@@ -0,0 +1,88 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureSqlDWLinkedService(LinkedService):
+ """Azure SQL Data Warehouse linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param password: The Azure key vault secret reference of password in
+ connection string.
+ :type password:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param service_principal_id: The ID of the service principal used to
+ authenticate against Azure SQL Data Warehouse. Type: string (or Expression
+ with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to
+ authenticate against Azure SQL Data Warehouse.
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: The name or ID of the tenant to which the service principal
+ belongs. Type: string (or Expression with resultType string).
+ :type tenant: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
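A minimal sketch of the AzureSqlDWLinkedService model using the new service principal properties; every value below is a placeholder:

    from azure.mgmt.datafactory.models import AzureSqlDWLinkedService, SecureString

    dw_ls = AzureSqlDWLinkedService(
        connection_string=SecureString(value='Server=tcp:<server>;Database=<dw>;'),  # placeholder
        service_principal_id='<application-id>',
        service_principal_key=SecureString(value='<application-key>'),
        tenant='<tenant-id>',
    )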
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlDW' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py new file mode 100644 index 000000000000..0921505515d5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlDWTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureSqlDWTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py new file mode 100644 index 000000000000..0be72998fc64 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_dw_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlDWTableDataset(Dataset): + """The Azure SQL Data Warehouse dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
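A minimal sketch of the AzureSqlDWTableDataset model; the referenced linked service name and table are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureSqlDWTableDataset,
        LinkedServiceReference,
    )

    dw_table = AzureSqlDWTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='<dw-linked-service>'),
        table_name='dbo.FactSales',  # placeholder table
    )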
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure SQL Data Warehouse. Type: + string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureSqlDWTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py new file mode 100644 index 000000000000..441bf0c4279f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). 
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'AzureSqlSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py new file mode 100644 index 000000000000..6aa431ae57d6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class AzureSqlSink(CopySink): + """A copy activity Azure SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
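A minimal sketch of the AzureSqlSink model driving a table-type stored procedure; the procedure, table type, and parameter names are placeholders, and StoredProcedureParameter is the model referenced in the docstring above (its 'Int' type value is an assumption):

    from azure.mgmt.datafactory.models import AzureSqlSink, StoredProcedureParameter

    sql_sink = AzureSqlSink(
        sql_writer_stored_procedure_name='<upsert-proc>',   # placeholder
        sql_writer_table_type='<table-type>',               # placeholder
        stored_procedure_parameters={
            'batchId': StoredProcedureParameter(value='1', type='Int'),  # assumed type value
        },
    )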
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'AzureSqlSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py new file mode 100644 index 000000000000..b6c62f9a3164 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'AzureSqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py new file mode 100644 index 000000000000..cb5c33d28bb2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class AzureSqlSource(CopySource): + """A copy activity Azure SQL source. + + All required parameters must be populated in order to send to Azure. 
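A minimal sketch of the AzureSqlSource model; per the docstring, sql_reader_query and sql_reader_stored_procedure_name cannot be combined, so only the query form is shown:

    from azure.mgmt.datafactory.models import AzureSqlSource

    sql_source = AzureSqlSource(sql_reader_query='SELECT * FROM dbo.Orders')  # placeholder query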
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'AzureSqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py new file mode 100644 index 000000000000..8d8d90d76e85 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure SQL database. Type: string + (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'AzureSqlTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py new file mode 100644 index 000000000000..3d4f1eac3f58 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlTableDataset(Dataset): + """The Azure SQL Server database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Azure SQL database. Type: string + (or Expression with resultType string). 
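A minimal sketch of the AzureSqlTableDataset model, including serialization through the msrest Model base (serialize() renders the REST wire shape, e.g. typeProperties.tableName); names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureSqlTableDataset,
        LinkedServiceReference,
    )

    table_ds = AzureSqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='<sql-linked-service>'),
        table_name='dbo.Orders',  # placeholder
    )
    body = table_ds.serialize()  # dict in the REST wire shape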
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'AzureSqlTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py new file mode 100644 index 000000000000..202dd7229b90 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureStorageLinkedService(LinkedService): + """The storage account linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: The connection string. It is mutually exclusive + with sasUri property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure Storage resource. 
+     It is mutually exclusive with connectionString property. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in the
+     SAS URI.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
+        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
+        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureStorageLinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.account_key = kwargs.get('account_key', None)
+        self.sas_uri = kwargs.get('sas_uri', None)
+        self.sas_token = kwargs.get('sas_token', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureStorage'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
new file mode 100644
index 000000000000..4fac19b70849
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_storage_linked_service_py3.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureStorageLinkedService(LinkedService):
+    """The storage account linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: The connection string. It is mutually exclusive
+     with sasUri property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param account_key: The Azure key vault secret reference of accountKey in
+     connection string.
+    :type account_key:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
+     exclusive with connectionString property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in the
+     SAS URI.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
+        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
+        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None:
+        super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.account_key = account_key
+        self.sas_uri = sas_uri
+        self.sas_token = sas_token
+        self.encrypted_credential = encrypted_credential
+        self.type = 'AzureStorage'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py
new file mode 100644
index 000000000000..eb8dacbfbb98
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
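A minimal sketch of how the AzureStorageLinkedService model above might be constructed and registered. The resource group, factory, and connection-string values are placeholders, client is assumed to be an already-authenticated DataFactoryManagementClient, and the create_or_update call shape follows the package's existing linked_services operations rather than anything introduced in this patch:

from azure.mgmt.datafactory.models import AzureStorageLinkedService, SecureString

# connection_string accepts a plain string, a SecureString, or an
# AzureKeyVaultSecretReference, per the docstring above.
storage_ls = AzureStorageLinkedService(
    connection_string=SecureString(
        value='DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>'),
    description='Storage account shared by the copy pipelines')

client.linked_services.create_or_update(
    '<resource-group>', '<factory-name>', 'AzureStorageLinkedService1', storage_ls)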
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class AzureTableDataset(Dataset):
+    """The Azure Table storage dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: Required. The table name of the Azure Table storage.
+     Type: string (or Expression with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'table_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'AzureTable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
new file mode 100644
index 000000000000..d70a15fdd6f1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_dataset_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
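A usage sketch for the AzureTableDataset model defined above; note that table_name is required here, unlike the optional table_name on AzureSqlTableDataset earlier in this patch. The reference and table names are illustrative:

from azure.mgmt.datafactory.models import AzureTableDataset, LinkedServiceReference

orders_dataset = AzureTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='AzureStorageLinkedService1'),
    table_name='Orders')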
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class AzureTableDataset(Dataset):
+    """The Azure Table storage dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: Required. The table name of the Azure Table storage.
+     Type: string (or Expression with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'table_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'AzureTable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py
new file mode 100644
index 000000000000..3459c9ad3ba1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink.py
@@ -0,0 +1,81 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class AzureTableSink(CopySink):
+    """A copy activity Azure Table sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param azure_table_default_partition_key_value: Azure Table default
+     partition key value. Type: string (or Expression with resultType string).
+    :type azure_table_default_partition_key_value: object
+    :param azure_table_partition_key_name: Azure Table partition key name.
+     Type: string (or Expression with resultType string).
+    :type azure_table_partition_key_name: object
+    :param azure_table_row_key_name: Azure Table row key name. Type: string
+     (or Expression with resultType string).
+    :type azure_table_row_key_name: object
+    :param azure_table_insert_type: Azure Table insert type. Type: string (or
+     Expression with resultType string).
+    :type azure_table_insert_type: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
+        'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
+        'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
+        'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureTableSink, self).__init__(**kwargs)
+        self.azure_table_default_partition_key_value = kwargs.get('azure_table_default_partition_key_value', None)
+        self.azure_table_partition_key_name = kwargs.get('azure_table_partition_key_name', None)
+        self.azure_table_row_key_name = kwargs.get('azure_table_row_key_name', None)
+        self.azure_table_insert_type = kwargs.get('azure_table_insert_type', None)
+        self.type = 'AzureTableSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
new file mode 100644
index 000000000000..a15247544879
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_sink_py3.py
@@ -0,0 +1,81 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class AzureTableSink(CopySink):
+    """A copy activity Azure Table sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param azure_table_default_partition_key_value: Azure Table default
+     partition key value. Type: string (or Expression with resultType string).
+    :type azure_table_default_partition_key_value: object
+    :param azure_table_partition_key_name: Azure Table partition key name.
+     Type: string (or Expression with resultType string).
+    :type azure_table_partition_key_name: object
+    :param azure_table_row_key_name: Azure Table row key name. Type: string
+     (or Expression with resultType string).
+    :type azure_table_row_key_name: object
+    :param azure_table_insert_type: Azure Table insert type. Type: string (or
+     Expression with resultType string).
+    :type azure_table_insert_type: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
+        'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
+        'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
+        'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None:
+        super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.azure_table_default_partition_key_value = azure_table_default_partition_key_value
+        self.azure_table_partition_key_name = azure_table_partition_key_name
+        self.azure_table_row_key_name = azure_table_row_key_name
+        self.azure_table_insert_type = azure_table_insert_type
+        self.type = 'AzureTableSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py
new file mode 100644
index 000000000000..fa7ead73eaa9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
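A sketch of configuring the AzureTableSink defined above. Every typeProperties value is loosely typed as object, so plain literals and ADF expression objects are both accepted; the 'merge' insert type is an assumed service-side value, not something this patch defines:

from azure.mgmt.datafactory.models import AzureTableSink

table_sink = AzureTableSink(
    azure_table_partition_key_name='Region',
    azure_table_row_key_name='OrderId',
    azure_table_insert_type='merge',  # assumed value; verify against the service
    write_batch_size=100)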
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class AzureTableSource(CopySource):
+    """A copy activity Azure Table source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param azure_table_source_query: Azure Table source query. Type: string
+     (or Expression with resultType string).
+    :type azure_table_source_query: object
+    :param azure_table_source_ignore_table_not_found: Azure Table source
+     ignore table not found. Type: boolean (or Expression with resultType
+     boolean).
+    :type azure_table_source_ignore_table_not_found: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
+        'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureTableSource, self).__init__(**kwargs)
+        self.azure_table_source_query = kwargs.get('azure_table_source_query', None)
+        self.azure_table_source_ignore_table_not_found = kwargs.get('azure_table_source_ignore_table_not_found', None)
+        self.type = 'AzureTableSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
new file mode 100644
index 000000000000..efbac5613219
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_source_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class AzureTableSource(CopySource):
+    """A copy activity Azure Table source.
+
+    All required parameters must be populated in order to send to Azure.
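The matching source side, using the AzureTableSource model defined above (the OData-style filter string and flag value are illustrative):

from azure.mgmt.datafactory.models import AzureTableSource

table_source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'Europe'",
    azure_table_source_ignore_table_not_found=False)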
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param azure_table_source_query: Azure Table source query. Type: string
+     (or Expression with resultType string).
+    :type azure_table_source_query: object
+    :param azure_table_source_ignore_table_not_found: Azure Table source
+     ignore table not found. Type: boolean (or Expression with resultType
+     boolean).
+    :type azure_table_source_ignore_table_not_found: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
+        'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None:
+        super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.azure_table_source_query = azure_table_source_query
+        self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
+        self.type = 'AzureTableSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py
new file mode 100644
index 000000000000..c2a8c2498ea6
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class AzureTableStorageLinkedService(LinkedService):
+    """The Azure Table storage linked service.
+
+    All required parameters must be populated in order to send to Azure.
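Put together, the source and sink sketched above slot into a copy activity as follows; CopyActivity and DatasetReference already exist in this models package, and the activity and dataset names are placeholders:

from azure.mgmt.datafactory.models import CopyActivity, DatasetReference

copy_step = CopyActivity(
    name='CopyOrdersBetweenTables',
    source=table_source,  # AzureTableSource from the sketch above
    sink=table_sink,      # AzureTableSink from the sketch above
    inputs=[DatasetReference(reference_name='SourceOrdersDataset')],
    outputs=[DatasetReference(reference_name='SinkOrdersDataset')])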
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: The connection string. It is mutually exclusive
+     with sasUri property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param account_key: The Azure key vault secret reference of accountKey in
+     connection string.
+    :type account_key:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
+     exclusive with connectionString property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in the
+     SAS URI.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
+        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
+        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(AzureTableStorageLinkedService, self).__init__(**kwargs)
+        self.connection_string = kwargs.get('connection_string', None)
+        self.account_key = kwargs.get('account_key', None)
+        self.sas_uri = kwargs.get('sas_uri', None)
+        self.sas_token = kwargs.get('sas_token', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'AzureTableStorage'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
new file mode 100644
index 000000000000..8d4e62c4f3e6
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_table_storage_linked_service_py3.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class AzureTableStorageLinkedService(LinkedService):
+    """The Azure Table storage linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: The connection string. It is mutually exclusive
+     with sasUri property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param account_key: The Azure key vault secret reference of accountKey in
+     connection string.
+    :type account_key:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
+     exclusive with connectionString property. Type: string, SecureString or
+     AzureKeyVaultSecretReference.
+    :type sas_uri: object
+    :param sas_token: The Azure key vault secret reference of sasToken in the
+     SAS URI.
+    :type sas_token:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
+        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
+        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None:
+        super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.account_key = account_key
+        self.sas_uri = sas_uri
+        self.sas_token = sas_token
+        self.encrypted_credential = encrypted_credential
+        self.type = 'AzureTableStorage'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py
new file mode 100644
index 000000000000..673d34167fed
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .multiple_pipeline_trigger import MultiplePipelineTrigger
+
+
+class BlobEventsTrigger(MultiplePipelineTrigger):
+    """Trigger that runs every time a Blob event occurs.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pipelines: Pipelines that need to be started.
+    :type pipelines:
+     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
+    :param blob_path_begins_with: The blob path must begin with the pattern
+     provided for the trigger to fire. For example, '/records/blobs/december/'
+     will only fire the trigger for blobs in the december folder under the
+     records container. At least one of these must be provided:
+     blobPathBeginsWith, blobPathEndsWith.
+    :type blob_path_begins_with: str
+    :param blob_path_ends_with: The blob path must end with the pattern
+     provided for the trigger to fire. For example, 'december/boxes.csv' will
+     only fire the trigger for blobs named boxes in a december folder. At
+     least one of these must be provided: blobPathBeginsWith, blobPathEndsWith.
+    :type blob_path_ends_with: str
+    :param events: Required. The type of events that cause this trigger to
+     fire.
+    :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes]
+    :param scope: Required. The ARM resource ID of the Storage Account.
+    :type scope: str
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+        'events': {'required': True},
+        'scope': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+        'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'},
+        'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'},
+        'events': {'key': 'typeProperties.events', 'type': '[str]'},
+        'scope': {'key': 'typeProperties.scope', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(BlobEventsTrigger, self).__init__(**kwargs)
+        self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None)
+        self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None)
+        self.events = kwargs.get('events', None)
+        self.scope = kwargs.get('scope', None)
+        self.type = 'BlobEventsTrigger'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
new file mode 100644
index 000000000000..fb65a420a2cd
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_events_trigger_py3.py
@@ -0,0 +1,85 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger
+
+
+class BlobEventsTrigger(MultiplePipelineTrigger):
+    """Trigger that runs every time a Blob event occurs.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
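A sketch of wiring up the BlobEventsTrigger defined above; the subscription, resource group, account, and pipeline names are placeholders, and the event string is one of the BlobEventTypes values referenced in the docstring:

from azure.mgmt.datafactory.models import (
    BlobEventsTrigger, PipelineReference, TriggerPipelineReference)

trigger = BlobEventsTrigger(
    events=['Microsoft.Storage.BlobCreated'],
    scope='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
          'Microsoft.Storage/storageAccounts/<account>',
    blob_path_begins_with='/records/blobs/december/',
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='ProcessDecemberBlobs'))])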
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pipelines: Pipelines that need to be started.
+    :type pipelines:
+     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
+    :param blob_path_begins_with: The blob path must begin with the pattern
+     provided for the trigger to fire. For example, '/records/blobs/december/'
+     will only fire the trigger for blobs in the december folder under the
+     records container. At least one of these must be provided:
+     blobPathBeginsWith, blobPathEndsWith.
+    :type blob_path_begins_with: str
+    :param blob_path_ends_with: The blob path must end with the pattern
+     provided for the trigger to fire. For example, 'december/boxes.csv' will
+     only fire the trigger for blobs named boxes in a december folder. At
+     least one of these must be provided: blobPathBeginsWith, blobPathEndsWith.
+    :type blob_path_ends_with: str
+    :param events: Required. The type of events that cause this trigger to
+     fire.
+    :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes]
+    :param scope: Required. The ARM resource ID of the Storage Account.
+    :type scope: str
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+        'events': {'required': True},
+        'scope': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
+        'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'},
+        'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'},
+        'events': {'key': 'typeProperties.events', 'type': '[str]'},
+        'scope': {'key': 'typeProperties.scope', 'type': 'str'},
+    }
+
+    def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, **kwargs) -> None:
+        super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
+        self.blob_path_begins_with = blob_path_begins_with
+        self.blob_path_ends_with = blob_path_ends_with
+        self.events = events
+        self.scope = scope
+        self.type = 'BlobEventsTrigger'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py
new file mode 100644
index 000000000000..284e0fcecde5
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class BlobSink(CopySink):
+    """A copy activity Azure Blob sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param blob_writer_overwrite_files: Blob writer overwrite files. Type:
+     boolean (or Expression with resultType boolean).
+    :type blob_writer_overwrite_files: object
+    :param blob_writer_date_time_format: Blob writer date time format. Type:
+     string (or Expression with resultType string).
+    :type blob_writer_date_time_format: object
+    :param blob_writer_add_header: Blob writer add header. Type: boolean (or
+     Expression with resultType boolean).
+    :type blob_writer_add_header: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
+        'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
+        'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(BlobSink, self).__init__(**kwargs)
+        self.blob_writer_overwrite_files = kwargs.get('blob_writer_overwrite_files', None)
+        self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None)
+        self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None)
+        self.copy_behavior = kwargs.get('copy_behavior', None)
+        self.type = 'BlobSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
new file mode 100644
index 000000000000..370acc72e017
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_sink_py3.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class BlobSink(CopySink):
+    """A copy activity Azure Blob sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param blob_writer_overwrite_files: Blob writer overwrite files.
+     Type: boolean (or Expression with resultType boolean).
+    :type blob_writer_overwrite_files: object
+    :param blob_writer_date_time_format: Blob writer date time format. Type:
+     string (or Expression with resultType string).
+    :type blob_writer_date_time_format: object
+    :param blob_writer_add_header: Blob writer add header. Type: boolean (or
+     Expression with resultType boolean).
+    :type blob_writer_add_header: object
+    :param copy_behavior: The type of copy behavior for copy sink.
+    :type copy_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
+        'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
+        'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
+        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None:
+        super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.blob_writer_overwrite_files = blob_writer_overwrite_files
+        self.blob_writer_date_time_format = blob_writer_date_time_format
+        self.blob_writer_add_header = blob_writer_add_header
+        self.copy_behavior = copy_behavior
+        self.type = 'BlobSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py
new file mode 100644
index 000000000000..ab4313a890cb
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class BlobSource(CopySource):
+    """A copy activity Azure Blob source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param treat_empty_as_null: Treat empty as null. Type: boolean (or
+     Expression with resultType boolean).
+    :type treat_empty_as_null: object
+    :param skip_header_line_count: Number of header lines to skip from each
+     blob. Type: integer (or Expression with resultType integer).
+    :type skip_header_line_count: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+        'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(BlobSource, self).__init__(**kwargs)
+        self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None)
+        self.skip_header_line_count = kwargs.get('skip_header_line_count', None)
+        self.recursive = kwargs.get('recursive', None)
+        self.type = 'BlobSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
new file mode 100644
index 000000000000..78d90cc61e13
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_source_py3.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class BlobSource(CopySource):
+    """A copy activity Azure Blob source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param treat_empty_as_null: Treat empty as null. Type: boolean (or + Expression with resultType boolean). + :type treat_empty_as_null: object + :param skip_header_line_count: Number of header lines to skip from each + blob. Type: integer (or Expression with resultType integer). + :type skip_header_line_count: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None: + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.treat_empty_as_null = treat_empty_as_null + self.skip_header_line_count = skip_header_line_count + self.recursive = recursive + self.type = 'BlobSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py new file mode 100644 index 000000000000..4fd5b5c17100 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger import MultiplePipelineTrigger + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. 
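# Usage sketch (illustrative): a BlobSource built from the class above. A
# property typed 'object' can also carry an ADF Expression, shown here in its
# JSON form with an assumed pipeline parameter named 'recurse'.
from azure.mgmt.datafactory.models import BlobSource

source = BlobSource(
    treat_empty_as_null=True,
    skip_header_line_count=1,   # skip one header line per blob
    recursive={'type': 'Expression', 'value': '@pipeline().parameters.recurse'})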
Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. + :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, **kwargs): + super(BlobTrigger, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.linked_service = kwargs.get('linked_service', None) + self.type = 'BlobTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py new file mode 100644 index 000000000000..cccffd881bfb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/blob_trigger_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. 
Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param folder_path: Required. The path of the container/folder that will + trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to + handle when it is triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service + reference. + :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'}, + } + + def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service + self.type = 'BlobTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py new file mode 100644 index 000000000000..ebba2be42028 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. 
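# Usage sketch (illustrative): a BlobTrigger wiring its three required type
# properties to one pipeline; all reference names are placeholders.
from azure.mgmt.datafactory.models import (
    BlobTrigger, LinkedServiceReference, PipelineReference,
    TriggerPipelineReference)

trigger = BlobTrigger(
    folder_path='landing/inbox',   # container/folder that fires the trigger
    max_concurrency=10,            # max parallel files handled per event
    linked_service=LinkedServiceReference(reference_name='AzureStorageLS'),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='IngestPipeline'))])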
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CassandraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Cassandra' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py new file mode 100644 index 000000000000..f22f303cc61d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_linked_service_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CassandraLinkedService(LinkedService): + """Linked service for Cassandra data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name for connection. Type: string (or + Expression with resultType string). + :type host: object + :param authentication_type: AuthenticationType to be used for connection. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param port: The port for the connection. Type: integer (or Expression + with resultType integer). + :type port: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.authentication_type = authentication_type + self.port = port + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Cassandra' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py new file mode 100644 index 000000000000..8a52f03cd5ba --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. 
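# Usage sketch (illustrative): only 'host' is required for the Cassandra
# linked service; the password is any SecretBase, here a SecureString with a
# placeholder value.
from azure.mgmt.datafactory.models import CassandraLinkedService, SecureString

cassandra_ls = CassandraLinkedService(
    host='cassandra01.example.internal',   # placeholder host name
    port=9042,                             # default CQL native-transport port
    authentication_type='Basic',           # assumed authentication type value
    username='etl_reader',
    password=SecureString(value='<secret>'))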
Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. + :type consistency_level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CassandraSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.consistency_level = kwargs.get('consistency_level', None) + self.type = 'CassandraSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py new file mode 100644 index 000000000000..6957385bab86 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_source_py3.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CassandraSource(CopySource): + """A copy activity source for a Cassandra database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression or + Cassandra Query Language (CQL) command. Type: string (or Expression with + resultType string). + :type query: object + :param consistency_level: The consistency level specifies how many + Cassandra servers must respond to a read request before returning data to + the client application. 
Cassandra checks the specified number of Cassandra + servers for data to satisfy the read request. Must be one of + cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is + case-insensitive. + :type consistency_level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'consistency_level': {'key': 'consistencyLevel', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.consistency_level = consistency_level + self.type = 'CassandraSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py new file mode 100644 index 000000000000..b89c324fd4d4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. 
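# Usage sketch (illustrative): a CassandraSource issuing a CQL query with a
# stronger-than-default read consistency (the default is 'ONE',
# case-insensitive).
from azure.mgmt.datafactory.models import CassandraSource

source = CassandraSource(
    query='SELECT * FROM sensors.readings',   # placeholder CQL text
    consistency_level='LOCAL_QUORUM')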
Constant filled by server. + :type type: str + :param table_name: The table name of the Cassandra database. Type: string + (or Expression with resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or + Expression with resultType string). + :type keyspace: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CassandraTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.keyspace = kwargs.get('keyspace', None) + self.type = 'CassandraTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py new file mode 100644 index 000000000000..256358ce50cb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cassandra_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CassandraTableDataset(Dataset): + """The Cassandra database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. 
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the Cassandra database. Type: string + (or Expression with resultType string). + :type table_name: object + :param keyspace: The keyspace of the Cassandra database. Type: string (or + Expression with resultType string). + :type keyspace: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None: + super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.keyspace = keyspace + self.type = 'CassandraTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py new file mode 100644 index 000000000000..04179d0d1f53 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
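# Usage sketch (illustrative): the dataset binds a keyspace/table pair to a
# Cassandra linked service; the reference name is a placeholder.
from azure.mgmt.datafactory.models import (
    CassandraTableDataset, LinkedServiceReference)

dataset = CassandraTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='CassandraLS'),
    keyspace='sensors',
    table_name='readings')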
+ :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Concur' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py new file mode 100644 index 000000000000..4411db6d2856 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_linked_service_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ConcurLinkedService(LinkedService): + """Concur Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. Application client_id supplied by Concur App + Management. + :type client_id: object + :param username: Required. The user name that you use to access Concur + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Concur' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py new file mode 100644 index 000000000000..e2595f9d8aff --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
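# Usage sketch (illustrative): client_id and username are the two required
# type properties; the use_* switches are left to their service-side default
# of true. All values are placeholders.
from azure.mgmt.datafactory.models import ConcurLinkedService, SecureString

concur_ls = ConcurLinkedService(
    client_id='<client-id-from-concur-app-management>',
    username='svc-expense@example.com',
    password=SecureString(value='<secret>'))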
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ConcurObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py new file mode 100644 index 000000000000..9543a6395a32 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ConcurObjectDataset(Dataset): + """Concur Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ConcurObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py new file mode 100644 index 000000000000..11ae557c0cda --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
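# Usage sketch (illustrative): only the linked service reference is required;
# table_name picks the Concur object to expose. Names are placeholders.
from azure.mgmt.datafactory.models import (
    ConcurObjectDataset, LinkedServiceReference)

dataset = ConcurObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ConcurLS'),
    table_name='Trips')   # hypothetical Concur object name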
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConcurSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ConcurSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py new file mode 100644 index 000000000000..ac8ae8fb5a91 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/concur_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ConcurSource(CopySource): + """A copy activity Concur Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ConcurSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py new file mode 100644 index 000000000000..676a418983f4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py new file mode 100644 index 000000000000..05bb5e2f87f3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_read_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
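# Usage sketch (illustrative): the only connector-specific field on
# ConcurSource is 'query'.
from azure.mgmt.datafactory.models import ConcurSource

source = ConcurSource(query='SELECT * FROM Trips')   # placeholder query text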
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorReadSetting(Model): + """Connector read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(ConnectorReadSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.max_concurrent_connections = max_concurrent_connections diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py new file mode 100644 index 000000000000..65daf9f07794 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ConnectorWriteSetting(Model): + """Connector write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ConnectorWriteSetting, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = kwargs.get('type', None)
+ self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.copy_behavior = kwargs.get('copy_behavior', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py
new file mode 100644
index 000000000000..7f4ea65c916d
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/connector_write_setting_py3.py
@@ -0,0 +1,49 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ConnectorWriteSetting(Model):
+ """Connector write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param copy_behavior: The type of copy behavior for copy sink.
+ :type copy_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
+ super(ConnectorWriteSetting, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.type = type
+ self.max_concurrent_connections = max_concurrent_connections
+ self.copy_behavior = copy_behavior
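Similarly, a hedged sketch for the write-settings counterpart just added; 'FlattenHierarchy' is one of Data Factory's documented copy behaviors ('PreserveHierarchy' and 'MergeFiles' are the others), and the type string is again purely illustrative:

from azure.mgmt.datafactory.models import ConnectorWriteSetting

write_setting = ConnectorWriteSetting(
    type='AzureBlobFSWriteSetting',    # illustrative type string
    copy_behavior='FlattenHierarchy',  # flatten source folders at the sink
    max_concurrent_connections=2,
)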
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py
new file mode 100644
index 000000000000..2242bc36beb2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .activity import Activity
+
+
+class ControlActivity(Activity):
+ """Base class for all control activities like IfCondition, ForEach, Until.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: WebHookActivity, AppendVariableActivity,
+ SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity,
+ WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'}
+ }
+
+ def __init__(self, **kwargs):
+ super(ControlActivity, self).__init__(**kwargs)
+ self.type = 'Container'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
new file mode 100644
index 000000000000..0aabd99d741f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/control_activity_py3.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .activity_py3 import Activity
+
+
+class ControlActivity(Activity):
+ """Base class for all control activities like IfCondition, ForEach, Until.
+
+ You probably want to use the sub-classes and not this class directly.
Known + sub-classes are: WebHookActivity, AppendVariableActivity, + SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, + WaitActivity, ForEachActivity, IfConditionActivity, ExecutePipelineActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Container' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py new file mode 100644 index 000000000000..2e7c00d551ba --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. 
+ :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + } + + def __init__(self, **kwargs): + super(CopyActivity, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.sink = kwargs.get('sink', None) + self.translator = kwargs.get('translator', None) + self.enable_staging = kwargs.get('enable_staging', None) + self.staging_settings = kwargs.get('staging_settings', None) + self.parallel_copies = kwargs.get('parallel_copies', None) + self.data_integration_units = kwargs.get('data_integration_units', None) + self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) + self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.preserve_rules = kwargs.get('preserve_rules', None) + self.preserve = kwargs.get('preserve', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.type = 'Copy' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py new file mode 100644 index 000000000000..f8a1fee5625d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_activity_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
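The class above wires the copy pipeline's type properties together; a sketch of typical construction (dataset names are made up, and BlobSource, BlobSink and DatasetReference are sibling models from the same package):

from azure.mgmt.datafactory.models import (
    BlobSink, BlobSource, CopyActivity, DatasetReference)

copy = CopyActivity(
    name='CopyBlobToBlob',
    source=BlobSource(),
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name='InputBlobs')],
    outputs=[DatasetReference(reference_name='OutputBlobs')],
    data_integration_units=4,  # plain int here; an ADF Expression also works
)
# Note the 'typeProperties.*' keys in _attribute_map above: on the wire,
# source, sink and dataIntegrationUnits nest under a 'typeProperties' object.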
+# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class CopyActivity(ExecutionActivity): + """Copy activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Copy activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param sink: Required. Copy activity sink. + :type sink: ~azure.mgmt.datafactory.models.CopySink + :param translator: Copy activity translator. If not specified, tabular + translator is used. + :type translator: object + :param enable_staging: Specifies whether to copy data via an interim + staging. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_staging: object + :param staging_settings: Specifies interim staging settings when + EnableStaging is true. + :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings + :param parallel_copies: Maximum number of concurrent sessions opened on + the source or sink to avoid overloading the data store. Type: integer (or + Expression with resultType integer), minimum: 0. + :type parallel_copies: object + :param data_integration_units: Maximum number of data integration units + that can be used to perform this data movement. Type: integer (or + Expression with resultType integer), minimum: 0. + :type data_integration_units: object + :param enable_skip_incompatible_row: Whether to skip incompatible row. + Default value is false. Type: boolean (or Expression with resultType + boolean). + :type enable_skip_incompatible_row: object + :param redirect_incompatible_row_settings: Redirect incompatible row + settings when EnableSkipIncompatibleRow is true. + :type redirect_incompatible_row_settings: + ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param preserve_rules: Preserve Rules. + :type preserve_rules: list[object] + :param preserve: Preserve rules. + :type preserve: list[object] + :param inputs: List of inputs for the activity. + :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] + :param outputs: List of outputs for the activity. 
+ :type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'sink': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, + 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, + 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, + 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, + 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, + 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, + 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, + 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, + 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, + 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: + super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.sink = sink + self.translator = translator + self.enable_staging = enable_staging + self.staging_settings = staging_settings + self.parallel_copies = parallel_copies + self.data_integration_units = data_integration_units + self.enable_skip_incompatible_row = enable_skip_incompatible_row + self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.preserve_rules = preserve_rules + self.preserve = preserve + self.inputs = inputs + self.outputs = outputs + self.type = 'Copy' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py new file mode 100644 index 000000000000..43117547e1ea --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
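The write_batch_timeout and sink_retry_wait values documented in the CopySink model below use the .NET TimeSpan text format; the doubled backslashes in the docstrings are only docstring escaping. A quick illustration of what the pattern accepts:

import re

# The docstring pattern, written as a raw string: optional days, then hh:mm:ss.
TIMESPAN = re.compile(r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))$')

assert TIMESPAN.match('00:30:00')    # 30 minutes
assert TIMESPAN.match('1.12:00:00')  # 1 day, 12 hours
assert not TIMESPAN.match('30:00')   # must be a full hh:mm:ss value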
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, + AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + } + + def __init__(self, **kwargs): + super(CopySink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.write_batch_size = kwargs.get('write_batch_size', None) + self.write_batch_timeout = kwargs.get('write_batch_timeout', None) + self.sink_retry_count = kwargs.get('sink_retry_count', None) + self.sink_retry_wait = kwargs.get('sink_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py new file mode 100644 index 000000000000..4f9ebc84173c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySink(Model): + """A copy activity sink. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, + AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, + AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, + FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, + SapCloudForCustomerSink, DelimitedTextSink + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySink, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.write_batch_size = write_batch_size + self.write_batch_timeout = write_batch_timeout + self.sink_retry_count = sink_retry_count + self.sink_retry_wait = sink_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py new file mode 100644 index 000000000000..091070316cfe --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
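The _subtype_map on CopySink above (and on CopySource below) is what lets msrest resolve the wire 'type' discriminator to a concrete model class. A hedged sketch of that mechanism using msrest's Deserializer directly; the payload is invented for illustration:

from msrest import Deserializer

from azure.mgmt.datafactory import models

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

sink = deserialize('CopySink', {'type': 'BlobSink', 'copyBehavior': 'FlattenHierarchy'})
# -> a models.BlobSink instance; 'BlobSink' was looked up via CopySink._subtype_map,
#    and 'copyBehavior' maps onto BlobSink's copy_behavior attribute.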
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, + HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, + EloquaSource, DrillSource, CouchbaseSource, ConcurSource, + AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapOpenHubSource, SapEccSource, SapCloudForCustomerSource, + SalesforceSource, RelationalSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + } + + def __init__(self, **kwargs): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py new file mode 100644 index 000000000000..a5b8437e944a --- /dev/null +++ 
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CopySource(Model): + """A copy activity source. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, + HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, + EloquaSource, DrillSource, CouchbaseSource, ConcurSource, + AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, + AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, + SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, + SapOpenHubSource, SapEccSource, SapCloudForCustomerSource, + SalesforceSource, RelationalSource, DynamicsSource, + DocumentDbCollectionSource, BlobSource, AzureTableSource, + DelimitedTextSource, ParquetSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(CopySource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_retry_count = source_retry_count + self.source_retry_wait = source_retry_wait + self.max_concurrent_connections = max_concurrent_connections + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py new file mode 100644 index 
000000000000..6a8a462f6f46 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CosmosDb' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py new file mode 100644 index 000000000000..57dab80e06b9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CosmosDbLinkedService(LinkedService): + """Microsoft Azure Cosmos Database (CosmosDB) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in + connection string. + :type account_key: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None: + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.account_key = account_key + self.encrypted_credential = encrypted_credential + self.type = 'CosmosDb' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py new file mode 100644 index 000000000000..d86648eb5eee --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py new file mode 100644 index 000000000000..de2650fa14b4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CosmosDbMongoDbApiCollectionDataset(Dataset): + """The CosmosDB (MongoDB API) database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the CosmosDB (MongoDB + API) database. Type: string (or Expression with resultType string). + :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection = collection + self.type = 'CosmosDbMongoDbApiCollection' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py new file mode 100644 index 000000000000..f76e7c5f5a41 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CosmosDbMongoDbApiLinkedService(LinkedService): + """Linked service for CosmosDB (MongoDB API) data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection
+ string. Type: string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the CosmosDB (MongoDB API) database
+ that you want to access. Type: string (or Expression with resultType
+ string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.database = kwargs.get('database', None)
+ self.type = 'CosmosDbMongoDbApi'
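
For orientation, a minimal usage sketch (assuming these models are exported
from the azure.mgmt.datafactory.models namespace, as the docstrings above
indicate; the connection string value is a placeholder):

    from azure.mgmt.datafactory.models import (
        CosmosDbMongoDbApiLinkedService,
        SecureString,
    )

    # 'type' is a constant filled by the server; only the two required
    # typeProperties have to be supplied.
    linked_service = CosmosDbMongoDbApiLinkedService(
        connection_string=SecureString(value='mongodb://placeholder:10255'),
        database='mydb',
    )
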
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
new file mode 100644
index 000000000000..2a72bfce4ff2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_linked_service_py3.py
@@ -0,0 +1,67 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class CosmosDbMongoDbApiLinkedService(LinkedService):
+ """Linked service for CosmosDB (MongoDB API) data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The CosmosDB (MongoDB API) connection
+ string. Type: string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the CosmosDB (MongoDB API) database
+ that you want to access. Type: string (or Expression with resultType
+ string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.connection_string = connection_string
+ self.database = database
+ self.type = 'CosmosDbMongoDbApi'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
new file mode 100644
index 000000000000..0d40b52add80
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class CosmosDbMongoDbApiSink(CopySink):
+ """A copy activity sink for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: Specifies whether the document with the same key
+ should be overwritten (upsert) rather than throwing an exception (insert).
+ The default value is "insert". Type: string (or Expression with resultType
+ string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiSink, self).__init__(**kwargs)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.type = 'CosmosDbMongoDbApiSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
new file mode 100644
index 000000000000..5db1ee5c9d36
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_sink_py3.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class CosmosDbMongoDbApiSink(CopySink):
+ """A copy activity sink for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: Specifies whether the document with the same key
+ should be overwritten (upsert) rather than throwing an exception (insert).
+ The default value is "insert". Type: string (or Expression with resultType
+ string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.write_behavior = write_behavior
+ self.type = 'CosmosDbMongoDbApiSink'
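
Under the same assumptions, a hedged sketch of the sink model just defined:
a sink that upserts documents with a matching key instead of failing on
insert.

    from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSink

    # write_behavior falls back to the service default ("insert") when omitted.
    sink = CosmosDbMongoDbApiSink(write_behavior='upsert')
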
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
new file mode 100644
index 000000000000..dae49c1d45eb
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class CosmosDbMongoDbApiSource(CopySource):
+ """A copy activity source for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies selection filter using query operators. To return
+ all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance. In most cases, modifying
+ the batch size will not affect the user or the application. This
+ property's main purpose is to avoid hitting the limit on response size.
+ Type: integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CosmosDbMongoDbApiSource, self).__init__(**kwargs)
+ self.filter = kwargs.get('filter', None)
+ self.cursor_methods = kwargs.get('cursor_methods', None)
+ self.batch_size = kwargs.get('batch_size', None)
+ self.type = 'CosmosDbMongoDbApiSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
new file mode 100644
index 000000000000..a4c869645973
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/cosmos_db_mongo_db_api_source_py3.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class CosmosDbMongoDbApiSource(CopySource):
+ """A copy activity source for a CosmosDB (MongoDB API) database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies selection filter using query operators. To return
+ all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for MongoDB query.
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance. In most cases, modifying
+ the batch size will not affect the user or the application. This
+ property's main purpose is to avoid hitting the limit on response size.
+ Type: integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
+ super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.filter = filter
+ self.cursor_methods = cursor_methods
+ self.batch_size = batch_size
+ self.type = 'CosmosDbMongoDbApiSource'
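
And a sketch of the source model above; the filter document and batch size
are arbitrary illustrations, not defaults.

    from azure.mgmt.datafactory.models import CosmosDbMongoDbApiSource

    source = CosmosDbMongoDbApiSource(
        filter='{"status": "active"}',  # omit, or pass {}, to return all documents
        batch_size=100,
    )
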
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
new file mode 100644
index 000000000000..76e45648f941
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class CouchbaseLinkedService(LinkedService):
+ """Couchbase server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param cred_string: The Azure key vault secret reference of credString in
+ connection string.
+ :type cred_string:
+ ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CouchbaseLinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.cred_string = kwargs.get('cred_string', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Couchbase'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
new file mode 100644
index 000000000000..afe336f666de
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_linked_service_py3.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class CouchbaseLinkedService(LinkedService):
+ """Couchbase server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: An ODBC connection string. Type: string,
+ SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.cred_string = cred_string + self.encrypted_credential = encrypted_credential + self.type = 'Couchbase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py new file mode 100644 index 000000000000..b355605417d1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CouchbaseSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py new file mode 100644 index 000000000000..1358fc20feba --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CouchbaseSource(CopySource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CouchbaseSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py new file mode 100644 index 000000000000..821274b9aae4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'CouchbaseTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py new file mode 100644 index 000000000000..cf5299fd55a5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/couchbase_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'CouchbaseTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py new file mode 100644 index 000000000000..0e7002dcf68a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. 
+ :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.data_factory_name = kwargs.get('data_factory_name', None) + self.data_factory_location = kwargs.get('data_factory_location', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py new file mode 100644 index 000000000000..aad7d6fa5ac0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_linked_integration_runtime_request_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateLinkedIntegrationRuntimeRequest(Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked + integration runtime belongs to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked + integration runtime belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the + linked integration runtime belongs to. + :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None: + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = name + self.subscription_id = subscription_id + self.data_factory_name = data_factory_name + self.data_factory_location = data_factory_location diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py new file mode 100644 index 000000000000..18ec9f963e65 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateRunResponse(Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. + :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = kwargs.get('run_id', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py new file mode 100644 index 000000000000..bb280441ae90 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/create_run_response_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateRunResponse(Model): + """Response body with a run identifier. + + All required parameters must be populated in order to send to Azure. + + :param run_id: Required. Identifier of a run. + :type run_id: str + """ + + _validation = { + 'run_id': {'required': True}, + } + + _attribute_map = { + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__(self, *, run_id: str, **kwargs) -> None: + super(CreateRunResponse, self).__init__(**kwargs) + self.run_id = run_id diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py new file mode 100644 index 000000000000..01cfb7335d37 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class CustomActivity(ExecutionActivity): + """Custom activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param command: Required. Command for custom activity. Type: string (or
+ Expression with resultType string).
+ :type command: object
+ :param resource_linked_service: Resource linked service reference.
+ :type resource_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param folder_path: Folder path for resource files. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param reference_objects: Reference objects.
+ :type reference_objects:
+ ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
+ :param extended_properties: User-defined property bag. There is no
+ restriction on the keys or values that can be used. The user-specified
+ custom activity has the full responsibility to consume and interpret the
+ content defined.
+ :type extended_properties: dict[str, object]
+ :param retention_time_in_days: The retention time for the files submitted
+ for custom activity. Type: double (or Expression with resultType double).
+ :type retention_time_in_days: object
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'command': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'command': {'key': 'typeProperties.command', 'type': 'object'},
+ 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'},
+ 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
+ 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'},
+ 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'},
+ 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CustomActivity, self).__init__(**kwargs)
+ self.command = kwargs.get('command', None)
+ self.resource_linked_service = kwargs.get('resource_linked_service', None)
+ self.folder_path = kwargs.get('folder_path', None)
+ self.reference_objects = kwargs.get('reference_objects', None)
+ self.extended_properties = kwargs.get('extended_properties', None)
+ self.retention_time_in_days = kwargs.get('retention_time_in_days', None)
+ self.type = 'Custom'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
new file mode 100644
index 000000000000..bf8326f053f2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_py3.py
@@ -0,0 +1,91 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class CustomActivity(ExecutionActivity):
+ """Custom activity type.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param command: Required. Command for custom activity. Type: string (or
+ Expression with resultType string).
+ :type command: object
+ :param resource_linked_service: Resource linked service reference.
+ :type resource_linked_service:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param folder_path: Folder path for resource files. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param reference_objects: Reference objects.
+ :type reference_objects:
+ ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
+ :param extended_properties: User-defined property bag. There is no
+ restriction on the keys or values that can be used. The user-specified
+ custom activity has the full responsibility to consume and interpret the
+ content defined.
+ :type extended_properties: dict[str, object]
+ :param retention_time_in_days: The retention time for the files submitted
+ for custom activity. Type: double (or Expression with resultType double).
+ :type retention_time_in_days: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, + 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + } + + def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None: + super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.command = command + self.resource_linked_service = resource_linked_service + self.folder_path = folder_path + self.reference_objects = reference_objects + self.extended_properties = extended_properties + self.retention_time_in_days = retention_time_in_days + self.type = 'Custom' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py new file mode 100644 index 000000000000..5f95a54612dd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomActivityReferenceObject(Model): + """Reference objects for custom activity. + + :param linked_services: Linked service references. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param datasets: Dataset references. 
+ :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _attribute_map = { + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + } + + def __init__(self, **kwargs): + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = kwargs.get('linked_services', None) + self.datasets = kwargs.get('datasets', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py new file mode 100644 index 000000000000..f860f0141bd0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_activity_reference_object_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CustomActivityReferenceObject(Model): + """Reference objects for custom activity. + + :param linked_services: Linked service references. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param datasets: Dataset references. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + """ + + _attribute_map = { + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, + } + + def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None: + super(CustomActivityReferenceObject, self).__init__(**kwargs) + self.linked_services = linked_services + self.datasets = datasets diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py new file mode 100644 index 000000000000..db14a05e7ad1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Custom linked service properties. + :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomDataSourceLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'CustomDataSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py new file mode 100644 index 000000000000..f7633ee28cbd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_data_source_linked_service_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CustomDataSourceLinkedService(LinkedService): + """Custom linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Custom linked service properties. 
+ :type type_properties: object + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type_properties = type_properties + self.type = 'CustomDataSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py new file mode 100644 index 000000000000..a242309c3fd1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CustomDataset(Dataset): + """The custom dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Custom dataset properties. 
+ :type type_properties: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CustomDataset, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'CustomDataset' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py new file mode 100644 index 000000000000..c00dae2b2c56 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/custom_dataset_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CustomDataset(Dataset): + """The custom dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Custom dataset properties. 
+ :type type_properties: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: + super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type_properties = type_properties + self.type = 'CustomDataset' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py new file mode 100644 index 000000000000..ded527b2602a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -0,0 +1,448 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+from enum import Enum
+
+
+class IntegrationRuntimeState(str, Enum):
+
+    initial = "Initial"
+    stopped = "Stopped"
+    started = "Started"
+    starting = "Starting"
+    stopping = "Stopping"
+    need_registration = "NeedRegistration"
+    online = "Online"
+    limited = "Limited"
+    offline = "Offline"
+    access_denied = "AccessDenied"
+
+
+class IntegrationRuntimeAutoUpdate(str, Enum):
+
+    on = "On"
+    off = "Off"
+
+
+class ParameterType(str, Enum):
+
+    object_enum = "Object"
+    string = "String"
+    int_enum = "Int"
+    float_enum = "Float"
+    bool_enum = "Bool"
+    array = "Array"
+    secure_string = "SecureString"
+
+
+class DependencyCondition(str, Enum):
+
+    succeeded = "Succeeded"
+    failed = "Failed"
+    skipped = "Skipped"
+    completed = "Completed"
+
+
+class VariableType(str, Enum):
+
+    string = "String"
+    bool_enum = "Bool"
+    array = "Array"
+
+
+class TriggerRuntimeState(str, Enum):
+
+    started = "Started"
+    stopped = "Stopped"
+    disabled = "Disabled"
+
+
+class RunQueryFilterOperand(str, Enum):
+
+    pipeline_name = "PipelineName"
+    status = "Status"
+    run_start = "RunStart"
+    run_end = "RunEnd"
+    activity_name = "ActivityName"
+    activity_run_start = "ActivityRunStart"
+    activity_run_end = "ActivityRunEnd"
+    activity_type = "ActivityType"
+    trigger_name = "TriggerName"
+    trigger_run_timestamp = "TriggerRunTimestamp"
+    run_group_id = "RunGroupId"
+    latest_only = "LatestOnly"
+
+
+class RunQueryFilterOperator(str, Enum):
+
+    equals = "Equals"
+    not_equals = "NotEquals"
+    in_enum = "In"
+    not_in = "NotIn"
+
+
+class RunQueryOrderByField(str, Enum):
+
+    run_start = "RunStart"
+    run_end = "RunEnd"
+    pipeline_name = "PipelineName"
+    status = "Status"
+    activity_name = "ActivityName"
+    activity_run_start = "ActivityRunStart"
+    activity_run_end = "ActivityRunEnd"
+    trigger_name = "TriggerName"
+    trigger_run_timestamp = "TriggerRunTimestamp"
+
+
+class RunQueryOrder(str, Enum):
+
+    asc = "ASC"
+    desc = "DESC"
+
+
+class TriggerRunStatus(str, Enum):
+
+    succeeded = "Succeeded"
+    failed = "Failed"
+    inprogress = "Inprogress"
+
+
+class TumblingWindowFrequency(str, Enum):
+
+    minute = "Minute"
+    hour = "Hour"
+
+
+class BlobEventTypes(str, Enum):
+
+    microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated"
+    microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted"
+
+
+class DayOfWeek(str, Enum):
+
+    sunday = "Sunday"
+    monday = "Monday"
+    tuesday = "Tuesday"
+    wednesday = "Wednesday"
+    thursday = "Thursday"
+    friday = "Friday"
+    saturday = "Saturday"
+
+
+class DaysOfWeek(str, Enum):
+
+    sunday = "Sunday"
+    monday = "Monday"
+    tuesday = "Tuesday"
+    wednesday = "Wednesday"
+    thursday = "Thursday"
+    friday = "Friday"
+    saturday = "Saturday"
+
+
+class RecurrenceFrequency(str, Enum):
+
+    not_specified = "NotSpecified"
+    minute = "Minute"
+    hour = "Hour"
+    day = "Day"
+    week = "Week"
+    month = "Month"
+    year = "Year"
+
+
+class GoogleAdWordsAuthenticationType(str, Enum):
+
+    service_authentication = "ServiceAuthentication"
+    user_authentication = "UserAuthentication"
+
+
+class SparkServerType(str, Enum):
+
+    shark_server = "SharkServer"
+    shark_server2 = "SharkServer2"
+    spark_thrift_server = "SparkThriftServer"
+
+
+class SparkThriftTransportProtocol(str, Enum):
+
+    binary = "Binary"
+    sasl = "SASL"
+    http = "HTTP "
+
+
+class SparkAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    username = "Username"
+    username_and_password = "UsernameAndPassword"
+    windows_azure_hd_insight_service = "WindowsAzureHDInsightService"
+
+
+class ServiceNowAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    oauth2 = "OAuth2"
+
+
+class PrestoAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    ldap = "LDAP"
+
+
+class PhoenixAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    username_and_password = "UsernameAndPassword"
+    windows_azure_hd_insight_service = "WindowsAzureHDInsightService"
+
+
+class ImpalaAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    sasl_username = "SASLUsername"
+    username_and_password = "UsernameAndPassword"
+
+
+class HiveServerType(str, Enum):
+
+    hive_server1 = "HiveServer1"
+    hive_server2 = "HiveServer2"
+    hive_thrift_server = "HiveThriftServer"
+
+
+class HiveThriftTransportProtocol(str, Enum):
+
+    binary = "Binary"
+    sasl = "SASL"
+    http = "HTTP "
+
+
+class HiveAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    username = "Username"
+    username_and_password = "UsernameAndPassword"
+    windows_azure_hd_insight_service = "WindowsAzureHDInsightService"
+
+
+class HBaseAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    basic = "Basic"
+
+
+class GoogleBigQueryAuthenticationType(str, Enum):
+
+    service_authentication = "ServiceAuthentication"
+    user_authentication = "UserAuthentication"
+
+
+class SapHanaAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    windows = "Windows"
+
+
+class SftpAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    ssh_public_key = "SshPublicKey"
+
+
+class FtpAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    anonymous = "Anonymous"
+
+
+class HttpAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    anonymous = "Anonymous"
+    digest = "Digest"
+    windows = "Windows"
+    client_certificate = "ClientCertificate"
+
+
+class RestServiceAuthenticationType(str, Enum):
+
+    anonymous = "Anonymous"
+    basic = "Basic"
+    aad_service_principal = "AadServicePrincipal"
+    managed_service_identity = "ManagedServiceIdentity"
+
+
+class MongoDbAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    anonymous = "Anonymous"
+
+
+class ODataAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    anonymous = "Anonymous"
+    windows = "Windows"
+    aad_service_principal = "AadServicePrincipal"
+    managed_service_identity = "ManagedServiceIdentity"
+
+
+class ODataAadServicePrincipalCredentialType(str, Enum):
+
+    service_principal_key = "ServicePrincipalKey"
+    service_principal_cert = "ServicePrincipalCert"
+
+
+class TeradataAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    windows = "Windows"
+
+
+class Db2AuthenticationType(str, Enum):
+
+    basic = "Basic"
+
+
+class SybaseAuthenticationType(str, Enum):
+
+    basic = "Basic"
+    windows = "Windows"
+
+
+class AzureFunctionActivityMethod(str, Enum):
+
+    get = "GET"
+    post = "POST"
+    put = "PUT"
+    delete = "DELETE"
+    options = "OPTIONS"
+    head = "HEAD"
+    trace = "TRACE"
+
+
+class WebActivityMethod(str, Enum):
+
+    get = "GET"
+    post = "POST"
+    put = "PUT"
+    delete = "DELETE"
+
+
+class StoredProcedureParameterType(str, Enum):
+
+    string = "String"
+    int_enum = "Int"
+    int64 = "Int64"
+    decimal_enum = "Decimal"
+    guid = "Guid"
+    boolean = "Boolean"
+    date_enum = "Date"
+
+
+class HDInsightActivityDebugInfoOption(str, Enum):
+
+    none = "None"
+    always = "Always"
+    failure = "Failure"
+
+
+class PolybaseSettingsRejectType(str, Enum):
+
+    value = "value"
+    percentage = "percentage"
+
+
+class WebHookActivityMethod(str, Enum):
+
+    post = "POST"
+
+
+class IntegrationRuntimeType(str, Enum):
+
+    managed = "Managed"
+    self_hosted = "SelfHosted"
+
+
+class SelfHostedIntegrationRuntimeNodeStatus(str, Enum):
+
+    need_registration = "NeedRegistration"
+    online = "Online"
+    limited = "Limited"
+    offline = "Offline"
+    upgrading = "Upgrading"
+    initializing = "Initializing"
+    initialize_failed = "InitializeFailed"
+
+
+class IntegrationRuntimeUpdateResult(str, Enum):
+
+    none = "None"
+    succeed = "Succeed"
+    fail = "Fail"
+
+
+class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum):
+
+    not_set = "NotSet"
+    ssl_encrypted = "SslEncrypted"
+    not_encrypted = "NotEncrypted"
+
+
+class ManagedIntegrationRuntimeNodeStatus(str, Enum):
+
+    starting = "Starting"
+    available = "Available"
+    recycling = "Recycling"
+    unavailable = "Unavailable"
+
+
+class IntegrationRuntimeEntityReferenceType(str, Enum):
+
+    integration_runtime_reference = "IntegrationRuntimeReference"
+    linked_service_reference = "LinkedServiceReference"
+
+
+class IntegrationRuntimeSsisCatalogPricingTier(str, Enum):
+
+    basic = "Basic"
+    standard = "Standard"
+    premium = "Premium"
+    premium_rs = "PremiumRS"
+
+
+class IntegrationRuntimeLicenseType(str, Enum):
+
+    base_price = "BasePrice"
+    license_included = "LicenseIncluded"
+
+
+class IntegrationRuntimeEdition(str, Enum):
+
+    standard = "Standard"
+    enterprise = "Enterprise"
+
+
+class SsisObjectMetadataType(str, Enum):
+
+    folder = "Folder"
+    project = "Project"
+    package = "Package"
+    environment = "Environment"
+
+
+class IntegrationRuntimeAuthKeyName(str, Enum):
+
+    auth_key1 = "authKey1"
+    auth_key2 = "authKey2"
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py
new file mode 100644
index 000000000000..364dfd79d71a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
+    """Data Lake Analytics U-SQL activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param script_path: Required.
Case-sensitive path to folder that contains + the U-SQL script. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously + used to run the job. Default value is 1. Type: integer (or Expression with + resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should + be selected to run first. The lower the number, the higher the priority. + Default value is 1000. Type: integer (or Expression with resultType + integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. Type: + string (or Expression with resultType string). + :type runtime_version: object + :param compilation_mode: Compilation mode of U-SQL. Must be one of these + values : Semantic, Full and SingleBox. Type: string (or Expression with + resultType string). + :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DataLakeAnalyticsUSQLActivity, self).__init__(**kwargs) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.degree_of_parallelism = kwargs.get('degree_of_parallelism', None) + self.priority = kwargs.get('priority', None) + self.parameters = kwargs.get('parameters', None) + self.runtime_version = kwargs.get('runtime_version', None) + self.compilation_mode = kwargs.get('compilation_mode', None) + self.type = 'DataLakeAnalyticsU-SQL' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py new file mode 100644 index 000000000000..22623aa3622c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_lake_analytics_usql_activity_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DataLakeAnalyticsUSQLActivity(ExecutionActivity): + """Data Lake Analytics U-SQL activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param script_path: Required. Case-sensitive path to folder that contains + the U-SQL script. Type: string (or Expression with resultType string). + :type script_path: object + :param script_linked_service: Required. Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param degree_of_parallelism: The maximum number of nodes simultaneously + used to run the job. Default value is 1. Type: integer (or Expression with + resultType integer), minimum: 1. + :type degree_of_parallelism: object + :param priority: Determines which jobs out of all that are queued should + be selected to run first. The lower the number, the higher the priority. + Default value is 1000. Type: integer (or Expression with resultType + integer), minimum: 1. + :type priority: object + :param parameters: Parameters for U-SQL job request. + :type parameters: dict[str, object] + :param runtime_version: Runtime version of the U-SQL engine to use. Type: + string (or Expression with resultType string). + :type runtime_version: object + :param compilation_mode: Compilation mode of U-SQL. Must be one of these + values : Semantic, Full and SingleBox. Type: string (or Expression with + resultType string). 
+ :type compilation_mode: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'script_path': {'required': True}, + 'script_linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'}, + 'priority': {'key': 'typeProperties.priority', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'}, + 'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'}, + } + + def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None: + super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.script_path = script_path + self.script_linked_service = script_linked_service + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.parameters = parameters + self.runtime_version = runtime_version + self.compilation_mode = compilation_mode + self.type = 'DataLakeAnalyticsU-SQL' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py new file mode 100644 index 000000000000..a49bd973e2b9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DatabricksNotebookActivity(ExecutionActivity): + """DatabricksNotebook activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. 
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param notebook_path: Required. The absolute path of the notebook to be
+     run in the Databricks Workspace. This path must begin with a slash. Type:
+     string (or Expression with resultType string).
+    :type notebook_path: object
+    :param base_parameters: Base parameters to be used for each run of this
+     job. If the notebook takes a parameter that is not specified, the default
+     value from the notebook will be used.
+    :type base_parameters: dict[str, object]
+    :param libraries: A list of libraries to be installed on the cluster that
+     will execute the job.
+    :type libraries: list[dict[str, object]]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'notebook_path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
+        'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
+        'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(DatabricksNotebookActivity, self).__init__(**kwargs)
+        self.notebook_path = kwargs.get('notebook_path', None)
+        self.base_parameters = kwargs.get('base_parameters', None)
+        self.libraries = kwargs.get('libraries', None)
+        self.type = 'DatabricksNotebook'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
new file mode 100644
index 000000000000..7d2d464b7a1a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity_py3.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class DatabricksNotebookActivity(ExecutionActivity):
+    """DatabricksNotebook activity.
+
+    All required parameters must be populated in order to send to Azure.
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param notebook_path: Required. The absolute path of the notebook to be + run in the Databricks Workspace. This path must begin with a slash. Type: + string (or Expression with resultType string). + :type notebook_path: object + :param base_parameters: Base parameters to be used for each run of this + job.If the notebook takes a parameter that is not specified, the default + value from the notebook will be used. + :type base_parameters: dict[str, object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'notebook_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'}, + 'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.notebook_path = notebook_path + self.base_parameters = base_parameters + self.libraries = libraries + self.type = 'DatabricksNotebook' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py new file mode 100644 index 000000000000..51e7245d12fe --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. 
+ :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.main_class_name = kwargs.get('main_class_name', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkJar' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py new file mode 100644 index 000000000000..6c33f3b51d1e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_jar_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. 
+ :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkJar' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py new file mode 100644 index 000000000000..56178d3882c5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkPythonActivity, self).__init__(**kwargs) + self.python_file = kwargs.get('python_file', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkPython' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py new file mode 100644 index 000000000000..5b16d0d5e9ef --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_spark_python_activity_py3.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. 
+ :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkPython' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py new file mode 100644 index 000000000000..eb7454388615 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, RestResourceDataset, SqlServerTableDataset, + SapOpenHubTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SalesforceObjectDataset, + RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, DelimitedTextDataset, ParquetDataset, + AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + 
def __init__(self, **kwargs): + super(Dataset, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.structure = kwargs.get('structure', None) + self.schema = kwargs.get('schema', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py new file mode 100644 index 000000000000..71b041c5eb5b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetBZip2Compression, self).__init__(**kwargs) + self.type = 'BZip2' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py new file mode 100644 index 000000000000..f97af4588e0a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_bzip2_compression_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetBZip2Compression(DatasetCompression): + """The BZip2 compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BZip2' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py new file mode 100644 index 000000000000..c0c4e3d52624 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetCompression(Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + DatasetGZipCompression, DatasetBZip2Compression + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + } + + def __init__(self, **kwargs): + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py new file mode 100644 index 000000000000..3b10abc69abf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_compression_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetCompression(Model): + """The compression method used on a dataset. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + DatasetGZipCompression, DatasetBZip2Compression + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetCompression, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py new file mode 100644 index 000000000000..9c97e2bfa5e3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'Deflate' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py new file mode 100644 index 000000000000..11d00081bc1c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_deflate_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetDeflateCompression(DatasetCompression): + """The Deflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The Deflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'Deflate' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py new file mode 100644 index 000000000000..882c84a1e84c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. + + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py new file mode 100644 index 000000000000..ea7fc313f967 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_folder_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetFolder(Model): + """The folder that this Dataset is in. If not specified, Dataset will appear + at the root level. 
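+
+    Example (an illustrative sketch, not part of the generated code; the
+    folder name below is made up)::
+
+        folder = DatasetFolder(name='Raw/Sales')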
+ + :param name: The name of the folder that this Dataset is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(DatasetFolder, self).__init__(**kwargs) + self.name = name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py new file mode 100644 index 000000000000..4925127c7f0f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'GZip' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py new file mode 100644 index 000000000000..97346e06366d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_gzip_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetGZipCompression(DatasetCompression): + """The GZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The GZip compression level. 
+ :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'GZip' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py new file mode 100644 index 000000000000..2c318a91cccb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = kwargs.get('type', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py new file mode 100644 index 000000000000..d4e32d753197 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_location_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetLocation(Model): + """Dataset location. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.folder_path = folder_path + self.file_name = file_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py new file mode 100644 index 000000000000..c7831425143f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Dataset(Model): + """The Azure Data Factory nested object which identifies data within different + data stores, such as tables, files, folders, and documents. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, + OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, + ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, + VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, + XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, + ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, + PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, + MarketoObjectDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, RestResourceDataset, SqlServerTableDataset, + SapOpenHubTableDataset, SapEccResourceDataset, + SapCloudForCustomerResourceDataset, SalesforceObjectDataset, + RelationalTableDataset, AzureMySqlTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, DelimitedTextDataset, ParquetDataset, + AmazonS3Dataset + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + } + + 
def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(Dataset, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.structure = structure + self.schema = schema + self.linked_service_name = linked_service_name + self.parameters = parameters + self.annotations = annotations + self.folder = folder + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py new file mode 100644 index 000000000000..ca3d385f31ce --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, **kwargs): + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py new file mode 100644 index 000000000000..80162fd77da1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetReference(Model): + """Dataset reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. 
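+
+    Example (an illustrative sketch, not part of the generated code; the
+    dataset name and parameter values below are made up)::
+
+        ref = DatasetReference(reference_name='MyDataset',
+                               parameters={'path': 'landing/2019'})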
+ + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Dataset reference type. Default value: + "DatasetReference" . + :vartype type: str + :param reference_name: Required. Reference dataset name. + :type reference_name: str + :param parameters: Arguments for dataset. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py new file mode 100644 index 000000000000..a68fb563e425 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, **kwargs): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py new file mode 100644 index 000000000000..9cedba8bbce9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class DatasetResourcePaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`DatasetResource <azure.mgmt.datafactory.models.DatasetResource>` objects
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[DatasetResource]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+
+        super(DatasetResourcePaged, self).__init__(*args, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
new file mode 100644
index 000000000000..6eb099dcb884
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_resource_py3.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .sub_resource_py3 import SubResource
+
+
+class DatasetResource(SubResource):
+    """Dataset resource type.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies change in the resource.
+    :vartype etag: str
+    :param properties: Required. Dataset properties.
+    :type properties: ~azure.mgmt.datafactory.models.Dataset
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'Dataset'},
+    }
+
+    def __init__(self, *, properties, **kwargs) -> None:
+        super(DatasetResource, self).__init__(**kwargs)
+        self.properties = properties
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py
new file mode 100644
index 000000000000..b3160565230d
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, **kwargs): + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.serializer = kwargs.get('serializer', None) + self.deserializer = kwargs.get('deserializer', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py new file mode 100644 index 000000000000..faf746642d9e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_storage_format_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DatasetStorageFormat(Model): + """The format definition of a storage. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, + TextFormat + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(DatasetStorageFormat, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.serializer = serializer + self.deserializer = deserializer + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py new file mode 100644 index 000000000000..ed80bf3cbcf2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_compression import DatasetCompression + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetZipDeflateCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'ZipDeflate' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py new file mode 100644 index 000000000000..20abd6fe1088 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_zip_deflate_compression_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_compression_py3 import DatasetCompression + + +class DatasetZipDeflateCompression(DatasetCompression): + """The ZipDeflate compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The ZipDeflate compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'ZipDeflate' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py new file mode 100644 index 000000000000..d163d2b93c18 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Db2LinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Db2' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py new file mode 100644 index 000000000000..44d784fa9bde --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/db2_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class Db2LinkedService(LinkedService): + """Linked service for DB2 data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. 
Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.Db2AuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Db2' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py new file mode 100644 index 000000000000..34ba33a414d5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class DeleteActivity(ExecutionActivity): + """Delete activity. 
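+ + A minimal construction sketch (illustrative only; the activity name and + the dataset reference name below are assumed example values):: + + DeleteActivity(name='CleanupStagedFiles', + dataset=DatasetReference(reference_name='StagedFolderDataset'), + recursive=True)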
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param recursive: If true, files or sub-folders under the current folder + path will be deleted recursively. Default is false. Type: boolean (or + Expression with resultType boolean). + :type recursive: object + :param max_concurrent_connections: The max concurrent connections to + connect to the data source at the same time. + :type max_concurrent_connections: int + :param enable_logging: Whether to record detailed logs of delete-activity + execution. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_logging: object + :param log_storage_settings: Log storage settings the customer needs to + provide when enableLogging is true. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings + :param dataset: Required. Delete activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, **kwargs): + super(DeleteActivity, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.enable_logging = kwargs.get('enable_logging', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.dataset = kwargs.get('dataset', None) + self.type = 'Delete' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py new file mode 100644 index
000000000000..5107d9a3381a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class DeleteActivity(ExecutionActivity): + """Delete activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param recursive: If true, files or sub-folders under the current folder + path will be deleted recursively. Default is false. Type: boolean (or + Expression with resultType boolean). + :type recursive: object + :param max_concurrent_connections: The max concurrent connections to + connect to the data source at the same time. + :type max_concurrent_connections: int + :param enable_logging: Whether to record detailed logs of delete-activity + execution. Default value is false. Type: boolean (or Expression with + resultType boolean). + :type enable_logging: object + :param log_storage_settings: Log storage settings the customer needs to + provide when enableLogging is true. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings + :param dataset: Required. Delete activity dataset reference.
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'max_concurrent_connections': {'minimum': 1}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'recursive': {'key': 'typeProperties.recursive', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'}, + 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + } + + def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None: + super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.recursive = recursive + self.max_concurrent_connections = max_concurrent_connections + self.enable_logging = enable_logging + self.log_storage_settings = log_storage_settings + self.dataset = dataset + self.type = 'Delete' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py new file mode 100644 index 000000000000..bfee26fcd12c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + missing, the default value is UTF-8, unless the BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output, write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string).
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.column_delimiter = kwargs.get('column_delimiter', None) + self.row_delimiter = kwargs.get('row_delimiter', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.compression_level = kwargs.get('compression_level', None) + self.quote_char = kwargs.get('quote_char', None) + self.escape_char = kwargs.get('escape_char', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.null_value = kwargs.get('null_value', None) + self.type = 'DelimitedText' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py new file mode 100644 index 000000000000..c2597e6a022b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_dataset_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DelimitedTextDataset(Dataset): + """Delimited text dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the delimited text storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). + :type column_delimiter: object + :param row_delimiter: The row delimiter. Type: string (or Expression with + resultType string). + :type row_delimiter: object + :param encoding_name: The code page name of the preferred encoding. If + missing, the default value is UTF-8, unless the BOM denotes another Unicode + encoding. Refer to the name column of the table in the following link to + set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param compression_codec: + :type compression_codec: object + :param compression_level: The data compression method used for + DelimitedText. + :type compression_level: object + :param quote_char: The quote character. Type: string (or Expression with + resultType string). + :type quote_char: object + :param escape_char: The escape character. Type: string (or Expression with + resultType string). + :type escape_char: object + :param first_row_as_header: When used as input, treat the first row of + data as headers. When used as output, write the headers into the output as + the first row of data. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type first_row_as_header: object + :param null_value: The null value string. Type: string (or Expression with + resultType string).
+ :type null_value: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, + 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, + 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None: + super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.encoding_name = encoding_name + self.compression_codec = compression_codec + self.compression_level = compression_level + self.quote_char = quote_char + self.escape_char = escape_char + self.first_row_as_header = first_row_as_header + self.null_value = null_value + self.type = 'DelimitedText' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py new file mode 100644 index 000000000000..004eb595a05e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
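+# +# Illustrative usage sketch (the type string and line count below are +# assumed example values, not produced by the generator): a read setting +# that skips two header rows could be built as +# DelimitedTextReadSetting(type='DelimitedTextReadSetting', +# skip_line_count=2).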
+# -------------------------------------------------------------------------- + +from .format_read_setting import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). + :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextReadSetting, self).__init__(**kwargs) + self.skip_line_count = kwargs.get('skip_line_count', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py new file mode 100644 index 000000000000..87915fcb3db7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_read_setting_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_read_setting_py3 import FormatReadSetting + + +class DelimitedTextReadSetting(FormatReadSetting): + """Delimited text read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param skip_line_count: Indicates the number of non-empty rows to skip + when reading data from input files. Type: integer (or Expression with + resultType integer). 
+ :type skip_line_count: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, skip_line_count=None, **kwargs) -> None: + super(DelimitedTextReadSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.skip_line_count = skip_line_count diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py new file mode 100644 index 000000000000..ae93f209c8b3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py new file mode 100644 index 000000000000..a1ba953a2662 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_sink_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DelimitedTextSink(CopySink): + """A copy activity DelimitedText sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py new file mode 100644 index 000000000000..9f2067d24b9c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.type = 'DelimitedTextSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py new file mode 100644 index 000000000000..b158f97bde81 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_source_py3.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DelimitedTextSource(CopySource): + """A copy activity DelimitedText source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: DelimitedText store settings. + :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + :param format_settings: DelimitedText format settings. 
+ :type format_settings: + ~azure.mgmt.datafactory.models.DelimitedTextReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.format_settings = format_settings + self.type = 'DelimitedTextSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py new file mode 100644 index 000000000000..21fe168f1316 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). 
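+ For example, an assumed illustrative value such as '.txt' or '.csv'.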
+ :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DelimitedTextWriteSetting, self).__init__(**kwargs) + self.quote_all_text = kwargs.get('quote_all_text', None) + self.file_extension = kwargs.get('file_extension', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py new file mode 100644 index 000000000000..ac0e3b2d00cc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delimited_text_write_setting_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .format_write_setting_py3 import FormatWriteSetting + + +class DelimitedTextWriteSetting(FormatWriteSetting): + """Delimited text write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param quote_all_text: Indicates whether string values should always be + enclosed with quotes. Type: boolean (or Expression with resultType + boolean). + :type quote_all_text: object + :param file_extension: Required. The file extension used to create the + files. Type: string (or Expression with resultType string). + :type file_extension: object + """ + + _validation = { + 'type': {'required': True}, + 'file_extension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, + 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + } + + def __init__(self, *, type: str, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + super(DelimitedTextWriteSetting, self).__init__(additional_properties=additional_properties, type=type, **kwargs) + self.quote_all_text = quote_all_text + self.file_extension = file_extension diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py new file mode 100644 index 000000000000..89e750df8f0d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DependencyReference(Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, + TriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__(self, **kwargs): + super(DependencyReference, self).__init__(**kwargs) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py new file mode 100644 index 000000000000..1b0647b74991 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dependency_reference_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DependencyReference(Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, + TriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__(self, **kwargs) -> None: + super(DependencyReference, self).__init__(**kwargs) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py new file mode 100644 index 000000000000..a8065ec3cc06 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
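+# +# Illustrative usage sketch (the endpoint, path, and options below are +# assumed example values, not produced by the generator): +# DistcpSettings(resource_manager_endpoint='http://yarn-rm:8088', +# temp_script_path='/adf/distcp-scripts', +# distcp_options='-m 10')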
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DistcpSettings(Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn + ResourceManager endpoint. Type: string (or Expression with resultType + string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which + will be used to store the temporary Distcp command script. The script file + is generated by ADF and will be removed after the Copy job finishes. Type: + string (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or + Expression with resultType string). + :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = kwargs.get('resource_manager_endpoint', None) + self.temp_script_path = kwargs.get('temp_script_path', None) + self.distcp_options = kwargs.get('distcp_options', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py new file mode 100644 index 000000000000..628e2d207f8e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/distcp_settings_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DistcpSettings(Model): + """Distcp settings. + + All required parameters must be populated in order to send to Azure. + + :param resource_manager_endpoint: Required. Specifies the Yarn + ResourceManager endpoint. Type: string (or Expression with resultType + string). + :type resource_manager_endpoint: object + :param temp_script_path: Required. Specifies an existing folder path which + will be used to store the temporary Distcp command script. The script file + is generated by ADF and will be removed after the Copy job finishes. Type: + string (or Expression with resultType string). + :type temp_script_path: object + :param distcp_options: Specifies the Distcp options. Type: string (or + Expression with resultType string).
+ :type distcp_options: object + """ + + _validation = { + 'resource_manager_endpoint': {'required': True}, + 'temp_script_path': {'required': True}, + } + + _attribute_map = { + 'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'}, + 'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'}, + 'distcp_options': {'key': 'distcpOptions', 'type': 'object'}, + } + + def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None: + super(DistcpSettings, self).__init__(**kwargs) + self.resource_manager_endpoint = resource_manager_endpoint + self.temp_script_path = temp_script_path + self.distcp_options = distcp_options diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py new file mode 100644 index 000000000000..fb2b8d46fa9c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. Document Database collection name. Type: + string (or Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) + self.type = 'DocumentDbCollection' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py new file mode 100644 index 000000000000..5eb4dbbf0997 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DocumentDbCollectionDataset(Dataset): + """Microsoft Azure Document Database Collection dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. Document Database collection name. Type: + string (or Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'DocumentDbCollection' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py new file mode 100644 index 000000000000..c2908dc1dd05 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. 
Constant filled by server. + :type type: str + :param nesting_separator: Nested properties separator. Default is . (dot). + Type: string (or Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionSink, self).__init__(**kwargs) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'DocumentDbCollectionSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py new file mode 100644 index 000000000000..f1410cd211a4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_sink_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DocumentDbCollectionSink(CopySink): + """A copy activity Document Database Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
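Every sink-specific field on the class above is optional; a construction sketch with illustrative values (insert and upsert are the documented write behaviors, but nothing in the model itself enforces them):

    from azure.mgmt.datafactory.models import DocumentDbCollectionSink

    # All values below are illustrative; write_behavior is passed through
    # as-is, so 'upsert' relies on the service-side contract.
    sink = DocumentDbCollectionSink(
        write_behavior='upsert',
        nesting_separator='.',
        write_batch_size=1000)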
+ :type type: str + :param nesting_separator: Nested properties separator. Default is . (dot). + Type: string (or Expression with resultType string). + :type nesting_separator: object + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Allowed values: insert and upsert. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.nesting_separator = nesting_separator + self.write_behavior = write_behavior + self.type = 'DocumentDbCollectionSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py new file mode 100644 index 000000000000..9fdd23f2795f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). + :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DocumentDbCollectionSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.nesting_separator = kwargs.get('nesting_separator', None) + self.type = 'DocumentDbCollectionSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py new file mode 100644 index 000000000000..9e0bf6382b04 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/document_db_collection_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DocumentDbCollectionSource(CopySource): + """A copy activity Document Database Collection source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Documents query. Type: string (or Expression with resultType + string). + :type query: object + :param nesting_separator: Nested properties separator. Type: string (or + Expression with resultType string). 
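The matching source takes an optional query and separator; a sketch using a hypothetical Cosmos DB SQL query:

    from azure.mgmt.datafactory.models import DocumentDbCollectionSource

    # The query text is a placeholder; any of these fields can also be an
    # ADF Expression object instead of a literal.
    source = DocumentDbCollectionSource(
        query='SELECT * FROM c WHERE c.isActive = true',
        nesting_separator='.')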
+ :type nesting_separator: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.nesting_separator = nesting_separator + self.type = 'DocumentDbCollectionSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py new file mode 100644 index 000000000000..c5428ace02a2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Drill' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py new file mode 100644 index 000000000000..5fb0cb25ecdb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DrillLinkedService(LinkedService): + """Drill server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
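Because pwd is typed as AzureKeyVaultSecretReference, the Drill password has to come from Key Vault rather than an inline SecureString; a sketch assuming a Key Vault linked service named 'AzureKeyVaultLS' and a secret 'drill-password' (both hypothetical), with an illustrative ODBC-style connection string:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, DrillLinkedService,
        LinkedServiceReference)

    # Connection-string contents and all names here are placeholders.
    linked_service = DrillLinkedService(
        connection_string='ConnectionType=Direct;Host=drill.example.com;Port=31010',
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
            secret_name='drill-password'))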
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Drill' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py new file mode 100644 index 000000000000..9a3391f27786 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DrillSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py new file mode 100644 index 000000000000..313183abab83 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DrillSource(CopySource): + """A copy activity Drill server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
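Only the query is Drill-specific on this source; a sketch with a hypothetical Drill SQL statement:

    from azure.mgmt.datafactory.models import DrillSource

    # The query is a placeholder; Drill's backtick quoting is ordinary
    # string content as far as this model is concerned.
    source = DrillSource(query='SELECT * FROM dfs.tmp.`orders.parquet` LIMIT 100')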
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DrillSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py new file mode 100644 index 000000000000..c12b086b7824 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DrillTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'DrillTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py new file mode 100644 index 000000000000..f4f5712f29e3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/drill_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DrillTableDataset(Dataset): + """Drill server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
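On the Drill dataset, linked_service_name is the only required argument; a sketch with placeholder names:

    from azure.mgmt.datafactory.models import (
        DrillTableDataset, LinkedServiceReference)

    # 'DrillLS' and the table name are placeholders; table_name serializes
    # under typeProperties.tableName.
    dataset = DrillTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='DrillLS'),
        table_name='dfs.tmp.orders')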
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'DrillTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py new file mode 100644 index 000000000000..5ff0b150718b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsAXLinkedService(LinkedService): + """Dynamics AX linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The Dynamics AX (or Dynamics 365 Finance and + Operations) instance OData endpoint. + :type url: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. 
+ Mark this field as a SecureString to store it securely in Data Factory, or
+ reference a secret stored in Azure Key Vault. Type: string (or Expression
+ with resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Required. Specify the tenant information (domain name or
+ tenant ID) under which your application resides. Retrieve it by hovering
+ the mouse over the top-right corner of the Azure portal. Type: string (or
+ Expression with resultType string).
+ :type tenant: object
+ :param aad_resource_id: Required. Specify the resource for which you are
+ requesting authorization. Type: string (or Expression with resultType string).
+ :type aad_resource_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'url': {'required': True},
+ 'service_principal_id': {'required': True},
+ 'service_principal_key': {'required': True},
+ 'tenant': {'required': True},
+ 'aad_resource_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DynamicsAXLinkedService, self).__init__(**kwargs)
+ self.url = kwargs.get('url', None)
+ self.service_principal_id = kwargs.get('service_principal_id', None)
+ self.service_principal_key = kwargs.get('service_principal_key', None)
+ self.tenant = kwargs.get('tenant', None)
+ self.aad_resource_id = kwargs.get('aad_resource_id', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'DynamicsAX' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py new file mode 100644 index 000000000000..79d3a34ba313 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_linked_service_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class DynamicsAXLinkedService(LinkedService):
+ """Dynamics AX linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
+ Operations) instance OData endpoint.
+ :type url: object
+ :param service_principal_id: Required. Specify the application's client
+ ID. Type: string (or Expression with resultType string).
+ :type service_principal_id: object
+ :param service_principal_key: Required. Specify the application's key.
+ Mark this field as a SecureString to store it securely in Data Factory, or
+ reference a secret stored in Azure Key Vault. Type: string (or Expression
+ with resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Required. Specify the tenant information (domain name or
+ tenant ID) under which your application resides. Retrieve it by hovering
+ the mouse over the top-right corner of the Azure portal. Type: string (or
+ Expression with resultType string).
+ :type tenant: object
+ :param aad_resource_id: Required. Specify the resource for which you are
+ requesting authorization. Type: string (or Expression with resultType string).
+ :type aad_resource_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'tenant': {'required': True}, + 'aad_resource_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsAX' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py new file mode 100644 index 000000000000..392b8ac7b971 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
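Unlike the Drill linked service, five of the type properties here are required; a sketch with placeholder tenant and application values, using SecureString for the key (any SecretBase subtype works):

    from azure.mgmt.datafactory.models import (
        DynamicsAXLinkedService, SecureString)

    # Every value below is a placeholder for a real Finance and Operations
    # instance and AAD application registration.
    linked_service = DynamicsAXLinkedService(
        url='https://contoso.sandbox.operations.dynamics.com/data',
        service_principal_id='00000000-0000-0000-0000-000000000000',
        service_principal_key=SecureString(value='<application-key>'),
        tenant='contoso.onmicrosoft.com',
        aad_resource_id='https://contoso.sandbox.operations.dynamics.com')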
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'DynamicsAXResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py new file mode 100644 index 000000000000..6cade3e4aa59 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_resource_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsAXResourceDataset(Dataset): + """The path of the Dynamics AX OData entity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: Required. The path of the Dynamics AX OData entity. Type: + string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'DynamicsAXResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py new file mode 100644 index 000000000000..619bad0f75c9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
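On this dataset, path joins linked_service_name as a required argument; a sketch with placeholder names:

    from azure.mgmt.datafactory.models import (
        DynamicsAXResourceDataset, LinkedServiceReference)

    # 'DynamicsAXLS' and 'Customers' are placeholders; path names the OData
    # entity and serializes under typeProperties.path.
    dataset = DynamicsAXResourceDataset(
        linked_service_name=LinkedServiceReference(reference_name='DynamicsAXLS'),
        path='Customers')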
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsAXSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsAXSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py new file mode 100644 index 000000000000..7679e68bae7b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_ax_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsAXSource(CopySource): + """A copy activity Dynamics AX source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsAXSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py new file mode 100644 index 000000000000..435c6d153066 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
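The source's query is free-form; a sketch passing hypothetical OData query options:

    from azure.mgmt.datafactory.models import DynamicsAXSource

    # The OData $filter/$top text is illustrative only and is forwarded
    # verbatim to the service.
    source = DynamicsAXSource(query="$filter=dataAreaId eq 'usmf'&$top=100")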
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsEntity' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py new file mode 100644 index 000000000000..7ee671890354 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsEntityDataset(Dataset): + """The Dynamics entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsEntity' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py new file mode 100644 index 000000000000..c925033d1240 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsLinkedService(LinkedService): + """Dynamics linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics + instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for + Dynamics on-premises with Ifd. Type: string (or Expression with resultType + string). + :type deployment_type: object + :param host_name: The host name of the on-premises Dynamics server. The + property is required for on-prem and not allowed for online. Type: string + (or Expression with resultType string). 
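+     (For example, a hypothetical host such as "dynamics.contoso.local".)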
+    :type host_name: object
+    :param port: The port of on-premises Dynamics server. The property is
+     required for on-prem and not allowed for online. Default is 443. Type:
+     integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics server. The property
+     is required for online and not allowed for on-prem. Type: string (or
+     Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics instance.
+     The property is required for on-prem, and required for online when more
+     than one Dynamics instance is associated with the user. Type: string (or
+     Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises
+     with Ifd scenario. Type: string (or Expression with resultType string).
+    :type authentication_type: object
+    :param username: Required. User name to access the Dynamics instance.
+     Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'deployment_type': {'required': True},
+        'authentication_type': {'required': True},
+        'username': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
+        'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
+        'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(DynamicsLinkedService, self).__init__(**kwargs)
+        self.deployment_type = kwargs.get('deployment_type', None)
+        self.host_name = kwargs.get('host_name', None)
+        self.port = kwargs.get('port', None)
+        self.service_uri = kwargs.get('service_uri', None)
+        self.organization_name = kwargs.get('organization_name', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'Dynamics'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
new file mode 100644
index 000000000000..07c028ff2477
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_linked_service_py3.py
@@ -0,0 +1,109 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------

+from .linked_service_py3 import LinkedService


+class DynamicsLinkedService(LinkedService):
+    """Dynamics linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param deployment_type: Required. The deployment type of the Dynamics
+     instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
+     Dynamics on-premises with Ifd. Type: string (or Expression with resultType
+     string).
+    :type deployment_type: object
+    :param host_name: The host name of the on-premises Dynamics server. The
+     property is required for on-prem and not allowed for online. Type: string
+     (or Expression with resultType string).
+    :type host_name: object
+    :param port: The port of on-premises Dynamics server. The property is
+     required for on-prem and not allowed for online. Default is 443. Type:
+     integer (or Expression with resultType integer), minimum: 0.
+    :type port: object
+    :param service_uri: The URL to the Microsoft Dynamics server. The property
+     is required for online and not allowed for on-prem. Type: string (or
+     Expression with resultType string).
+    :type service_uri: object
+    :param organization_name: The organization name of the Dynamics instance.
+     The property is required for on-prem, and required for online when more
+     than one Dynamics instance is associated with the user. Type: string (or
+     Expression with resultType string).
+    :type organization_name: object
+    :param authentication_type: Required. The authentication type to connect
+     to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises
+     with Ifd scenario. Type: string (or Expression with resultType string).
+    :type authentication_type: object
+    :param username: Required. User name to access the Dynamics instance.
+     Type: string (or Expression with resultType string).
+    :type username: object
+    :param password: Password to access the Dynamics instance.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication.
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Dynamics' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py new file mode 100644 index 000000000000..5afce6ced25b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DynamicsSink(CopySink): + """A copy activity Dynamics sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
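+
+    A minimal construction sketch (illustrative only, not part of the
+    generated code; it assumes the model is re-exported from
+    azure.mgmt.datafactory.models)::
+
+        from azure.mgmt.datafactory.models import DynamicsSink
+
+        # write_behavior is a server-filled constant, so it is not passed in.
+        sink = DynamicsSink(write_batch_size=10, ignore_null_values=True)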
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar write_behavior: Required. The write behavior for the operation.
+    :vartype write_behavior: object
+    :param ignore_null_values: The flag indicating whether to ignore null
+     values from the input dataset (except key fields) during a write
+     operation. Default is false. Type: boolean (or Expression with resultType
+     boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'write_behavior': {'required': True, 'constant': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    write_behavior = None
+
+    def __init__(self, **kwargs):
+        super(DynamicsSink, self).__init__(**kwargs)
+        self.ignore_null_values = kwargs.get('ignore_null_values', None)
+        self.type = 'DynamicsSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
new file mode 100644
index 000000000000..ffdb08363bfd
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_sink_py3.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------

+from .copy_sink_py3 import CopySink


+class DynamicsSink(CopySink):
+    """A copy activity Dynamics sink.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
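+
+    This Python 3 variant takes the same fields as keyword-only arguments;
+    a one-line sketch (illustrative only)::
+
+        sink = DynamicsSink(write_batch_size=10, ignore_null_values=True)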
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar write_behavior: Required. The write behavior for the operation.
+    :vartype write_behavior: object
+    :param ignore_null_values: The flag indicating whether to ignore null
+     values from the input dataset (except key fields) during a write
+     operation. Default is false. Type: boolean (or Expression with resultType
+     boolean).
+    :type ignore_null_values: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'write_behavior': {'required': True, 'constant': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+    }
+
+    write_behavior = None
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None:
+        super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.ignore_null_values = ignore_null_values
+        self.type = 'DynamicsSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py
new file mode 100644
index 000000000000..d38f96fee911
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py new file mode 100644 index 000000000000..12d83625bc6a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsSource(CopySource): + """A copy activity Dynamics source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
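+     For example, "00:00:30" denotes a thirty-second wait.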
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: FetchXML is a proprietary query language that is used in
+     Microsoft Dynamics (online & on-premises). Type: string (or Expression
+     with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'DynamicsSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py
new file mode 100644
index 000000000000..6249c2e2334b
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service.py
@@ -0,0 +1,91 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------

+from .linked_service import LinkedService


+class EloquaLinkedService(LinkedService):
+    """Eloqua server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the Eloqua server (for example,
+     eloqua.example.com).
+    :type endpoint: object
+    :param username: Required. The site name and user name of your Eloqua
+     account in the form: sitename/username (for example, Eloqua/Alice).
+    :type username: object
+    :param password: The password corresponding to the user name.
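+     (Typically supplied as a SecureString, or as an
+     AzureKeyVaultSecretReference when the secret is kept in Key Vault.)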
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Eloqua' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py new file mode 100644 index 000000000000..623d798036a3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class EloquaLinkedService(LinkedService): + """Eloqua server linked service. 
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the Eloqua server (for example,
+     eloqua.example.com).
+    :type endpoint: object
+    :param username: Required. The site name and user name of your Eloqua
+     account in the form: sitename/username (for example, Eloqua/Alice).
+    :type username: object
+    :param password: The password corresponding to the user name.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.username = username + self.password = password + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Eloqua' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py new file mode 100644 index 000000000000..56adc0ce47c4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'EloquaObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py new file mode 100644 index 000000000000..705f43cd225c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class EloquaObjectDataset(Dataset): + """Eloqua server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
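+     (For example, LinkedServiceReference(reference_name="MyEloquaLS"),
+     where "MyEloquaLS" is a hypothetical linked service name.)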
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'EloquaObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py new file mode 100644 index 000000000000..f016140189f1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class EloquaSource(CopySource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(EloquaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'EloquaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py new file mode 100644 index 000000000000..d200ff32fd9d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/eloqua_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class EloquaSource(CopySource): + """A copy activity Eloqua server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
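+     (An illustrative, hypothetical value would be a query string such as
+     "SELECT * FROM Accounts".)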
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'EloquaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py new file mode 100644 index 000000000000..5db1448a5a55 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py new file mode 100644 index 000000000000..f87698b67a64 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/entity_reference_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class EntityReference(Model): + """The entity reference. + + :param type: The type of this referenced entity. 
Possible values include: + 'IntegrationRuntimeReference', 'LinkedServiceReference' + :type type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. + :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py new file mode 100644 index 000000000000..0008b5eee153 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ExecutePipelineActivity(ControlActivity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait + for the dependent pipeline execution to finish. Default is false. 
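+     Set this to true, for example, when the parent pipeline should block
+     until the invoked pipeline has finished.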
+ :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(ExecutePipelineActivity, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.parameters = kwargs.get('parameters', None) + self.wait_on_completion = kwargs.get('wait_on_completion', None) + self.type = 'ExecutePipeline' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py new file mode 100644 index 000000000000..addaafabe7b0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_pipeline_activity_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class ExecutePipelineActivity(ControlActivity): + """Execute pipeline activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline reference. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + :param wait_on_completion: Defines whether activity execution will wait + for the dependent pipeline execution to finish. Default is false. 
+ :type wait_on_completion: bool + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'}, + 'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'}, + } + + def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None: + super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.pipeline = pipeline + self.parameters = parameters + self.wait_on_completion = wait_on_completion + self.type = 'ExecutePipeline' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py new file mode 100644 index 000000000000..3ea2abd2e734 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value + should be "x86" or "x64". Type: string (or Expression with resultType + string). 
+ :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: + string (or Expression with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. + Type: string (or Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: + ~azure.mgmt.datafactory.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the + SSIS package. + :type project_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the + SSIS package. + :type package_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers + to execute the SSIS package. + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param package_connection_managers: The package level connection managers + to execute the SSIS package. + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param property_overrides: The property overrides to execute the SSIS + package. + :type property_overrides: dict[str, + ~azure.mgmt.datafactory.models.SSISPropertyOverride] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + } + + def __init__(self, **kwargs): + super(ExecuteSSISPackageActivity, self).__init__(**kwargs) + self.package_location = kwargs.get('package_location', 
None) + self.runtime = kwargs.get('runtime', None) + self.logging_level = kwargs.get('logging_level', None) + self.environment_path = kwargs.get('environment_path', None) + self.execution_credential = kwargs.get('execution_credential', None) + self.connect_via = kwargs.get('connect_via', None) + self.project_parameters = kwargs.get('project_parameters', None) + self.package_parameters = kwargs.get('package_parameters', None) + self.project_connection_managers = kwargs.get('project_connection_managers', None) + self.package_connection_managers = kwargs.get('package_connection_managers', None) + self.property_overrides = kwargs.get('property_overrides', None) + self.type = 'ExecuteSSISPackage' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py new file mode 100644 index 000000000000..fb72bacf03d9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class ExecuteSSISPackageActivity(ExecutionActivity): + """Execute SSIS package activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param package_location: Required. SSIS package location. + :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation + :param runtime: Specifies the runtime to execute SSIS package. The value + should be "x86" or "x64". Type: string (or Expression with resultType + string). + :type runtime: object + :param logging_level: The logging level of SSIS package execution. Type: + string (or Expression with resultType string). + :type logging_level: object + :param environment_path: The environment path to execute the SSIS package. + Type: string (or Expression with resultType string). + :type environment_path: object + :param execution_credential: The package execution credential. + :type execution_credential: + ~azure.mgmt.datafactory.models.SSISExecutionCredential + :param connect_via: Required. The integration runtime reference. 
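A plausible construction of this activity, hedged: it assumes SSISPackageLocation takes the SSISDB package path and SSISExecutionParameter wraps a single value; every name below is hypothetical:

    from azure.mgmt.datafactory.models import (
        ExecuteSSISPackageActivity, IntegrationRuntimeReference,
        SSISExecutionParameter, SSISPackageLocation)

    activity = ExecuteSSISPackageActivity(
        name='RunNightlyLoad',
        package_location=SSISPackageLocation(package_path='Nightly/LoadDW.dtsx'),
        connect_via=IntegrationRuntimeReference(reference_name='MySsisIR'),
        runtime='x64',          # 'x86' or 'x64', as documented above
        logging_level='Basic',
        project_parameters={'Env': SSISExecutionParameter(value='prod')})
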
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param project_parameters: The project level parameters to execute the + SSIS package. + :type project_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param package_parameters: The package level parameters to execute the + SSIS package. + :type package_parameters: dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter] + :param project_connection_managers: The project level connection managers + to execute the SSIS package. + :type project_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param package_connection_managers: The package level connection managers + to execute the SSIS package. + :type package_connection_managers: dict[str, dict[str, + ~azure.mgmt.datafactory.models.SSISExecutionParameter]] + :param property_overrides: The property overrides to execute the SSIS + package. + :type property_overrides: dict[str, + ~azure.mgmt.datafactory.models.SSISPropertyOverride] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'package_location': {'required': True}, + 'connect_via': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'}, + 'runtime': {'key': 'typeProperties.runtime', 'type': 'object'}, + 'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'}, + 'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'}, + 'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'}, + 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, + 'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'}, + 'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'}, + 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, + 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + } + + def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, **kwargs) -> None: + super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + 
self.package_location = package_location + self.runtime = runtime + self.logging_level = logging_level + self.environment_path = environment_path + self.execution_credential = execution_credential + self.connect_via = connect_via + self.project_parameters = project_parameters + self.package_parameters = package_parameters + self.project_connection_managers = project_connection_managers + self.package_connection_managers = package_connection_managers + self.property_overrides = property_overrides + self.type = 'ExecuteSSISPackage' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py new file mode 100644 index 000000000000..aca89a009b8e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity import Activity + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, + DatabricksSparkJarActivity, DatabricksNotebookActivity, + DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, + AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, + LookupActivity, DeleteActivity, SqlServerStoredProcedureActivity, + CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, HDInsightMapReduceActivity, + HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. 
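Since several activities above accept an ActivityPolicy, a small sketch of one; the field names are assumed from the ActivityPolicy model elsewhere in the package and the values are illustrative:

    from azure.mgmt.datafactory.models import ActivityPolicy

    policy = ActivityPolicy(
        timeout='0.01:00:00',            # d.hh:mm:ss timespan pattern
        retry=3,
        retry_interval_in_seconds=60,
        secure_output=True)
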
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + } + + def __init__(self, **kwargs): + super(ExecutionActivity, self).__init__(**kwargs) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) + self.type = 'Execution' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py new file mode 100644 index 000000000000..7f3b452fc3f9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .activity_py3 import Activity + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, + DatabricksSparkJarActivity, DatabricksNotebookActivity, + DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, + AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, + LookupActivity, DeleteActivity, SqlServerStoredProcedureActivity, + CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, + HDInsightStreamingActivity, HDInsightMapReduceActivity, + HDInsightPigActivity, HDInsightHiveActivity, CopyActivity + + All required parameters must be populated in order to send to Azure. 
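The _subtype_map above is what lets msrest pick a concrete subclass from the wire value of type, while each subclass pins self.type to its own discriminator. A rough sketch using the serialize() helper that msrest models inherit:

    from azure.mgmt.datafactory.models import ExecutionActivity

    act = ExecutionActivity(name='step1')
    act.type           # 'Execution', set by __init__
    act.serialize()    # roughly {'name': 'step1', 'type': 'Execution'}
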
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.linked_service_name = linked_service_name + self.policy = policy + self.type = 'Execution' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py new file mode 100644 index 000000000000..a6a2cc280b4d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlRequest(Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = kwargs.get('feature_name', None) + self.feature_type = kwargs.get('feature_type', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py new file mode 100644 index 000000000000..b3f4099fb972 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_request_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlRequest(Model): + """The exposure control request. + + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + """ + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'feature_type': {'key': 'featureType', 'type': 'str'}, + } + + def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None: + super(ExposureControlRequest, self).__init__(**kwargs) + self.feature_name = feature_name + self.feature_type = feature_type diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py new file mode 100644 index 000000000000..868647e3c5b3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. 
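The request model is plain writable fields, while the response below is read-only; a short sketch with hypothetical feature values:

    from azure.mgmt.datafactory.models import ExposureControlRequest

    request = ExposureControlRequest(feature_name='ADFSampleFeature',
                                     feature_type='Feature')
    # A call along the lines of
    # client.exposure_control.get_feature_value(...) would return the
    # read-only ExposureControlResponse defined here.
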
+ :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py new file mode 100644 index 000000000000..1ac7138e7984 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/exposure_control_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ExposureControlResponse(Model): + """The exposure control response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar feature_name: The feature name. + :vartype feature_name: str + :ivar value: The feature value. + :vartype value: str + """ + + _validation = { + 'feature_name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'feature_name': {'key': 'featureName', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ExposureControlResponse, self).__init__(**kwargs) + self.feature_name = None + self.value = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py new file mode 100644 index 000000000000..4b16ceca2794 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. 
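A minimal sketch of the Expression model: type is a class-level constant, so only value is supplied (the expression string is a hypothetical ADF expression):

    from azure.mgmt.datafactory.models import Expression

    expr = Expression(value="@pipeline().parameters.windowStart")
    expr.type   # always 'Expression'; the constant is validated on send
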
+ :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, **kwargs): + super(Expression, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py new file mode 100644 index 000000000000..c6ad023a57ed --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/expression_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Expression(Model): + """Azure Data Factory expression definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Expression type. Default value: "Expression" . + :vartype type: str + :param value: Required. Expression value. + :type value: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + type = "Expression" + + def __init__(self, *, value: str, **kwargs) -> None: + super(Expression, self).__init__(**kwargs) + self.value = value diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py new file mode 100644 index 000000000000..614b3d7fc97a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. 
+ :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, **kwargs): + super(Factory, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.identity = kwargs.get('identity', None) + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py new file mode 100644 index 000000000000..02cec39d8313 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration import FactoryRepoConfiguration + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. 
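A hedged sketch of the GitHub configuration defined above; every account and repository name is hypothetical, and host_name would be omitted for public GitHub:

    from azure.mgmt.datafactory.models import FactoryGitHubConfiguration

    repo = FactoryGitHubConfiguration(
        account_name='contoso',
        repository_name='adf-pipelines',
        collaboration_branch='master',
        root_folder='/',
        host_name='https://github.contoso.com')  # GitHub Enterprise only
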
+ :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param host_name: GitHub Enterprise host name. For example: + https://github.mydomain.com + :type host_name: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FactoryGitHubConfiguration, self).__init__(**kwargs) + self.host_name = kwargs.get('host_name', None) + self.type = 'FactoryGitHubConfiguration' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py new file mode 100644 index 000000000000..23c5dbf21f0c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_git_hub_configuration_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration_py3 import FactoryRepoConfiguration + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param host_name: GitHub Enterprise host name. 
For example: + https://github.mydomain.com + :type host_name: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None: + super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) + self.host_name = host_name + self.type = 'FactoryGitHubConfiguration' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py new file mode 100644 index 000000000000..dad745424af3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryIdentity(Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. 
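Because both identifiers are read-only and type is a constant, FactoryIdentity is constructed empty; a quick sketch:

    from azure.mgmt.datafactory.models import FactoryIdentity

    identity = FactoryIdentity()
    identity.type          # 'SystemAssigned', the only supported value
    identity.principal_id  # None until the service populates it
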
+ :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs): + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py new file mode 100644 index 000000000000..567100d8c19e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_identity_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryIdentity(Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type + is 'SystemAssigned'. Default value: "SystemAssigned" . + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. + :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__(self, **kwargs) -> None: + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py new file mode 100644 index 000000000000..589b44defc56 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------

+from msrest.paging import Paged


+class FactoryPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Factory <azure.mgmt.datafactory.models.Factory>` object
+    """

+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Factory]'}
+    }

+    def __init__(self, *args, **kwargs):

+        super(FactoryPaged, self).__init__(*args, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py
new file mode 100644
index 000000000000..0682aa5f8852
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_py3.py
@@ -0,0 +1,81 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------

+from .resource_py3 import Resource


+class Factory(Resource):
+    """Factory resource type.

+    Variables are only populated by the server, and will be ignored when
+    sending a request.

+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :param location: The resource location.
+    :type location: str
+    :param tags: The resource tags.
+    :type tags: dict[str, str]
+    :ivar e_tag: Etag identifies change in the resource.
+    :vartype e_tag: str
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param identity: Managed service identity of the factory.
+    :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity
+    :ivar provisioning_state: Factory provisioning state, example Succeeded.
+    :vartype provisioning_state: str
+    :ivar create_time: Time the factory was created in ISO8601 format.
+    :vartype create_time: datetime
+    :ivar version: Version of the factory.
+    :vartype version: str
+    :param repo_configuration: Git repo information of the factory.
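A minimal sketch of creating a Factory locally; the location and tags are hypothetical, and the read-only fields stay None until the service returns them:

    from azure.mgmt.datafactory.models import Factory, FactoryIdentity

    factory = Factory(location='eastus',
                      tags={'env': 'dev'},
                      identity=FactoryIdentity())
    factory.provisioning_state  # None; populated only in service responses
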
+ :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, **kwargs) -> None: + super(Factory, self).__init__(location=location, tags=tags, **kwargs) + self.additional_properties = additional_properties + self.identity = identity + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = repo_configuration diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py new file mode 100644 index 000000000000..7c20db016c71 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoConfiguration(Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} + } + + def __init__(self, **kwargs): + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = kwargs.get('account_name', None) + self.repository_name = kwargs.get('repository_name', None) + self.collaboration_branch = kwargs.get('collaboration_branch', None) + self.root_folder = kwargs.get('root_folder', None) + self.last_commit_id = kwargs.get('last_commit_id', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py new file mode 100644 index 000000000000..eefed7978850 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_configuration_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoConfiguration(Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'} + } + + def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None: + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.account_name = account_name + self.repository_name = repository_name + self.collaboration_branch = collaboration_branch + self.root_folder = root_folder + self.last_commit_id = last_commit_id + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py new file mode 100644 index 000000000000..44eac9d287ce --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, **kwargs): + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = kwargs.get('factory_resource_id', None) + self.repo_configuration = kwargs.get('repo_configuration', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py new file mode 100644 index 000000000000..68aca7a48db8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_repo_update_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
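A hedged sketch pairing FactoryRepoUpdate with the FactoryVSTSConfiguration subclass registered in the subtype map above (its fields mirror FactoryRepoConfiguration plus project_name); the resource id and all names below are placeholders:

    from azure.mgmt.datafactory.models import (FactoryRepoUpdate,
                                               FactoryVSTSConfiguration)

    update = FactoryRepoUpdate(
        factory_resource_id='/subscriptions/0000/resourceGroups/rg/'
                            'providers/Microsoft.DataFactory/factories/myadf',
        repo_configuration=FactoryVSTSConfiguration(
            account_name='contoso',
            repository_name='adf',
            collaboration_branch='master',
            root_folder='/',
            project_name='DataPlatform'))
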
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryRepoUpdate(Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None: + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = factory_resource_id + self.repo_configuration = repo_configuration diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py new file mode 100644 index 000000000000..e9977fceff86 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, **kwargs): + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py new file mode 100644 index 000000000000..5bd523fedf3d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_update_parameters_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class FactoryUpdateParameters(Model): + """Parameters for updating a factory resource. + + :param tags: The resource tags. 
+ :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__(self, *, tags=None, identity=None, **kwargs) -> None: + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = tags + self.identity = identity diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py new file mode 100644 index 000000000000..6d07c68d23e3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration import FactoryRepoConfiguration + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str + """ + + _validation = { + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'type': {'required': True}, + 'project_name': {'required': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project_name': {'key': 'projectName', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(FactoryVSTSConfiguration, self).__init__(**kwargs) + self.project_name = kwargs.get('project_name', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.type = 'FactoryVSTSConfiguration' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py new file mode 100644 index 000000000000..4f13c0959d63 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/factory_vsts_configuration_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .factory_repo_configuration_py3 import FactoryRepoConfiguration + + +class FactoryVSTSConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param type: Required. Constant filled by server. + :type type: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'account_name': {'required': True},
+ 'repository_name': {'required': True},
+ 'collaboration_branch': {'required': True},
+ 'root_folder': {'required': True},
+ 'type': {'required': True},
+ 'project_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'account_name': {'key': 'accountName', 'type': 'str'},
+ 'repository_name': {'key': 'repositoryName', 'type': 'str'},
+ 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+ 'root_folder': {'key': 'rootFolder', 'type': 'str'},
+ 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'project_name': {'key': 'projectName', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None:
+ super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
+ self.project_name = project_name
+ self.tenant_id = tenant_id
+ self.type = 'FactoryVSTSConfiguration'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py
new file mode 100644
index 000000000000..ffced5c2e689
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class FileServerLinkedService(LinkedService):
+ """File system linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param user_id: User ID used to log on to the server. Type: string (or
+ Expression with resultType string).
+ :type user_id: object
+ :param password: Password used to log on to the server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication.
Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FileServerLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.user_id = kwargs.get('user_id', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'FileServer'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
new file mode 100644
index 000000000000..ec6fe58bb3a3
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_linked_service_py3.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class FileServerLinkedService(LinkedService):
+ """File system linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param user_id: User ID used to log on to the server. Type: string (or
+ Expression with resultType string).
+ :type user_id: object
+ :param password: Password used to log on to the server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication.
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'FileServer' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py new file mode 100644 index 000000000000..edce5fe68a65 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerLocation, self).__init__(**kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py new file mode 100644 index 000000000000..f7fb8354bcbc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class FileServerLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(FileServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py new file mode 100644 index 000000000000..6ba2a5f56b79 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py new file mode 100644 index 000000000000..4393692d63f3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_read_setting_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class FileServerReadSetting(ConnectorReadSetting): + """File server read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string + (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: FileServer wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(FileServerReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py new file mode 100644 index 000000000000..9342210abdfb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileServerWriteSetting, self).__init__(**kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py new file mode 100644 index 000000000000..1ed4bf220417 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_server_write_setting_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_write_setting_py3 import ConnectorWriteSetting + + +class FileServerWriteSetting(ConnectorWriteSetting): + """File server write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The write setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py new file mode 100644 index 000000000000..6874f4c08929 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the on-premises file system. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string + (or Expression with resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. 
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, **kwargs): + super(FileShareDataset, self).__init__(**kwargs) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.file_filter = kwargs.get('file_filter', None) + self.compression = kwargs.get('compression', None) + self.type = 'FileShare' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py new file mode 100644 index 000000000000..19e88a264e12 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_share_dataset_py3.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param folder_path: The path of the on-premises file system. Type: string + (or Expression with resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string + (or Expression with resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of + files in the folderPath rather than all files. Type: string (or Expression + with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. + :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None: + super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.folder_path = folder_path + self.file_name = file_name 
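+ # The remaining optional settings (modified-datetime window, format,
+ # file filter, compression) serialize under the 'typeProperties.*' keys
+ # declared in _attribute_map above.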
+ self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.file_filter = file_filter + self.compression = compression + self.type = 'FileShare' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py new file mode 100644 index 000000000000..8b8f238c9534 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileSystemSink, self).__init__(**kwargs) + self.copy_behavior = kwargs.get('copy_behavior', None) + self.type = 'FileSystemSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py new file mode 100644 index 000000000000..24f8623cbb02 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class FileSystemSink(CopySink): + """A copy activity file system sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param copy_behavior: The type of copy behavior for copy sink. 
+ :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.copy_behavior = copy_behavior + self.type = 'FileSystemSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py new file mode 100644 index 000000000000..2986b1848153 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FileSystemSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.type = 'FileSystemSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py new file mode 100644 index 000000000000..0598490ca51c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/file_system_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class FileSystemSource(CopySource): + """A copy activity file system source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). 
+ :type recursive: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.type = 'FileSystemSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py new file mode 100644 index 000000000000..1346bb234695 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class FilterActivity(ControlActivity): + """Filter and return results from input array based on the conditions. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param items: Required. Input array on which filter should be applied. + :type items: ~azure.mgmt.datafactory.models.Expression + :param condition: Required. Condition to be used for filtering the input. 
+ :type condition: ~azure.mgmt.datafactory.models.Expression
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'items': {'required': True},
+ 'condition': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FilterActivity, self).__init__(**kwargs)
+ self.items = kwargs.get('items', None)
+ self.condition = kwargs.get('condition', None)
+ self.type = 'Filter'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py
new file mode 100644
index 000000000000..a07cf01d1dd5
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/filter_activity_py3.py
@@ -0,0 +1,61 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class FilterActivity(ControlActivity):
+ """Filter and return results from input array based on the conditions.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param items: Required. Input array on which filter should be applied.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param condition: Required. Condition to be used for filtering the input.
+ :type condition: ~azure.mgmt.datafactory.models.Expression
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'items': {'required': True},
+ 'condition': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
+ }
+
+ def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
+ super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.items = items
+ self.condition = condition
+ self.type = 'Filter'
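A minimal sketch of building the FilterActivity defined above. The upstream activity name and expressions are illustrative, not taken from this patch:

    from azure.mgmt.datafactory.models import Expression, FilterActivity

    # Keep only the file entries from a hypothetical 'GetFolderContents' run.
    keep_files = FilterActivity(
        name='KeepFiles',
        items=Expression(value="@activity('GetFolderContents').output.childItems"),
        condition=Expression(value="@equals(item().type, 'File')"),
    )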
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
new file mode 100644
index 000000000000..5edfa2a8140e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity import ControlActivity
+
+
+class ForEachActivity(ControlActivity):
+ """This activity is used for iterating over a collection and executing given
+ activities.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param is_sequential: Should the loop be executed in sequence or in
+ parallel (max 50).
+ :type is_sequential: bool
+ :param batch_count: Batch count to be used for controlling the number of
+ parallel executions (when isSequential is set to false).
+ :type batch_count: int
+ :param items: Required. Collection to iterate.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'batch_count': {'maximum': 50},
+ 'items': {'required': True},
+ 'activities': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+ 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ForEachActivity, self).__init__(**kwargs)
+ self.is_sequential = kwargs.get('is_sequential', None)
+ self.batch_count = kwargs.get('batch_count', None)
+ self.items = kwargs.get('items', None)
+ self.activities = kwargs.get('activities', None)
+ self.type = 'ForEach'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
new file mode 100644
index 000000000000..7c5c887bb1d9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/for_each_activity_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class ForEachActivity(ControlActivity):
+ """This activity is used for iterating over a collection and executing given
+ activities.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param is_sequential: Should the loop be executed in sequence or in
+ parallel (max 50).
+ :type is_sequential: bool
+ :param batch_count: Batch count to be used for controlling the number of
+ parallel executions (when isSequential is set to false).
+ :type batch_count: int
+ :param items: Required. Collection to iterate.
+ :type items: ~azure.mgmt.datafactory.models.Expression
+ :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'batch_count': {'maximum': 50},
+ 'items': {'required': True},
+ 'activities': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
+ 'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
+ 'items': {'key': 'typeProperties.items', 'type': 'Expression'},
+ 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None:
+ super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.is_sequential = is_sequential
+ self.batch_count = batch_count
+ self.items = items
+ self.activities = activities
+ self.type = 'ForEach'
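A sketch of the ForEachActivity above, fanning out over the filtered items. Here copy_file stands in for any previously built Activity and is assumed, not defined in this patch:

    from azure.mgmt.datafactory.models import Expression, ForEachActivity

    loop = ForEachActivity(
        name='CopyEachFile',
        items=Expression(value="@activity('KeepFiles').output.value"),
        activities=[copy_file],  # hypothetical Activity built elsewhere
        is_sequential=False,
        batch_count=10,          # the validation above caps this at 50
    )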
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py
new file mode 100644
index 000000000000..730cec9f525f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class FormatReadSetting(Model):
+ """Format read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FormatReadSetting, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = kwargs.get('type', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py
new file mode 100644
index 000000000000..ed68bf35f009
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_read_setting_py3.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class FormatReadSetting(Model):
+ """Format read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None:
+ super(FormatReadSetting, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.type = type
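The empty 'key' on additional_properties is what lets unmodeled settings ride along at the top level of the JSON payload. A sketch, assuming msrest's Model.serialize() and an illustrative setting name:

    from azure.mgmt.datafactory.models import FormatReadSetting

    setting = FormatReadSetting(
        type='DelimitedTextReadSetting',
        additional_properties={'skipLineCount': 2},
    )
    # _attribute_map drives the wire names, so this serializes to roughly:
    # {'type': 'DelimitedTextReadSetting', 'skipLineCount': 2}
    payload = setting.serialize()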
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py
new file mode 100644
index 000000000000..0fd6966859d5
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class FormatWriteSetting(Model):
+ """Format write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FormatWriteSetting, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = kwargs.get('type', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py
new file mode 100644
index 000000000000..3e5609066208
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/format_write_setting_py3.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class FormatWriteSetting(Model):
+ """Format write settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The write setting type.
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, **kwargs) -> None:
+ super(FormatWriteSetting, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.type = type
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py
new file mode 100644
index 000000000000..137a56948deb
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class FtpReadSetting(ConnectorReadSetting):
+ """FTP read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: FTP wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: FTP wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param use_binary_transfer: Specify whether to use binary transfer mode
+ for FTP stores.
+ :type use_binary_transfer: bool
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FtpReadSetting, self).__init__(**kwargs)
+ self.recursive = kwargs.get('recursive', None)
+ self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+ self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.use_binary_transfer = kwargs.get('use_binary_transfer', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py
new file mode 100644
index 000000000000..5294301e4fd8
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_read_setting_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class FtpReadSetting(ConnectorReadSetting):
+ """FTP read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param recursive: If true, files under the folder path will be read
+ recursively. Default is true. Type: boolean (or Expression with resultType
+ boolean).
+ :type recursive: object
+ :param wildcard_folder_path: FTP wildcardFolderPath. Type: string (or
+ Expression with resultType string).
+ :type wildcard_folder_path: object
+ :param wildcard_file_name: FTP wildcardFileName. Type: string (or
+ Expression with resultType string).
+ :type wildcard_file_name: object
+ :param use_binary_transfer: Specify whether to use binary transfer mode
+ for FTP stores.
+ :type use_binary_transfer: bool
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'recursive': {'key': 'recursive', 'type': 'object'},
+ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+ 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None:
+ super(FtpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.recursive = recursive
+ self.wildcard_folder_path = wildcard_folder_path
+ self.wildcard_file_name = wildcard_file_name
+ self.use_binary_transfer = use_binary_transfer
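A sketch of the FtpReadSetting above; note that type is required here and is not filled in by the constructor, unlike the polymorphic sources and sinks. Values are illustrative:

    from azure.mgmt.datafactory.models import FtpReadSetting

    read_setting = FtpReadSetting(
        type='FtpReadSetting',
        recursive=True,
        wildcard_file_name='*.csv',  # match every CSV under the folder path
        use_binary_transfer=True,
    )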
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py
new file mode 100644
index 000000000000..e649ca56e37c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class FtpServerLinkedService(LinkedService):
+ """An FTP server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the FTP server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param port: The TCP port number that the FTP server uses to listen for
+ client connections. Default value is 21. Type: integer (or Expression with
+ resultType integer), minimum: 0.
+ :type port: object
+ :param authentication_type: The authentication type to be used to connect
+ to the FTP server.
Possible values include: 'Basic', 'Anonymous'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.FtpAuthenticationType
+ :param user_name: Username to log on to the FTP server. Type: string (or
+ Expression with resultType string).
+ :type user_name: object
+ :param password: Password to log on to the FTP server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+ channel. Default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type enable_ssl: object
+ :param enable_server_certificate_validation: If true, validate the FTP
+ server SSL certificate when connecting over an SSL/TLS channel. Default
+ value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FtpServerLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.port = kwargs.get('port', None)
+ self.authentication_type = kwargs.get('authentication_type', None)
+ self.user_name = kwargs.get('user_name', None)
+ self.password = kwargs.get('password', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.enable_ssl = kwargs.get('enable_ssl', None)
+ self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None)
+ self.type = 'FtpServer'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
new file mode 100644
index 000000000000..b38ad1c03f46
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_linked_service_py3.py
@@ -0,0 +1,98 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class FtpServerLinkedService(LinkedService):
+ """An FTP server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. Host name of the FTP server. Type: string (or
+ Expression with resultType string).
+ :type host: object
+ :param port: The TCP port number that the FTP server uses to listen for
+ client connections. Default value is 21. Type: integer (or Expression with
+ resultType integer), minimum: 0.
+ :type port: object
+ :param authentication_type: The authentication type to be used to connect
+ to the FTP server. Possible values include: 'Basic', 'Anonymous'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.FtpAuthenticationType
+ :param user_name: Username to log on to the FTP server. Type: string (or
+ Expression with resultType string).
+ :type user_name: object
+ :param password: Password to log on to the FTP server.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
+ channel. Default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type enable_ssl: object
+ :param enable_server_certificate_validation: If true, validate the FTP
+ server SSL certificate when connecting over an SSL/TLS channel. Default
+ value is true. Type: boolean (or Expression with resultType boolean).
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None:
+ super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.port = port
+ self.authentication_type = authentication_type
+ self.user_name = user_name
+ self.password = password
+ self.encrypted_credential = encrypted_credential
+ self.enable_ssl = enable_ssl
+ self.enable_server_certificate_validation = enable_server_certificate_validation
+ self.type = 'FtpServer'
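A sketch of the linked service above with basic authentication. Host and credentials are placeholders, and SecureString is one concrete SecretBase from this package:

    from azure.mgmt.datafactory.models import FtpServerLinkedService, SecureString

    ftp_service = FtpServerLinkedService(
        host='ftp.example.com',
        port=21,
        authentication_type='Basic',
        user_name='loader',
        password=SecureString(value='<placeholder>'),
        enable_ssl=True,
    )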
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
new file mode 100644
index 000000000000..5d5e933036df
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class FtpServerLocation(DatasetLocation):
+ """The location of an FTP server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset.
Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(FtpServerLocation, self).__init__(**kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
new file mode 100644
index 000000000000..ac296bcfca31
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ftp_server_location_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class FtpServerLocation(DatasetLocation):
+ """The location of an FTP server dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string).
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+ super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
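A sketch of the location above; as both constructors show, DatasetLocation subclasses do not pin their own type, so the caller supplies it. Paths are illustrative:

    from azure.mgmt.datafactory.models import FtpServerLocation

    location = FtpServerLocation(
        type='FtpServerLocation',      # not auto-filled, unlike the activities
        folder_path='incoming/daily',
        file_name='report.csv',
    )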
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py
new file mode 100644
index 000000000000..7941189f2dcd
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity.py
@@ -0,0 +1,67 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class GetMetadataActivity(ExecutionActivity):
+ """Activity to get metadata of dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param dataset: Required. GetMetadata activity dataset reference.
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+ :param field_list: Fields of metadata to get from dataset.
+ :type field_list: list[object]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'dataset': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+ 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GetMetadataActivity, self).__init__(**kwargs)
+ self.dataset = kwargs.get('dataset', None)
+ self.field_list = kwargs.get('field_list', None)
+ self.type = 'GetMetadata'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py
new file mode 100644
index 000000000000..b4d8eb17cab1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_metadata_activity_py3.py
@@ -0,0 +1,67 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class GetMetadataActivity(ExecutionActivity):
+ """Activity to get metadata of dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param linked_service_name: Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+ :param dataset: Required. GetMetadata activity dataset reference.
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+ :param field_list: Fields of metadata to get from dataset.
+ :type field_list: list[object]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'dataset': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+ 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+ 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'},
+ }
+
+ def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, **kwargs) -> None:
+ super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
+ self.dataset = dataset
+ self.field_list = field_list
+ self.type = 'GetMetadata'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py
new file mode 100644
index 000000000000..1be4a2afece0
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request.py
@@ -0,0 +1,28 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class GetSsisObjectMetadataRequest(Model):
+ """The request payload of get SSIS object metadata.
+
+ :param metadata_path: Metadata path.
+ :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = kwargs.get('metadata_path', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py new file mode 100644 index 000000000000..310cd9783d81 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/get_ssis_object_metadata_request_py3.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GetSsisObjectMetadataRequest(Model): + """The request payload of get SSIS object metadata. + + :param metadata_path: Metadata path. + :type metadata_path: str + """ + + _attribute_map = { + 'metadata_path': {'key': 'metadataPath', 'type': 'str'}, + } + + def __init__(self, *, metadata_path: str=None, **kwargs) -> None: + super(GetSsisObjectMetadataRequest, self).__init__(**kwargs) + self.metadata_path = metadata_path diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py new file mode 100644 index 000000000000..cadecdf70f44 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. 
+ :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = kwargs.get('git_hub_access_code', None) + self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_access_token_base_url = kwargs.get('git_hub_access_token_base_url', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py new file mode 100644 index 000000000000..7961e1bc33ed --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_request_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class GitHubAccessTokenRequest(Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base + URL. + :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None: + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = git_hub_access_code + self.git_hub_client_id = git_hub_client_id + self.git_hub_access_token_base_url = git_hub_access_token_base_url diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py new file mode 100644 index 000000000000..4a4afce8f0f0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class GitHubAccessTokenResponse(Model):
+ """Get GitHub access token response definition.
+
+ :param git_hub_access_token: GitHub access token.
+ :type git_hub_access_token: str
+ """
+
+ _attribute_map = {
+ 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(GitHubAccessTokenResponse, self).__init__(**kwargs)
+ self.git_hub_access_token = kwargs.get('git_hub_access_token', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py
new file mode 100644
index 000000000000..4f28ade6e914
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/git_hub_access_token_response_py3.py
@@ -0,0 +1,28 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class GitHubAccessTokenResponse(Model):
+ """Get GitHub access token response definition.
+
+ :param git_hub_access_token: GitHub access token.
+ :type git_hub_access_token: str
+ """
+
+ _attribute_map = {
+ 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
+ }
+
+ def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None:
+ super(GitHubAccessTokenResponse, self).__init__(**kwargs)
+ self.git_hub_access_token = git_hub_access_token
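A sketch of the token-exchange pair defined earlier in this patch; all values are placeholders for the OAuth flow, and the response model simply carries the resulting token:

    from azure.mgmt.datafactory.models import GitHubAccessTokenRequest

    request = GitHubAccessTokenRequest(
        git_hub_access_code='<code from the OAuth callback>',
        git_hub_access_token_base_url='<GitHub access token base URL>',
        git_hub_client_id='<application client id>',
    )
    # The service replies with a GitHubAccessTokenResponse; its
    # git_hub_access_token field holds the token.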
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py
new file mode 100644
index 000000000000..c460dd95c380
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service.py
@@ -0,0 +1,119 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class GoogleAdWordsLinkedService(LinkedService):
+ """Google AdWords service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param client_customer_id: Required. The client customer ID of the AdWords
+ account that you want to fetch report data for.
+ :type client_customer_id: object
+ :param developer_token: Required. The developer token associated with the
+ manager account that you use to grant access to the AdWords API.
+ :type developer_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param authentication_type: Required. The OAuth 2.0 authentication
+ mechanism used for authentication. ServiceAuthentication can only be used
+ on self-hosted IR. Possible values include: 'ServiceAuthentication',
+ 'UserAuthentication'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType
+ :param refresh_token: The refresh token obtained from Google for
+ authorizing access to AdWords for UserAuthentication.
+ :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_id: The client ID of the Google application used to acquire
+ the refresh token.
+ :type client_id: ~azure.mgmt.datafactory.models.SecretBase
+ :param client_secret: The client secret of the Google application used to
+ acquire the refresh token.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param email: The service account email ID that is used for
+ ServiceAuthentication and can only be used on self-hosted IR.
+ :type email: object
+ :param key_file_path: The full path to the .p12 key file that is used to
+ authenticate the service account email address and can only be used on
+ self-hosted IR.
+ :type key_file_path: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsLinkedService, self).__init__(**kwargs) + self.client_customer_id = kwargs.get('client_customer_id', None) + self.developer_token = kwargs.get('developer_token', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleAdWords' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py new file mode 100644 index 000000000000..dfb3bc07e69f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_linked_service_py3.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleAdWordsLinkedService(LinkedService): + """Google AdWords service linked service. + + All required parameters must be populated in order to send to Azure. 
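+
+    A minimal construction sketch (editorial illustration only; the customer
+    ID and secret values are placeholders, and SecureString is assumed to be
+    the SecretBase implementation used for inline secrets)::
+
+        from azure.mgmt.datafactory.models import (
+            GoogleAdWordsLinkedService, SecureString)
+
+        ls = GoogleAdWordsLinkedService(
+            client_customer_id='123-456-7890',
+            developer_token=SecureString(value='<developer-token>'),
+            authentication_type='UserAuthentication',
+            refresh_token=SecureString(value='<refresh-token>'),
+            client_id=SecureString(value='<client-id>'),
+            client_secret=SecureString(value='<client-secret>'))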
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_customer_id: Required. The Client customer ID of the AdWords + account that you want to fetch report data for. + :type client_customer_id: object + :param developer_token: Required. The developer token associated with the + manager account that you use to grant access to the AdWords API. + :type developer_token: ~azure.mgmt.datafactory.models.SecretBase + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to AdWords for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_customer_id': {'required': True}, + 'developer_token': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'}, + 'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_customer_id = client_customer_id + self.developer_token = developer_token + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleAdWords' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py new file mode 100644 index 000000000000..92b901b774ed --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
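+#
+# Editorial note: a hedged usage sketch for the model defined below (the
+# reference name and report table are placeholders, not part of the
+# generated API):
+#
+#     from azure.mgmt.datafactory.models import (
+#         GoogleAdWordsObjectDataset, LinkedServiceReference)
+#
+#     ds = GoogleAdWordsObjectDataset(
+#         linked_service_name=LinkedServiceReference(
+#             reference_name='MyGoogleAdWordsLinkedService'),
+#         table_name='CAMPAIGN_PERFORMANCE_REPORT')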
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GoogleAdWordsObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py new file mode 100644 index 000000000000..e1272f978b8e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
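+#
+# Editorial note: a hedged sketch of the keyword-only Python 3 constructor
+# defined below (reference and folder names are placeholders):
+#
+#     from azure.mgmt.datafactory.models import (
+#         DatasetFolder, GoogleAdWordsObjectDataset, LinkedServiceReference)
+#
+#     ds = GoogleAdWordsObjectDataset(
+#         linked_service_name=LinkedServiceReference(
+#             reference_name='MyGoogleAdWordsLinkedService'),
+#         folder=DatasetFolder(name='GoogleAdWords'),
+#         table_name='CAMPAIGN_PERFORMANCE_REPORT')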
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleAdWordsObjectDataset(Dataset): + """Google AdWords service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleAdWordsObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py new file mode 100644 index 000000000000..8699057abe09 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleAdWordsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleAdWordsSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py new file mode 100644 index 000000000000..995d5324670b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_ad_words_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleAdWordsSource(CopySource): + """A copy activity Google AdWords service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleAdWordsSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py new file mode 100644 index 000000000000..45a535b95d43 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery + projects to access. 
+ :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google + Drive. Allowing Google Drive access enables support for federated tables + that combine BigQuery data with data from Google Drive. The default value + is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to BigQuery for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQueryLinkedService, self).__init__(**kwargs) + self.project = kwargs.get('project', None) + self.additional_projects = kwargs.get('additional_projects', None) + self.request_google_drive_scope = kwargs.get('request_google_drive_scope', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.email = kwargs.get('email', None) + self.key_file_path = kwargs.get('key_file_path', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleBigQuery' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py new file mode 100644 index 000000000000..146674a85531 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service_py3.py @@ -0,0 +1,124 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GoogleBigQueryLinkedService(LinkedService): + """Google BigQuery service linked service. + + All required parameters must be populated in order to send to Azure. 
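+
+    A minimal construction sketch (editorial illustration only; the project
+    name and secret values are placeholders, and SecureString is assumed to
+    be the SecretBase implementation used for inline secrets)::
+
+        from azure.mgmt.datafactory.models import (
+            GoogleBigQueryLinkedService, SecureString)
+
+        ls = GoogleBigQueryLinkedService(
+            project='my-bigquery-project',
+            authentication_type='UserAuthentication',
+            refresh_token=SecureString(value='<refresh-token>'),
+            client_id=SecureString(value='<client-id>'),
+            client_secret=SecureString(value='<client-secret>'))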
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param project: Required. The default BigQuery project to query against. + :type project: object + :param additional_projects: A comma-separated list of public BigQuery + projects to access. + :type additional_projects: object + :param request_google_drive_scope: Whether to request access to Google + Drive. Allowing Google Drive access enables support for federated tables + that combine BigQuery data with data from Google Drive. The default value + is false. + :type request_google_drive_scope: object + :param authentication_type: Required. The OAuth 2.0 authentication + mechanism used for authentication. ServiceAuthentication can only be used + on self-hosted IR. Possible values include: 'ServiceAuthentication', + 'UserAuthentication' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType + :param refresh_token: The refresh token obtained from Google for + authorizing access to BigQuery for UserAuthentication. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param client_id: The client id of the google application used to acquire + the refresh token. + :type client_id: ~azure.mgmt.datafactory.models.SecretBase + :param client_secret: The client secret of the google application used to + acquire the refresh token. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param email: The service account email ID that is used for + ServiceAuthentication and can only be used on self-hosted IR. + :type email: object + :param key_file_path: The full path to the .p12 key file that is used to + authenticate the service account email address and can only be used on + self-hosted IR. + :type key_file_path: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'project': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'project': {'key': 'typeProperties.project', 'type': 'object'}, + 'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'}, + 'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'email': {'key': 'typeProperties.email', 'type': 'object'}, + 'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.project = project + self.additional_projects = additional_projects + self.request_google_drive_scope = request_google_drive_scope + self.authentication_type = authentication_type + self.refresh_token = refresh_token + self.client_id = client_id + self.client_secret = client_secret + self.email = email + self.key_file_path = key_file_path + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.encrypted_credential = encrypted_credential + self.type = 'GoogleBigQuery' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py new file mode 100644 index 000000000000..5750875dc3a0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
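+#
+# Editorial note: a hedged usage sketch for the model defined below (the
+# reference name and dataset.table value are placeholders, not part of the
+# generated API):
+#
+#     from azure.mgmt.datafactory.models import (
+#         GoogleBigQueryObjectDataset, LinkedServiceReference)
+#
+#     ds = GoogleBigQueryObjectDataset(
+#         linked_service_name=LinkedServiceReference(
+#             reference_name='MyGoogleBigQueryLinkedService'),
+#         table_name='mydataset.mytable')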
+# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQueryObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GoogleBigQueryObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py new file mode 100644 index 000000000000..625cd068b731 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
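+#
+# Editorial note: a hedged sketch of the keyword-only Python 3 constructor
+# defined below (all names are placeholders):
+#
+#     from azure.mgmt.datafactory.models import (
+#         GoogleBigQueryObjectDataset, LinkedServiceReference)
+#
+#     ds = GoogleBigQueryObjectDataset(
+#         linked_service_name=LinkedServiceReference(
+#             reference_name='MyGoogleBigQueryLinkedService'),
+#         annotations=['bigquery'],
+#         table_name='mydataset.mytable')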
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GoogleBigQueryObjectDataset(Dataset): + """Google BigQuery service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GoogleBigQueryObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py new file mode 100644 index 000000000000..3a28d2563a8b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class GoogleBigQuerySource(CopySource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleBigQuerySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GoogleBigQuerySource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py new file mode 100644 index 000000000000..49364b4d0e3f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GoogleBigQuerySource(CopySource): + """A copy activity Google BigQuery service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GoogleBigQuerySource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py new file mode 100644 index 000000000000..57913f779ca1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. 
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GreenplumLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Greenplum' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py new file mode 100644 index 000000000000..bd707a5e85c9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class GreenplumLinkedService(LinkedService): + """Greenplum Database linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Greenplum' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py new file mode 100644 index 000000000000..086f12419f4a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class GreenplumSource(CopySource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GreenplumSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'GreenplumSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py new file mode 100644 index 000000000000..8b789deb43da --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class GreenplumSource(CopySource): + """A copy activity Greenplum Database source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'GreenplumSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py new file mode 100644 index 000000000000..fa4a066f11a9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GreenplumTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'GreenplumTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py new file mode 100644 index 000000000000..7c698db22339 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/greenplum_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class GreenplumTableDataset(Dataset): + """Greenplum Database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
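+# --- Editorial aside: hedged usage sketch (illustrative only, not part of the generated patch) ---
+# The dataset's only required argument is the linked service reference; the reference
+# name and table are placeholders, and LinkedServiceReference is assumed to be the
+# reference model elsewhere in this package.
+from azure.mgmt.datafactory.models import GreenplumTableDataset, LinkedServiceReference
+dataset = GreenplumTableDataset(
+    linked_service_name=LinkedServiceReference(reference_name='GreenplumLS'),
+    table_name='public.sales',
+)
+# --- end editorial aside ---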
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'GreenplumTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py new file mode 100644 index 000000000000..b6affd5caa0d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the HBase server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the HBase instance uses to listen for + client connections. The default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version) + :type http_path: object + :param authentication_type: Required. 
The authentication mechanism to use + to connect to the HBase server. Possible values include: 'Anonymous', + 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) 
+ self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'HBase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py new file mode 100644 index 000000000000..a8823e2e8937 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_linked_service_py3.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HBaseLinkedService(LinkedService): + """HBase server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the HBase server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the HBase instance uses to listen for + client connections. The default value is 9090. + :type port: object + :param http_path: The partial URL corresponding to the HBase server. (i.e. + /gateway/sandbox/hbase/version) + :type http_path: object + :param authentication_type: Required. The authentication mechanism to use + to connect to the HBase server. Possible values include: 'Anonymous', + 'Basic' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HBaseAuthenticationType + :param username: The user name used to connect to the HBase instance. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. 
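+# --- Editorial aside: hedged usage sketch (illustrative only, not part of the generated patch) ---
+# host and authentication_type are the required type properties; a Basic-auth variant
+# also passes username and a SecretBase password. Values are placeholders, and
+# SecureString is assumed to be the plain-text SecretBase model in this package.
+from azure.mgmt.datafactory.models import HBaseLinkedService, SecureString
+hbase_ls = HBaseLinkedService(
+    host='192.168.222.160',
+    authentication_type='Basic',
+    username='reader',
+    password=SecureString(value='example-password'),  # placeholder secret
+    enable_ssl=True,
+)
+# --- end editorial aside ---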
+ :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'HBase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py new file mode 100644 index 000000000000..5de32bcb6871 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HBaseObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py new file mode 100644 index 000000000000..27fc0d1514ea --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HBaseObjectDataset(Dataset): + """HBase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HBaseObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py new file mode 100644 index 000000000000..eb6e3f1789bb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HBaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HBaseSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py new file mode 100644 index 000000000000..b2680e95c212 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hbase_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HBaseSource(CopySource): + """A copy activity HBase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HBaseSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py new file mode 100644 index 000000000000..4110b0f8b7de --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. 
Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). Effective when + the HDInsight cluster is with ESP (Enterprise Security Package) + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(HDInsightHiveActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.script_path = kwargs.get('script_path', None) + self.script_linked_service = kwargs.get('script_linked_service', None) + self.defines = kwargs.get('defines', None) + self.variables = kwargs.get('variables', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'HDInsightHive' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py new file mode 100644 index 000000000000..f8a5441fe767 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightHiveActivity(ExecutionActivity): + """HDInsight Hive activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Hive job request. + :type defines: dict[str, object] + :param variables: User specified arguments under hivevar namespace. + :type variables: list[object] + :param query_timeout: Query timeout value (in minutes). 
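+# --- Editorial aside: hedged usage sketch (illustrative only, not part of the generated patch) ---
+# Only name is required for the HDInsightHiveActivity defined above; pointing
+# script_path at an .hql file in a storage linked service is the typical shape.
+# All reference names and paths below are placeholders.
+from azure.mgmt.datafactory.models import HDInsightHiveActivity, LinkedServiceReference
+hive_activity = HDInsightHiveActivity(
+    name='RunDailyHiveQuery',
+    linked_service_name=LinkedServiceReference(reference_name='HDICluster'),
+    script_path='scripts/daily.hql',
+    script_linked_service=LinkedServiceReference(reference_name='BlobStore'),
+    defines={'run_date': '2019-05-29'},
+    get_debug_info='Failure',
+)
+# --- end editorial aside ---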
Effective when + the HDInsight cluster is with ESP (Enterprise Security Package) + :type query_timeout: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + 'variables': {'key': 'typeProperties.variables', 'type': '[object]'}, + 'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None: + super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.variables = variables + self.query_timeout = query_timeout + self.type = 'HDInsightHive' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py new file mode 100644 index 000000000000..7249c12a9f16 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_uri: Required. HDInsight cluster URI. Type: string (or + Expression with resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. 
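+# --- Editorial aside: hedged usage sketch (illustrative only, not part of the generated patch) ---
+# For the HDInsightLinkedService documented above, cluster_uri is the one required
+# type property; the storage linked service and credentials are optional. Values are
+# placeholders; SecureString is assumed to be the plain-text SecretBase model.
+from azure.mgmt.datafactory.models import (
+    HDInsightLinkedService, LinkedServiceReference, SecureString)
+hdi_ls = HDInsightLinkedService(
+    cluster_uri='https://mycluster.azurehdinsight.net',
+    user_name='admin',
+    password=SecureString(value='example-password'),  # placeholder secret
+    linked_service_name=LinkedServiceReference(reference_name='BlobStore'),
+)
+# --- end editorial aside ---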
+ :type is_esp_enabled: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HDInsightLinkedService, self).__init__(**kwargs) + self.cluster_uri = kwargs.get('cluster_uri', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.is_esp_enabled = kwargs.get('is_esp_enabled', None) + self.type = 'HDInsight' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py new file mode 100644 index 000000000000..44e9f28b2a13 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HDInsightLinkedService(LinkedService): + """HDInsight linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param cluster_uri: Required. HDInsight cluster URI. 
Type: string (or + Expression with resultType string). + :type cluster_uri: object + :param user_name: HDInsight cluster user name. Type: string (or Expression + with resultType string). + :type user_name: object + :param password: HDInsight cluster password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param linked_service_name: The Azure Storage linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param hcatalog_linked_service_name: A reference to the Azure SQL linked + service that points to the HCatalog database. + :type hcatalog_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param is_esp_enabled: Specify if the HDInsight is created with ESP + (Enterprise Security Package). Type: Boolean. + :type is_esp_enabled: object + """ + + _validation = { + 'type': {'required': True}, + 'cluster_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'}, + } + + def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, **kwargs) -> None: + super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_uri = cluster_uri + self.user_name = user_name + self.password = password + self.linked_service_name = linked_service_name + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.encrypted_credential = encrypted_credential + self.is_esp_enabled = is_esp_enabled + self.type = 'HDInsight' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py new file mode 100644 index 000000000000..20655843e1db --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. 
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightMapReduceActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.class_name = kwargs.get('class_name', None) + self.jar_file_path = kwargs.get('jar_file_path', None) + self.jar_linked_service = kwargs.get('jar_linked_service', None) + self.jar_libs = kwargs.get('jar_libs', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightMapReduce' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py new file mode 100644 index 000000000000..dffa9f119069 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_map_reduce_activity_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class HDInsightMapReduceActivity(ExecutionActivity): + """HDInsight MapReduce activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. 
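+# --- Editorial aside: hedged usage sketch (illustrative only, not part of the generated patch) ---
+# name, class_name and jar_file_path are required for HDInsightMapReduceActivity;
+# jar_linked_service locates the jar. All names and paths below are placeholders.
+from azure.mgmt.datafactory.models import HDInsightMapReduceActivity, LinkedServiceReference
+mr_activity = HDInsightMapReduceActivity(
+    name='WordCount',
+    class_name='org.example.WordCount',
+    jar_file_path='jars/wordcount.jar',
+    jar_linked_service=LinkedServiceReference(reference_name='BlobStore'),
+    linked_service_name=LinkedServiceReference(reference_name='HDICluster'),
+    arguments=['wasb:///input', 'wasb:///output'],
+)
+# --- end editorial aside ---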
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param class_name: Required. Class name. Type: string (or Expression with + resultType string). + :type class_name: object + :param jar_file_path: Required. Jar path. Type: string (or Expression with + resultType string). + :type jar_file_path: object + :param jar_linked_service: Jar linked service reference. + :type jar_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param jar_libs: Jar libs. + :type jar_libs: list[object] + :param defines: Allows user to specify defines for the MapReduce job + request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'class_name': {'required': True}, + 'jar_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'object'}, + 'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'}, + 'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'}, + 'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None: + super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.class_name = class_name + self.jar_file_path = jar_file_path + self.jar_linked_service = 
jar_linked_service
+        self.jar_libs = jar_libs
+        self.defines = defines
+        self.type = 'HDInsightMapReduce'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
new file mode 100644
index 000000000000..f5af746f764f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
@@ -0,0 +1,225 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class HDInsightOnDemandLinkedService(LinkedService):
+    """HDInsight on-demand linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param cluster_size: Required. Number of worker/data nodes in the cluster.
+     Suggested value: 4. Type: string (or Expression with resultType string).
+    :type cluster_size: object
+    :param time_to_live: Required. The allowed idle time for the on-demand
+     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
+     stays alive after completion of an activity run if there are no other
+     active jobs in the cluster. The minimum value is 5 mins. Type: string (or
+     Expression with resultType string).
+    :type time_to_live: object
+    :param version: Required. Version of the HDInsight cluster. Type: string
+     (or Expression with resultType string).
+    :type version: object
+    :param linked_service_name: Required. Azure Storage linked service to be
+     used by the on-demand cluster for storing and processing data.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param host_subscription_id: Required. The customer’s subscription to host
+     the cluster. Type: string (or Expression with resultType string).
+    :type host_subscription_id: object
+    :param service_principal_id: The service principal id for the
+     hostSubscriptionId. Type: string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The key for the service principal id.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The Tenant id/name to which the service principal
+     belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param cluster_resource_group: Required. The resource group where the
+     cluster belongs. Type: string (or Expression with resultType string).
+    :type cluster_resource_group: object
+    :param cluster_name_prefix: The prefix of the cluster name; a timestamp
+     postfix is appended to make the name distinct. Type: string (or
+     Expression with resultType string).
+    :type cluster_name_prefix: object
+    :param cluster_user_name: The username to access the cluster. Type: string
+     (or Expression with resultType string).
+    :type cluster_user_name: object
+    :param cluster_password: The password to access the cluster.
+    :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
+    :param cluster_ssh_user_name: The username used to remotely connect to the
+     cluster’s node via SSH (for Linux). Type: string (or Expression with
+     resultType string).
+    :type cluster_ssh_user_name: object
+    :param cluster_ssh_password: The password used to remotely connect to the
+     cluster’s node via SSH (for Linux).
+    :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
+    :param additional_linked_service_names: Specifies additional storage
+     accounts for the HDInsight linked service so that the Data Factory
+     service can register them on your behalf.
+    :type additional_linked_service_names:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param hcatalog_linked_service_name: The name of the Azure SQL linked
+     service that points to the HCatalog database. The on-demand HDInsight
+     cluster is created by using the Azure SQL database as the metastore.
+    :type hcatalog_linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param cluster_type: The cluster type. Type: string (or Expression with
+     resultType string).
+    :type cluster_type: object
+    :param spark_version: The version of spark if the cluster type is 'spark'.
+     Type: string (or Expression with resultType string).
+    :type spark_version: object
+    :param core_configuration: Specifies the core configuration parameters (as
+     in core-site.xml) for the HDInsight cluster to be created.
+    :type core_configuration: object
+    :param h_base_configuration: Specifies the HBase configuration parameters
+     (hbase-site.xml) for the HDInsight cluster.
+    :type h_base_configuration: object
+    :param hdfs_configuration: Specifies the HDFS configuration parameters
+     (hdfs-site.xml) for the HDInsight cluster.
+    :type hdfs_configuration: object
+    :param hive_configuration: Specifies the hive configuration parameters
+     (hive-site.xml) for the HDInsight cluster.
+    :type hive_configuration: object
+    :param map_reduce_configuration: Specifies the MapReduce configuration
+     parameters (mapred-site.xml) for the HDInsight cluster.
+    :type map_reduce_configuration: object
+    :param oozie_configuration: Specifies the Oozie configuration parameters
+     (oozie-site.xml) for the HDInsight cluster.
+    :type oozie_configuration: object
+    :param storm_configuration: Specifies the Storm configuration parameters
+     (storm-site.xml) for the HDInsight cluster.
+    :type storm_configuration: object
+    :param yarn_configuration: Specifies the Yarn configuration parameters
+     (yarn-site.xml) for the HDInsight cluster.
+    :type yarn_configuration: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param head_node_size: Specifies the size of the head node for the
+     HDInsight cluster.
+    :type head_node_size: object
+    :param data_node_size: Specifies the size of the data node for the
+     HDInsight cluster.
+    :type data_node_size: object
+    :param zookeeper_node_size: Specifies the size of the ZooKeeper node for
+     the HDInsight cluster.
+    :type zookeeper_node_size: object
+    :param script_actions: Custom script actions to run on the HDI on-demand
+     cluster once it's up. Please refer to
+     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
+    :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction]
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'cluster_size': {'required': True},
+        'time_to_live': {'required': True},
+        'version': {'required': True},
+        'linked_service_name': {'required': True},
+        'host_subscription_id': {'required': True},
+        'tenant': {'required': True},
+        'cluster_resource_group': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'},
+        'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'},
+        'version': {'key': 'typeProperties.version', 'type': 'object'},
+        'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
+        'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+        'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'},
+        'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'},
+        'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'},
+        'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'},
+        'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'},
+        'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'},
+        'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'},
+        'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'},
+        'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'},
+        'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'},
+        'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'},
+        'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'},
+        'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'},
+        'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'},
+        'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'},
+        'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type':
'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + } + + def __init__(self, **kwargs): + super(HDInsightOnDemandLinkedService, self).__init__(**kwargs) + self.cluster_size = kwargs.get('cluster_size', None) + self.time_to_live = kwargs.get('time_to_live', None) + self.version = kwargs.get('version', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.host_subscription_id = kwargs.get('host_subscription_id', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.cluster_resource_group = kwargs.get('cluster_resource_group', None) + self.cluster_name_prefix = kwargs.get('cluster_name_prefix', None) + self.cluster_user_name = kwargs.get('cluster_user_name', None) + self.cluster_password = kwargs.get('cluster_password', None) + self.cluster_ssh_user_name = kwargs.get('cluster_ssh_user_name', None) + self.cluster_ssh_password = kwargs.get('cluster_ssh_password', None) + self.additional_linked_service_names = kwargs.get('additional_linked_service_names', None) + self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None) + self.cluster_type = kwargs.get('cluster_type', None) + self.spark_version = kwargs.get('spark_version', None) + self.core_configuration = kwargs.get('core_configuration', None) + self.h_base_configuration = kwargs.get('h_base_configuration', None) + self.hdfs_configuration = kwargs.get('hdfs_configuration', None) + self.hive_configuration = kwargs.get('hive_configuration', None) + self.map_reduce_configuration = kwargs.get('map_reduce_configuration', None) + self.oozie_configuration = kwargs.get('oozie_configuration', None) + self.storm_configuration = kwargs.get('storm_configuration', None) + self.yarn_configuration = kwargs.get('yarn_configuration', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.head_node_size = kwargs.get('head_node_size', None) + self.data_node_size = kwargs.get('data_node_size', None) + self.zookeeper_node_size = kwargs.get('zookeeper_node_size', None) + self.script_actions = kwargs.get('script_actions', None) + self.type = 'HDInsightOnDemand' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py new file mode 100644 index 000000000000..9509e4c7e529 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service_py3.py @@ -0,0 +1,225 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class HDInsightOnDemandLinkedService(LinkedService):
+    """HDInsight on-demand linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param cluster_size: Required. Number of worker/data nodes in the cluster.
+     Suggested value: 4. Type: string (or Expression with resultType string).
+    :type cluster_size: object
+    :param time_to_live: Required. The allowed idle time for the on-demand
+     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
+     stays alive after completion of an activity run if there are no other
+     active jobs in the cluster. The minimum value is 5 mins. Type: string (or
+     Expression with resultType string).
+    :type time_to_live: object
+    :param version: Required. Version of the HDInsight cluster. Type: string
+     (or Expression with resultType string).
+    :type version: object
+    :param linked_service_name: Required. Azure Storage linked service to be
+     used by the on-demand cluster for storing and processing data.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param host_subscription_id: Required. The customer’s subscription to host
+     the cluster. Type: string (or Expression with resultType string).
+    :type host_subscription_id: object
+    :param service_principal_id: The service principal id for the
+     hostSubscriptionId. Type: string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: The key for the service principal id.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param tenant: Required. The Tenant id/name to which the service principal
+     belongs. Type: string (or Expression with resultType string).
+    :type tenant: object
+    :param cluster_resource_group: Required. The resource group where the
+     cluster belongs. Type: string (or Expression with resultType string).
+    :type cluster_resource_group: object
+    :param cluster_name_prefix: The prefix of the cluster name; a timestamp
+     postfix is appended to make the name distinct. Type: string (or
+     Expression with resultType string).
+    :type cluster_name_prefix: object
+    :param cluster_user_name: The username to access the cluster. Type: string
+     (or Expression with resultType string).
+    :type cluster_user_name: object
+    :param cluster_password: The password to access the cluster.
+    :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
+    :param cluster_ssh_user_name: The username used to remotely connect to the
+     cluster’s node via SSH (for Linux). Type: string (or Expression with
+     resultType string).
+    :type cluster_ssh_user_name: object
+    :param cluster_ssh_password: The password used to remotely connect to the
+     cluster’s node via SSH (for Linux).
+    :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
+    :param additional_linked_service_names: Specifies additional storage
+     accounts for the HDInsight linked service so that the Data Factory
+     service can register them on your behalf.
+    :type additional_linked_service_names:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param hcatalog_linked_service_name: The name of the Azure SQL linked
+     service that points to the HCatalog database. The on-demand HDInsight
+     cluster is created by using the Azure SQL database as the metastore.
+    :type hcatalog_linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param cluster_type: The cluster type. Type: string (or Expression with
+     resultType string).
+    :type cluster_type: object
+    :param spark_version: The version of spark if the cluster type is 'spark'.
+     Type: string (or Expression with resultType string).
+    :type spark_version: object
+    :param core_configuration: Specifies the core configuration parameters (as
+     in core-site.xml) for the HDInsight cluster to be created.
+    :type core_configuration: object
+    :param h_base_configuration: Specifies the HBase configuration parameters
+     (hbase-site.xml) for the HDInsight cluster.
+    :type h_base_configuration: object
+    :param hdfs_configuration: Specifies the HDFS configuration parameters
+     (hdfs-site.xml) for the HDInsight cluster.
+    :type hdfs_configuration: object
+    :param hive_configuration: Specifies the hive configuration parameters
+     (hive-site.xml) for the HDInsight cluster.
+    :type hive_configuration: object
+    :param map_reduce_configuration: Specifies the MapReduce configuration
+     parameters (mapred-site.xml) for the HDInsight cluster.
+    :type map_reduce_configuration: object
+    :param oozie_configuration: Specifies the Oozie configuration parameters
+     (oozie-site.xml) for the HDInsight cluster.
+    :type oozie_configuration: object
+    :param storm_configuration: Specifies the Storm configuration parameters
+     (storm-site.xml) for the HDInsight cluster.
+    :type storm_configuration: object
+    :param yarn_configuration: Specifies the Yarn configuration parameters
+     (yarn-site.xml) for the HDInsight cluster.
+    :type yarn_configuration: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param head_node_size: Specifies the size of the head node for the
+     HDInsight cluster.
+    :type head_node_size: object
+    :param data_node_size: Specifies the size of the data node for the
+     HDInsight cluster.
+    :type data_node_size: object
+    :param zookeeper_node_size: Specifies the size of the ZooKeeper node for
+     the HDInsight cluster.
+    :type zookeeper_node_size: object
+    :param script_actions: Custom script actions to run on the HDI on-demand
+     cluster once it's up. Please refer to
+     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
+ :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction] + """ + + _validation = { + 'type': {'required': True}, + 'cluster_size': {'required': True}, + 'time_to_live': {'required': True}, + 'version': {'required': True}, + 'linked_service_name': {'required': True}, + 'host_subscription_id': {'required': True}, + 'tenant': {'required': True}, + 'cluster_resource_group': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'}, + 'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, + 'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'}, + 'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'}, + 'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'}, + 'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'}, + 'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'}, + 'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'}, + 'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'}, + 'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'}, + 'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'}, + 'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'}, + 'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'}, + 'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'}, + 'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'}, + 'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'}, + 'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'}, + 'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'}, + 'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'}, + 'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'}, + 'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'}, + 'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'}, + 'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'}, + } + + 
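+    # The 'typeProperties.*' keys above are flattened on serialization: for
+    # example, cluster_size is sent on the wire as
+    # {"typeProperties": {"clusterSize": ...}} inside the linked service JSON.
+    #
+    # Illustrative construction sketch (all argument values are hypothetical
+    # placeholders, including the storage_ls_reference variable):
+    #
+    #     ls = HDInsightOnDemandLinkedService(
+    #         cluster_size=4,
+    #         time_to_live='00:05:00',
+    #         version='3.6',
+    #         linked_service_name=storage_ls_reference,
+    #         host_subscription_id='<subscription id>',
+    #         tenant='<tenant id>',
+    #         cluster_resource_group='<resource group>')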
def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, **kwargs) -> None: + super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.cluster_size = cluster_size + self.time_to_live = time_to_live + self.version = version + self.linked_service_name = linked_service_name + self.host_subscription_id = host_subscription_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.cluster_resource_group = cluster_resource_group + self.cluster_name_prefix = cluster_name_prefix + self.cluster_user_name = cluster_user_name + self.cluster_password = cluster_password + self.cluster_ssh_user_name = cluster_ssh_user_name + self.cluster_ssh_password = cluster_ssh_password + self.additional_linked_service_names = additional_linked_service_names + self.hcatalog_linked_service_name = hcatalog_linked_service_name + self.cluster_type = cluster_type + self.spark_version = spark_version + self.core_configuration = core_configuration + self.h_base_configuration = h_base_configuration + self.hdfs_configuration = hdfs_configuration + self.hive_configuration = hive_configuration + self.map_reduce_configuration = map_reduce_configuration + self.oozie_configuration = oozie_configuration + self.storm_configuration = storm_configuration + self.yarn_configuration = yarn_configuration + self.encrypted_credential = encrypted_credential + self.head_node_size = head_node_size + self.data_node_size = data_node_size + self.zookeeper_node_size = zookeeper_node_size + self.script_actions = script_actions + self.type = 'HDInsightOnDemand' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py new file mode 100644 index 000000000000..61b939076db6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class HDInsightPigActivity(ExecutionActivity):
+    """HDInsight Pig activity type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param storage_linked_services: Storage linked service references.
+    :type storage_linked_services:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param arguments: User specified arguments to HDInsightActivity.
+    :type arguments: list[object]
+    :param get_debug_info: Debug info option. Possible values include: 'None',
+     'Always', 'Failure'
+    :type get_debug_info: str or
+     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
+    :param script_path: Script path. Type: string (or Expression with
+     resultType string).
+    :type script_path: object
+    :param script_linked_service: Script linked service reference.
+    :type script_linked_service:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param defines: Allows user to specify defines for Pig job request.
+    :type defines: dict[str, object]
+    """
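+
+    # Illustrative construction sketch (activity and script names are
+    # hypothetical placeholders):
+    #
+    #     activity = HDInsightPigActivity(
+    #         name='PigSample',
+    #         script_path='adfjobs/scripts/sample.pig',
+    #         defines={'sampleOut': 'wasb://output/'})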
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
+        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
+        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
+        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
+        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
+        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
+    }
+
+    def __init__(self, **kwargs):
+        super(HDInsightPigActivity, self).__init__(**kwargs)
+        self.storage_linked_services = kwargs.get('storage_linked_services', None)
+        self.arguments = kwargs.get('arguments', None)
+        self.get_debug_info = kwargs.get('get_debug_info', None)
+        self.script_path = kwargs.get('script_path', None)
+        self.script_linked_service = kwargs.get('script_linked_service', None)
+        self.defines = kwargs.get('defines', None)
+        self.type = 'HDInsightPig'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py
new file mode 100644
index 000000000000..fb149df91f39
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_pig_activity_py3.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class HDInsightPigActivity(ExecutionActivity):
+    """HDInsight Pig activity type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+ :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param storage_linked_services: Storage linked service references. + :type storage_linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + :param arguments: User specified arguments to HDInsightActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param script_path: Script path. Type: string (or Expression with + resultType string). + :type script_path: object + :param script_linked_service: Script linked service reference. + :type script_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param defines: Allows user to specify defines for Pig job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'}, + 'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None: + super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.script_path = script_path + self.script_linked_service = script_linked_service + self.defines = defines + self.type = 'HDInsightPig' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py new file mode 100644 index 000000000000..7822344f012f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class HDInsightSparkActivity(ExecutionActivity):
+    """HDInsight Spark activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param root_path: Required. The root path in 'sparkJobLinkedService' for
+     all the job’s files. Type: string (or Expression with resultType string).
+    :type root_path: object
+    :param entry_file_path: Required. The relative path to the root folder of
+     the code/package to be executed. Type: string (or Expression with
+     resultType string).
+    :type entry_file_path: object
+    :param arguments: The user-specified arguments to HDInsightSparkActivity.
+    :type arguments: list[object]
+    :param get_debug_info: Debug info option. Possible values include: 'None',
+     'Always', 'Failure'
+    :type get_debug_info: str or
+     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
+    :param spark_job_linked_service: The storage linked service for uploading
+     the entry file and dependencies, and for receiving logs.
+    :type spark_job_linked_service:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param class_name: The application's Java/Spark main class.
+    :type class_name: str
+    :param proxy_user: The user to impersonate when executing the job. Type:
+     string (or Expression with resultType string).
+    :type proxy_user: object
+    :param spark_config: Spark configuration property.
+    :type spark_config: dict[str, object]
+    """
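+
+    # Illustrative construction sketch (storage container and file names are
+    # hypothetical placeholders):
+    #
+    #     activity = HDInsightSparkActivity(
+    #         name='SparkSample',
+    #         root_path='adfspark',
+    #         entry_file_path='test/main.py',
+    #         arguments=['--input', 'wasb://data/input'])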
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'root_path': {'required': True},
+        'entry_file_path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'},
+        'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'},
+        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
+        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
+        'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'},
+        'class_name': {'key': 'typeProperties.className', 'type': 'str'},
+        'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'},
+        'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'},
+    }
+
+    def __init__(self, **kwargs):
+        super(HDInsightSparkActivity, self).__init__(**kwargs)
+        self.root_path = kwargs.get('root_path', None)
+        self.entry_file_path = kwargs.get('entry_file_path', None)
+        self.arguments = kwargs.get('arguments', None)
+        self.get_debug_info = kwargs.get('get_debug_info', None)
+        self.spark_job_linked_service = kwargs.get('spark_job_linked_service', None)
+        self.class_name = kwargs.get('class_name', None)
+        self.proxy_user = kwargs.get('proxy_user', None)
+        self.spark_config = kwargs.get('spark_config', None)
+        self.type = 'HDInsightSpark'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py
new file mode 100644
index 000000000000..3f305901abb7
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_spark_activity_py3.py
@@ -0,0 +1,100 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class HDInsightSparkActivity(ExecutionActivity):
+    """HDInsight Spark activity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param root_path: Required. The root path in 'sparkJobLinkedService' for + all the job’s files. Type: string (or Expression with resultType string). + :type root_path: object + :param entry_file_path: Required. The relative path to the root folder of + the code/package to be executed. Type: string (or Expression with + resultType string). + :type entry_file_path: object + :param arguments: The user-specified arguments to HDInsightSparkActivity. + :type arguments: list[object] + :param get_debug_info: Debug info option. Possible values include: 'None', + 'Always', 'Failure' + :type get_debug_info: str or + ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption + :param spark_job_linked_service: The storage linked service for uploading + the entry file and dependencies, and for receiving logs. + :type spark_job_linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param class_name: The application's Java/Spark main class. + :type class_name: str + :param proxy_user: The user to impersonate that will execute the job. + Type: string (or Expression with resultType string). + :type proxy_user: object + :param spark_config: Spark configuration property. + :type spark_config: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'root_path': {'required': True}, + 'entry_file_path': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'}, + 'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'}, + 'class_name': {'key': 'typeProperties.className', 'type': 'str'}, + 'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'}, + 'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'}, + } + + def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None: + super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.root_path = root_path + 
self.entry_file_path = entry_file_path
+        self.arguments = arguments
+        self.get_debug_info = get_debug_info
+        self.spark_job_linked_service = spark_job_linked_service
+        self.class_name = class_name
+        self.proxy_user = proxy_user
+        self.spark_config = spark_config
+        self.type = 'HDInsightSpark'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py
new file mode 100644
index 000000000000..42146a5d6cc6
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class HDInsightStreamingActivity(ExecutionActivity):
+    """HDInsight streaming activity type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param storage_linked_services: Storage linked service references.
+    :type storage_linked_services:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param arguments: User specified arguments to HDInsightActivity.
+    :type arguments: list[object]
+    :param get_debug_info: Debug info option. Possible values include: 'None',
+     'Always', 'Failure'
+    :type get_debug_info: str or
+     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
+    :param mapper: Required. Mapper executable name. Type: string (or
+     Expression with resultType string).
+    :type mapper: object
+    :param reducer: Required. Reducer executable name. Type: string (or
+     Expression with resultType string).
+    :type reducer: object
+    :param input: Required. Input blob path. Type: string (or Expression with
+     resultType string).
+    :type input: object
+    :param output: Required. Output blob path. Type: string (or Expression
+     with resultType string).
+    :type output: object
+    :param file_paths: Required. Paths to streaming job files. Can be
+     directories.
+    :type file_paths: list[object]
+    :param file_linked_service: Linked service reference where the files are
+     located.
+    :type file_linked_service:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param combiner: Combiner executable name.
Type: string (or Expression + with resultType string). + :type combiner: object + :param command_environment: Command line environment values. + :type command_environment: list[object] + :param defines: Allows user to specify defines for streaming job request. + :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(HDInsightStreamingActivity, self).__init__(**kwargs) + self.storage_linked_services = kwargs.get('storage_linked_services', None) + self.arguments = kwargs.get('arguments', None) + self.get_debug_info = kwargs.get('get_debug_info', None) + self.mapper = kwargs.get('mapper', None) + self.reducer = kwargs.get('reducer', None) + self.input = kwargs.get('input', None) + self.output = kwargs.get('output', None) + self.file_paths = kwargs.get('file_paths', None) + self.file_linked_service = kwargs.get('file_linked_service', None) + self.combiner = kwargs.get('combiner', None) + self.command_environment = kwargs.get('command_environment', None) + self.defines = kwargs.get('defines', None) + self.type = 'HDInsightStreaming' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py new file mode 100644 index 000000000000..2f5a301ff880 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_streaming_activity_py3.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity_py3 import ExecutionActivity
+
+
+class HDInsightStreamingActivity(ExecutionActivity):
+    """HDInsight streaming activity type.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param storage_linked_services: Storage linked service references.
+    :type storage_linked_services:
+     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
+    :param arguments: User specified arguments to HDInsightActivity.
+    :type arguments: list[object]
+    :param get_debug_info: Debug info option. Possible values include: 'None',
+     'Always', 'Failure'
+    :type get_debug_info: str or
+     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
+    :param mapper: Required. Mapper executable name. Type: string (or
+     Expression with resultType string).
+    :type mapper: object
+    :param reducer: Required. Reducer executable name. Type: string (or
+     Expression with resultType string).
+    :type reducer: object
+    :param input: Required. Input blob path. Type: string (or Expression with
+     resultType string).
+    :type input: object
+    :param output: Required. Output blob path. Type: string (or Expression
+     with resultType string).
+    :type output: object
+    :param file_paths: Required. Paths to streaming job files. Can be
+     directories.
+    :type file_paths: list[object]
+    :param file_linked_service: Linked service reference where the files are
+     located.
+    :type file_linked_service:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param combiner: Combiner executable name. Type: string (or Expression
+     with resultType string).
+    :type combiner: object
+    :param command_environment: Command line environment values.
+    :type command_environment: list[object]
+    :param defines: Allows user to specify defines for streaming job request.
+    :type defines: dict[str, object]
+    """
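+
+    # Illustrative construction sketch (executable and path names are
+    # hypothetical placeholders):
+    #
+    #     activity = HDInsightStreamingActivity(
+    #         name='StreamingSample',
+    #         mapper='cat.exe',
+    #         reducer='wc.exe',
+    #         input='wasb://input/example/data.txt',
+    #         output='wasb://output/example/',
+    #         file_paths=['example/data/cat.exe', 'example/data/wc.exe'])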
+ :type defines: dict[str, object] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'mapper': {'required': True}, + 'reducer': {'required': True}, + 'input': {'required': True}, + 'output': {'required': True}, + 'file_paths': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'}, + 'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'}, + 'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'}, + 'mapper': {'key': 'typeProperties.mapper', 'type': 'object'}, + 'reducer': {'key': 'typeProperties.reducer', 'type': 'object'}, + 'input': {'key': 'typeProperties.input', 'type': 'object'}, + 'output': {'key': 'typeProperties.output', 'type': 'object'}, + 'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'}, + 'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'}, + 'combiner': {'key': 'typeProperties.combiner', 'type': 'object'}, + 'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'}, + 'defines': {'key': 'typeProperties.defines', 'type': '{object}'}, + } + + def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None: + super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.storage_linked_services = storage_linked_services + self.arguments = arguments + self.get_debug_info = get_debug_info + self.mapper = mapper + self.reducer = reducer + self.input = input + self.output = output + self.file_paths = file_paths + self.file_linked_service = file_linked_service + self.combiner = combiner + self.command_environment = command_environment + self.defines = defines + self.type = 'HDInsightStreaming' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py new file mode 100644 index 000000000000..b527f05a7e2f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). + :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. 
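+     For example, a SecureString or an AzureKeyVaultSecretReference.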
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(HdfsLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.type = 'Hdfs' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py new file mode 100644 index 000000000000..e004701e1da0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_linked_service_py3.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HdfsLinkedService(LinkedService): + """Hadoop Distributed File System (HDFS) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the HDFS service endpoint, e.g. + http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with + resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + HDFS. Possible values are: Anonymous and Windows. Type: string (or + Expression with resultType string). 
+ :type authentication_type: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param user_name: User name for Windows authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None: + super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.encrypted_credential = encrypted_credential + self.user_name = user_name + self.password = password + self.type = 'Hdfs' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py new file mode 100644 index 000000000000..a8f5d1ba332c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). 
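+     For example: 'data.csv' (illustrative), resolved relative to
+     folder_path.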
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HdfsLocation, self).__init__(**kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py new file mode 100644 index 000000000000..2e07575bef0f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_location_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_location_py3 import DatasetLocation + + +class HdfsLocation(DatasetLocation): + """The location of HDFS. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(HdfsLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py new file mode 100644 index 000000000000..4fdadbc2fcd0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. 
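+    Controls how files are discovered when reading from HDFS (wildcard
+    matching, recursion, modified-datetime filtering) and, optionally,
+    DistCp execution via distcp_settings.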
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsReadSetting, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.distcp_settings = kwargs.get('distcp_settings', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py new file mode 100644 index 000000000000..164a6f497e52 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_read_setting_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .connector_read_setting_py3 import ConnectorReadSetting + + +class HdfsReadSetting(ConnectorReadSetting): + """HDFS read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or + Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: HDFS wildcardFileName. Type: string (or + Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + :param distcp_settings: Specifies Distcp-related settings. 
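+     For example, the resource manager endpoint and temp script path
+     used to launch the DistCp job.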
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + super(HdfsReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.distcp_settings = distcp_settings diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py new file mode 100644 index 000000000000..be50590f6c32 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. 
Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. + :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, **kwargs): + super(HdfsSource, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.distcp_settings = kwargs.get('distcp_settings', None) + self.type = 'HdfsSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py new file mode 100644 index 000000000000..3c60cab46289 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hdfs_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HdfsSource(CopySource): + """A copy activity HDFS source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param distcp_settings: Specifies Distcp-related settings. 
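+     For example, the resource manager endpoint and temp script path
+     used to launch the DistCp job.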
+ :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None: + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.distcp_settings = distcp_settings + self.type = 'HdfsSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py new file mode 100644 index 000000000000..c54c1393d56e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service.py @@ -0,0 +1,147 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client + connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: + 'HiveServer1', 'HiveServer2', 'HiveThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. 
Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Hive server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper + service, false not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive + Server 2 nodes are added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL + queries,or converts them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.service_discovery_mode = kwargs.get('service_discovery_mode', None) + self.zoo_keeper_name_space = kwargs.get('zoo_keeper_name_space', None) + self.use_native_query = kwargs.get('use_native_query', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Hive' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py new file mode 100644 index 000000000000..611d30ecb781 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_linked_service_py3.py @@ -0,0 +1,147 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HiveLinkedService(LinkedService): + """Hive Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. IP address or host name of the Hive server, + separated by ';' for multiple hosts (only when serviceDiscoveryMode is + enable). + :type host: object + :param port: The TCP port that the Hive server uses to listen for client + connections. + :type port: object + :param server_type: The type of Hive server. Possible values include: + 'HiveServer1', 'HiveServer2', 'HiveThriftServer' + :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType + :param thrift_transport_protocol: The transport protocol to use in the + Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' + :type thrift_transport_protocol: str or + ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol + :param authentication_type: Required. The authentication method used to + access the Hive server. Possible values include: 'Anonymous', 'Username', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HiveAuthenticationType + :param service_discovery_mode: true to indicate using the ZooKeeper + service, false not. + :type service_discovery_mode: object + :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive + Server 2 nodes are added. + :type zoo_keeper_name_space: object + :param use_native_query: Specifies whether the driver uses native HiveQL + queries,or converts them into an equivalent form in HiveQL. + :type use_native_query: object + :param username: The user name that you use to access Hive Server. + :type username: object + :param password: The password corresponding to the user name that you + provided in the Username field + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param http_path: The partial URL corresponding to the Hive server. + :type http_path: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. 
The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, + 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, + 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, 
description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.server_type = server_type + self.thrift_transport_protocol = thrift_transport_protocol + self.authentication_type = authentication_type + self.service_discovery_mode = service_discovery_mode + self.zoo_keeper_name_space = zoo_keeper_name_space + self.use_native_query = use_native_query + self.username = username + self.password = password + self.http_path = http_path + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Hive' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py new file mode 100644 index 000000000000..7dc4fd367f8a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
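+     For example: 'default.mytable' (illustrative).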
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HiveObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py new file mode 100644 index 000000000000..c007333721be --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HiveObjectDataset(Dataset): + """Hive Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
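+     For example: 'default.mytable' (illustrative).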
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HiveObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py new file mode 100644 index 000000000000..3af88c3280e3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
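+     For example: 'SELECT * FROM mytable' (illustrative HiveQL).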
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HiveSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HiveSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py new file mode 100644 index 000000000000..6c09191b8c1b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hive_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HiveSource(CopySource): + """A copy activity Hive Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
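+     For example: 'SELECT * FROM mytable' (illustrative HiveQL).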
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HiveSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py new file mode 100644 index 000000000000..f2184dea151f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class HttpDataset(Dataset): + """A file in an HTTP web server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL based on the URL in the + HttpLinkedService refers to an HTTP file Type: string (or Expression with + resultType string). + :type relative_url: object + :param request_method: The HTTP method for the HTTP request. Type: string + (or Expression with resultType string). 
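+     For example: 'GET' or 'POST'.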
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py
new file mode 100644
index 000000000000..09f97a03a95d
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_dataset_py3.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class HttpDataset(Dataset):
+ """A file in an HTTP web server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param relative_url: The relative URL, based on the URL in the
+ HttpLinkedService, that refers to an HTTP file. Type: string (or
+ Expression with resultType string).
+ :type relative_url: object
+ :param request_method: The HTTP method for the HTTP request. Type: string
+ (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The body for the HTTP request. Type: string (or
+ Expression with resultType string).
+ :type request_body: object
+ :param additional_headers: The headers for the HTTP Request. e.g.
+ request-header-name-1:request-header-value-1
+ ...
+ request-header-name-n:request-header-value-n Type: string (or Expression
+ with resultType string).
+ :type additional_headers: object
+ :param format: The format of files.
+ :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
+ :param compression: The data compression method used on files.
+ :type compression: ~azure.mgmt.datafactory.models.DatasetCompression + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: + super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.format = format + self.compression = compression + self.type = 'HttpFile' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py new file mode 100644 index 000000000000..6232bc45fee4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate + with EmbeddedCertData authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for + ClientCertificate authentication. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate + authentication. Only valid for on-premises copy. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS + server SSL certificate. Default value is true. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_server_certificate_validation: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, + 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HttpLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.embedded_cert_data = kwargs.get('embedded_cert_data', None) + self.cert_thumbprint = kwargs.get('cert_thumbprint', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.type = 'HttpServer' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py new file mode 100644 index 000000000000..7f70adb08425 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_linked_service_py3.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class HttpLinkedService(LinkedService): + """Linked service for an HTTP source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server. + :type type: str + :param url: Required. The base URL of the HTTP endpoint, e.g. + http://www.microsoft.com. Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: The authentication type to be used to connect + to the HTTP server. Possible values include: 'Basic', 'Anonymous', + 'Digest', 'Windows', 'ClientCertificate' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.HttpAuthenticationType + :param user_name: User name for Basic, Digest, or Windows authentication. + Type: string (or Expression with resultType string). + :type user_name: object + :param password: Password for Basic, Digest, Windows, or ClientCertificate + with EmbeddedCertData authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param embedded_cert_data: Base64 encoded certificate data for + ClientCertificate authentication. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type embedded_cert_data: object + :param cert_thumbprint: Thumbprint of certificate for ClientCertificate + authentication. Only valid for on-premises copy. For on-premises copy with + ClientCertificate authentication, either CertThumbprint or + EmbeddedCertData/Password should be specified. Type: string (or Expression + with resultType string). + :type cert_thumbprint: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + :param enable_server_certificate_validation: If true, validate the HTTPS + server SSL certificate. Default value is true. Type: boolean (or + Expression with resultType boolean). 
+ :type enable_server_certificate_validation: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'url': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'url': {'key': 'typeProperties.url', 'type': 'object'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'},
+ 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
+ }
+
+ def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None:
+ super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.url = url
+ self.authentication_type = authentication_type
+ self.user_name = user_name
+ self.password = password
+ self.embedded_cert_data = embedded_cert_data
+ self.cert_thumbprint = cert_thumbprint
+ self.encrypted_credential = encrypted_credential
+ self.enable_server_certificate_validation = enable_server_certificate_validation
+ self.type = 'HttpServer'
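A configuration sketch for the HttpLinkedService model above with Basic authentication (SecureString is one concrete SecretBase implementation in this package; the endpoint and credentials are placeholders):

    from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

    linked_service = HttpLinkedService(
        url='https://example.com/api',  # required base URL of the endpoint
        authentication_type='Basic',
        user_name='svc-reader',
        password=SecureString(value='<password>'),  # placeholder secret
        enable_server_certificate_validation=True,
    )
    assert linked_service.type == 'HttpServer'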
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
new file mode 100644
index 000000000000..696a9fdb3faf
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class HttpReadSetting(ConnectorReadSetting):
+ """HTTP read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get an
+ HTTP response from the HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpReadSetting, self).__init__(**kwargs)
+ self.request_method = kwargs.get('request_method', None)
+ self.request_body = kwargs.get('request_body', None)
+ self.additional_headers = kwargs.get('additional_headers', None)
+ self.request_timeout = kwargs.get('request_timeout', None)
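A small usage sketch of the read settings above. Note that 'type' is a required pass-through discriminator in this version of the generator; 'HttpReadSetting' is assumed here to be the value the service expects:

    from azure.mgmt.datafactory.models import HttpReadSetting

    read_settings = HttpReadSetting(
        type='HttpReadSetting',  # required by _validation; assumed value
        request_method='GET',
        request_timeout='00:01:40',  # illustrative HTTP response timeout
        max_concurrent_connections=4,
    )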
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
new file mode 100644
index 000000000000..3d5d75a80785
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_read_setting_py3.py
@@ -0,0 +1,63 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class HttpReadSetting(ConnectorReadSetting):
+ """HTTP read settings.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. The read setting type.
+ :type type: str
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The
+ default is GET. Type: string (or Expression with resultType string).
+ :type request_method: object
+ :param request_body: The HTTP request body to the RESTful API if
+ requestMethod is POST. Type: string (or Expression with resultType
+ string).
+ :type request_body: object
+ :param additional_headers: The additional HTTP headers in the request to
+ the RESTful API. Type: string (or Expression with resultType string).
+ :type additional_headers: object
+ :param request_timeout: Specifies the timeout for an HTTP client to get an
+ HTTP response from the HTTP server.
+ :type request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'request_method': {'key': 'requestMethod', 'type': 'object'},
+ 'request_body': {'key': 'requestBody', 'type': 'object'},
+ 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
+ 'request_timeout': {'key': 'requestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, **kwargs) -> None:
+ super(HttpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.request_method = request_method
+ self.request_body = request_body
+ self.additional_headers = additional_headers
+ self.request_timeout = request_timeout
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
new file mode 100644
index 000000000000..94106fae9d15
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class HttpServerLocation(DatasetLocation):
+ """The location of an HTTP server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string)
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ :param relative_url: Specify the relativeUrl of the HTTP server. Type: string
+ (or Expression with resultType string)
+ :type relative_url: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ 'relative_url': {'key': 'relativeUrl', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpServerLocation, self).__init__(**kwargs)
+ self.relative_url = kwargs.get('relative_url', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py
new file mode 100644
index 000000000000..c52c53dcf357
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_server_location_py3.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class HttpServerLocation(DatasetLocation):
+ """The location of an HTTP server.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of dataset storage location.
+ :type type: str
+ :param folder_path: Specify the folder path of dataset. Type: string (or
+ Expression with resultType string)
+ :type folder_path: object
+ :param file_name: Specify the file name of dataset. Type: string (or
+ Expression with resultType string).
+ :type file_name: object
+ :param relative_url: Specify the relativeUrl of the HTTP server. Type: string
+ (or Expression with resultType string)
+ :type relative_url: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'folder_path': {'key': 'folderPath', 'type': 'object'},
+ 'file_name': {'key': 'fileName', 'type': 'object'},
+ 'relative_url': {'key': 'relativeUrl', 'type': 'object'},
+ }
+
+ def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None:
+ super(HttpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
+ self.relative_url = relative_url
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py
new file mode 100644
index 000000000000..ae131aa16c8c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class HttpSource(CopySource):
+ """A copy activity source for an HTTP file.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param http_request_timeout: Specifies the timeout for an HTTP client to
+ get an HTTP response from the HTTP server. The default value is equivalent
+ to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type http_request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(HttpSource, self).__init__(**kwargs)
+ self.http_request_timeout = kwargs.get('http_request_timeout', None)
+ self.type = 'HttpSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py
new file mode 100644
index 000000000000..df339fc3aef7
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/http_source_py3.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class HttpSource(CopySource):
+ """A copy activity source for an HTTP file.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param http_request_timeout: Specifies the timeout for an HTTP client to
+ get an HTTP response from the HTTP server. The default value is equivalent
+ to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type http_request_timeout: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None:
+ super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.http_request_timeout = http_request_timeout
+ self.type = 'HttpSource'
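A usage sketch for the HttpSource model above (the timeout and retry values are illustrative TimeSpan-style strings matching the documented pattern):

    from azure.mgmt.datafactory.models import HttpSource

    source = HttpSource(
        http_request_timeout='00:02:00',  # hh:mm:ss per the documented pattern
        source_retry_count=2,
        source_retry_wait='00:00:10',
    )
    assert source.type == 'HttpSource'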
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py
new file mode 100644
index 000000000000..3d0d6cb3a6f4
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service.py
@@ -0,0 +1,96 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class HubspotLinkedService(LinkedService):
+ """Hubspot Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with your Hubspot + application. + :type client_id: object + :param client_secret: The client secret associated with your Hubspot + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: The access token obtained when initially + authenticating your OAuth integration. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param refresh_token: The refresh token obtained when initially + authenticating your OAuth integration. + :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.access_token = kwargs.get('access_token', None) + self.refresh_token = kwargs.get('refresh_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Hubspot' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py 
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py
new file mode 100644
index 000000000000..272d613e9cd1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_linked_service_py3.py
@@ -0,0 +1,96 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class HubspotLinkedService(LinkedService):
+ """Hubspot Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param client_id: Required. The client ID associated with your Hubspot
+ application.
+ :type client_id: object
+ :param client_secret: The client secret associated with your Hubspot
+ application.
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+ :param access_token: The access token obtained when initially
+ authenticating your OAuth integration.
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param refresh_token: The refresh token obtained when initially
+ authenticating your OAuth integration.
+ :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'client_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+ 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
+ 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+ 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.client_id = client_id
+ self.client_secret = client_secret
+ self.access_token = access_token
+ self.refresh_token = refresh_token
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Hubspot'
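A configuration sketch for the HubspotLinkedService model above (client_id is the only required type property; SecureString stands in for any SecretBase implementation, and all identifiers below are placeholders):

    from azure.mgmt.datafactory.models import HubspotLinkedService, SecureString

    hubspot = HubspotLinkedService(
        client_id='my-hubspot-app-id',  # placeholder application ID
        client_secret=SecureString(value='<client-secret>'),
        access_token=SecureString(value='<access-token>'),
        use_encrypted_endpoints=True,
    )
    assert hubspot.type == 'Hubspot'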
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py
new file mode 100644
index 000000000000..ce8994b4db4a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class HubspotObjectDataset(Dataset):
+ """Hubspot Service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'HubspotObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py new file mode 100644 index 000000000000..bd2309101f72 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class HubspotObjectDataset(Dataset): + """Hubspot Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'HubspotObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py new file mode 100644 index 000000000000..b4b4c618c33e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(HubspotSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'HubspotSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py new file mode 100644 index 000000000000..a29811342ce0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hubspot_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class HubspotSource(CopySource): + """A copy activity Hubspot Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'HubspotSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py new file mode 100644 index 000000000000..a8cb1da690e1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or + ifFalseActivities) that will be executed. + :type expression: ~azure.mgmt.datafactory.models.Expression + :param if_true_activities: List of activities to execute if expression is + evaluated to true. This is an optional property and if not provided, the + activity will exit without any action. + :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] + :param if_false_activities: List of activities to execute if expression is + evaluated to false. This is an optional property and if not provided, the + activity will exit without any action. 
+ :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, + 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, + } + + def __init__(self, **kwargs): + super(IfConditionActivity, self).__init__(**kwargs) + self.expression = kwargs.get('expression', None) + self.if_true_activities = kwargs.get('if_true_activities', None) + self.if_false_activities = kwargs.get('if_false_activities', None) + self.type = 'IfCondition' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py new file mode 100644 index 000000000000..7921a2602807 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/if_condition_activity_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class IfConditionActivity(ControlActivity): + """This activity evaluates a boolean expression and executes either the + activities under the ifTrueActivities property or the ifFalseActivities + property depending on the result of the expression. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param expression: Required. An expression that would evaluate to Boolean. + This is used to determine the block of activities (ifTrueActivities or + ifFalseActivities) that will be executed. + :type expression: ~azure.mgmt.datafactory.models.Expression + :param if_true_activities: List of activities to execute if expression is + evaluated to true. This is an optional property and if not provided, the + activity will exit without any action. 
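A sketch of how the required expression and the two branches wire up, using WaitActivity as a stand-in child activity and a hypothetical pipeline parameter:

from azure.mgmt.datafactory.models import (
    Expression,
    IfConditionActivity,
    WaitActivity,
)

# 'doCleanup' is a hypothetical pipeline parameter.
branch = IfConditionActivity(
    name='MaybeCleanup',
    expression=Expression(value='@bool(pipeline().parameters.doCleanup)'),
    if_true_activities=[WaitActivity(name='Pause', wait_time_in_seconds=5)],
    # if_false_activities is omitted on purpose: per the docstring, the
    # activity then exits without any action.
)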
+ :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity]
+ :param if_false_activities: List of activities to execute if expression is
+ evaluated to false. This is an optional property and if not provided, the
+ activity will exit without any action.
+ :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity]
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'type': {'required': True},
+ 'expression': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+ 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+ 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'},
+ 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'},
+ }
+
+ def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None:
+ super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+ self.expression = expression
+ self.if_true_activities = if_true_activities
+ self.if_false_activities = if_false_activities
+ self.type = 'IfCondition'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py
new file mode 100644
index 000000000000..a704852652db
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class ImpalaLinkedService(LinkedService):
+ """Impala server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the Impala server.
+ (e.g. 192.168.222.160)
+ :type host: object
+ :param port: The TCP port that the Impala server uses to listen for client
+ connections. The default value is 21050.
+ :type port: object
+ :param authentication_type: Required. The authentication type to use.
+ Possible values include: 'Anonymous', 'SASLUsername',
+ 'UsernameAndPassword'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ImpalaAuthenticationType
+ :param username: The user name used to access the Impala server. The
+ default value is anonymous when using SASLUsername.
+ :type username: object
+ :param password: The password corresponding to the user name when using
+ UsernameAndPassword.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Impala' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py new file mode 100644 index 000000000000..55b2e0c861d7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_linked_service_py3.py @@ -0,0 +1,117 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ImpalaLinkedService(LinkedService): + """Impala server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
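A minimal construction sketch for this linked service; the host and credentials are placeholders. The same keyword style works for the Python 2 variant above as well, since its __init__ reads everything out of **kwargs:

from azure.mgmt.datafactory.models import ImpalaLinkedService, SecureString

impala_ls = ImpalaLinkedService(
    host='192.168.222.160',  # placeholder address
    authentication_type='UsernameAndPassword',
    username='analyst',  # placeholder credentials
    password=SecureString(value='<password>'),
    enable_ssl=True,
)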
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the Impala server.
+ (e.g. 192.168.222.160)
+ :type host: object
+ :param port: The TCP port that the Impala server uses to listen for client
+ connections. The default value is 21050.
+ :type port: object
+ :param authentication_type: Required. The authentication type to use.
+ Possible values include: 'Anonymous', 'SASLUsername',
+ 'UsernameAndPassword'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ImpalaAuthenticationType
+ :param username: The user name used to access the Impala server. The
+ default value is anonymous when using SASLUsername.
+ :type username: object
+ :param password: The password corresponding to the user name when using
+ UsernameAndPassword.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Impala' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py new file mode 100644 index 000000000000..d9bf591d8021 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ImpalaObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py new file mode 100644 index 000000000000..d103603b2586 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ImpalaObjectDataset(Dataset): + """Impala server dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ImpalaObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py new file mode 100644 index 000000000000..9e27dbdb6266 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ImpalaSource(CopySource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ImpalaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ImpalaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py new file mode 100644 index 000000000000..f7dc4016d020 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/impala_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ImpalaSource(CopySource): + """A copy activity Impala server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
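Because query is typed object rather than str, it accepts either a literal query string or an ADF expression payload that the service resolves at run time; a sketch, with a hypothetical pipeline parameter:

from azure.mgmt.datafactory.models import ImpalaSource

# A literal query...
static_source = ImpalaSource(query='SELECT * FROM sales.orders')

# ...or an expression evaluated by the service at run time; 'day' is a
# hypothetical pipeline parameter.
dynamic_source = ImpalaSource(query={
    'type': 'Expression',
    'value': "@concat('SELECT * FROM sales.orders WHERE day = ', pipeline().parameters.day)",
})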
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ImpalaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py new file mode 100644 index 000000000000..5dd45d16f76e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntime(Model): + """Azure Data Factory nested object which serves as a compute resource for + activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} + } + + def __init__(self, **kwargs): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py new file mode 100644 index 000000000000..12ed6925585e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. + :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = kwargs.get('auth_key1', None) + self.auth_key2 = kwargs.get('auth_key2', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py new file mode 100644 index 000000000000..b807d4cd5b55 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_auth_keys_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeAuthKeys(Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. 
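The _subtype_map above is what drives polymorphic deserialization: the server-filled 'type' discriminator selects the concrete class. A sketch, assuming the classes are imported from the azure.mgmt.datafactory.models package so msrest can locate the sibling models:

from azure.mgmt.datafactory.models import IntegrationRuntime

runtime = IntegrationRuntime.deserialize({
    'type': 'SelfHosted',
    'description': 'IR running on an on-premises VM',
})
# type(runtime).__name__ should be 'SelfHostedIntegrationRuntime'.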
+ :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None: + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = auth_key1 + self.auth_key2 = auth_key2 diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py new file mode 100644 index 000000000000..e387ef4077f2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeComputeProperties(Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The + supported regions could be found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + :type location: str + :param node_size: The node size requirement to managed integration + runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed + integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count + per node for managed integration runtime. + :type max_parallel_executions_per_node: int + :param v_net_properties: VNet properties for managed integration runtime. 
+ :type v_net_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.location = kwargs.get('location', None) + self.node_size = kwargs.get('node_size', None) + self.number_of_nodes = kwargs.get('number_of_nodes', None) + self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) + self.v_net_properties = kwargs.get('v_net_properties', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py new file mode 100644 index 000000000000..f47f339dd067 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_compute_properties_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeComputeProperties(Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The + supported regions could be found on + https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + :type location: str + :param node_size: The node size requirement to managed integration + runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed + integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count + per node for managed integration runtime. + :type max_parallel_executions_per_node: int + :param v_net_properties: VNet properties for managed integration runtime. 
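A construction sketch; the node size name is a placeholder. The _validation block above declares a minimum of 1 for both counters, which msrest's client-side validation should catch:

from azure.mgmt.datafactory.models import IntegrationRuntimeComputeProperties

compute = IntegrationRuntimeComputeProperties(
    location='East US',
    node_size='Standard_D4_v2',  # hypothetical node size
    number_of_nodes=2,
    max_parallel_executions_per_node=4,
)

compute.number_of_nodes = 0
errors = compute.validate()  # expect one minimum-constraint violation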
+ :type v_net_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None: + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.location = location + self.node_size = node_size + self.number_of_nodes = number_of_nodes + self.max_parallel_executions_per_node = max_parallel_executions_per_node + self.v_net_properties = v_net_properties diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py new file mode 100644 index 000000000000..c185f916e8e5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeConnectionInfo(Model): + """Connection information for encrypting the on-premises data source + credentials. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar service_token: The token generated in service. Callers use this + token to authenticate to integration runtime. + :vartype service_token: str + :ivar identity_cert_thumbprint: The integration runtime SSL certificate + thumbprint. Click-Once application uses it to do server validation. + :vartype identity_cert_thumbprint: str + :ivar host_service_uri: The on-premises integration runtime host URL. + :vartype host_service_uri: str + :ivar version: The integration runtime version. + :vartype version: str + :ivar public_key: The public key for encrypting a credential when + transferring the credential to the integration runtime. + :vartype public_key: str + :ivar is_identity_cert_exprired: Whether the identity certificate is + expired. 
+ :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py new file mode 100644 index 000000000000..8cc5aceb16d7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_connection_info_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeConnectionInfo(Model): + """Connection information for encrypting the on-premises data source + credentials. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar service_token: The token generated in service. Callers use this + token to authenticate to integration runtime. + :vartype service_token: str + :ivar identity_cert_thumbprint: The integration runtime SSL certificate + thumbprint. Click-Once application uses it to do server validation. + :vartype identity_cert_thumbprint: str + :ivar host_service_uri: The on-premises integration runtime host URL. + :vartype host_service_uri: str + :ivar version: The integration runtime version. + :vartype version: str + :ivar public_key: The public key for encrypting a credential when + transferring the credential to the integration runtime. + :vartype public_key: str + :ivar is_identity_cert_exprired: Whether the identity certificate is + expired. 
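Every interesting field here is an :ivar, so instances are only meaningfully produced by deserializing a service response; a sketch with fabricated values:

from azure.mgmt.datafactory.models import IntegrationRuntimeConnectionInfo

info = IntegrationRuntimeConnectionInfo.deserialize({
    'serviceToken': '<token>',
    'hostServiceUri': 'https://onprem-node.local:8050/HostServiceRemote.svc/',
    'version': '3.18.0.0',
    'isIdentityCertExprired': False,  # key spelling matches the wire format
})
# info.version == '3.18.0.0'; serialize() drops read-only values by
# default, so they do not round-trip back to the service.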
+ :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py new file mode 100644 index 000000000000..44cd5fe5979b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. 
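A minimal sketch for the custom setup script properties below; the container URI and SAS token are placeholders:

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeCustomSetupScriptProperties,
    SecureString,
)

setup_script = IntegrationRuntimeCustomSetupScriptProperties(
    blob_container_uri='https://mystorage.blob.core.windows.net/ir-setup',
    sas_token=SecureString(value='?sv=2018-03-28&sig=<signature>'),
)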
+ :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = kwargs.get('blob_container_uri', None) + self.sas_token = kwargs.get('sas_token', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py new file mode 100644 index 000000000000..7f3c08c0b339 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_custom_setup_script_properties_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeCustomSetupScriptProperties(Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that + contains the custom setup script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: ~azure.mgmt.datafactory.models.SecureString + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, + } + + def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = blob_container_uri + self.sas_token = sas_token diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py new file mode 100644 index 000000000000..ebc0e9b38d6f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. 
+ :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py new file mode 100644 index 000000000000..532b774cad3d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_data_proxy_properties_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeDataProxyProperties(Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: + ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py new file mode 100644 index 000000000000..f7b695729403 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
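A sketch of wiring the data proxy above through a self-hosted runtime and a staging linked service; both reference names are hypothetical:

from azure.mgmt.datafactory.models import (
    EntityReference,
    IntegrationRuntimeDataProxyProperties,
)

data_proxy = IntegrationRuntimeDataProxyProperties(
    connect_via=EntityReference(
        type='IntegrationRuntimeReference',
        reference_name='MySelfHostedIR'),
    staging_linked_service=EntityReference(
        type='LinkedServiceReference',
        reference_name='StagingBlobStorage'),
    path='staging-container/path',
)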
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. + :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.nodes = kwargs.get('nodes', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py new file mode 100644 index 000000000000..16f3b656c9cc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_monitoring_data_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeMonitoringData(Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. + :type nodes: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = name + self.nodes = nodes diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py new file mode 100644 index 000000000000..2edabd3e2472 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. 
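+
+    Editorial example (assuming the generated ``integration_runtime_nodes``
+    operations group; resource names are illustrative), since instances are
+    returned by the service rather than built by callers::
+
+        ip = client.integration_runtime_nodes.get_ip_address(
+            'myResourceGroup', 'myFactory', 'mySelfHostedIr', 'Node_1')
+        print(ip.ip_address)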
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py new file mode 100644 index 000000000000..476be9815984 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_ip_address_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeIpAddress(Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py new file mode 100644 index 000000000000..9d27bedf70aa --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeMonitoringData(Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration + runtime node. 
+ :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the + integration runtime node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. + :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. + :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py new file mode 100644 index 000000000000..35c7e664b2ff --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_node_monitoring_data_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeNodeMonitoringData(Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. 
+ :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration + runtime node. + :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the + integration runtime node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. + :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. + :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py new file mode 100644 index 000000000000..b4056a07591b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntime(Model): + """Azure Data Factory nested object which serves as a compute resource for + activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime + + All required parameters must be populated in order to send to Azure. 
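+
+    Editorial example (a sketch; callers normally instantiate one of the
+    known sub-classes, which fill in ``type`` for polymorphic
+    serialization)::
+
+        from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime
+
+        ir = SelfHostedIntegrationRuntime(description='on-premises runtime')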
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} + } + + def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py new file mode 100644 index 000000000000..7461d29de284 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeReference(Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference" . + :vartype type: str + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "IntegrationRuntimeReference" + + def __init__(self, **kwargs): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py new file mode 100644 index 000000000000..56fd3608ba61 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
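+#
+# Usage sketch (editorial note, not AutoRest output). ``type`` is a class
+# constant pinned to "IntegrationRuntimeReference", so callers pass only
+# ``reference_name`` and, optionally, ``parameters`` (the values below are
+# illustrative):
+#
+#     ref = IntegrationRuntimeReference(
+#         reference_name='mySsisIr',
+#         parameters={'maxParallelism': 4})
+#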
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeReference(Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference" . + :vartype type: str + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "IntegrationRuntimeReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py new file mode 100644 index 000000000000..3cd91195af1b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. 
+ Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = kwargs.get('key_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py new file mode 100644 index 000000000000..f3846cf8ec55 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_regenerate_key_parameters_py3.py @@ -0,0 +1,30 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeRegenerateKeyParameters(Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. + Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__(self, *, key_name=None, **kwargs) -> None: + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = key_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py new file mode 100644 index 000000000000..b18f376d3698 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Integration runtime properties. 
+    :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'IntegrationRuntime'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeResource, self).__init__(**kwargs)
+        self.properties = kwargs.get('properties', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py
new file mode 100644
index 000000000000..cef89866884e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_paged.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class IntegrationRuntimeResourcePaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`IntegrationRuntimeResource <azure.mgmt.datafactory.models.IntegrationRuntimeResource>` object
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+
+        super(IntegrationRuntimeResourcePaged, self).__init__(*args, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
new file mode 100644
index 000000000000..9239f54166f9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_resource_py3.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .sub_resource_py3 import SubResource
+
+
+class IntegrationRuntimeResource(SubResource):
+    """Integration runtime resource type.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies change in the resource.
+    :vartype etag: str
+    :param properties: Required. Integration runtime properties.
+ :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = properties diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py new file mode 100644 index 000000000000..3399f8f38300 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeSsisCatalogInfo(Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog + database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user + account of the catalog database. + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. + The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/. 
Possible + values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' + :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) + self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) + self.catalog_admin_password = kwargs.get('catalog_admin_password', None) + self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py new file mode 100644 index 000000000000..27996bb4aeb5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_catalog_info_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeSsisCatalogInfo(Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog + database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user + account of the catalog database. + :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString + :param catalog_pricing_tier: The pricing tier for the catalog database. + The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/. 
Possible + values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' + :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py new file mode 100644 index 000000000000..293f071aa0b3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeSsisProperties(Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration + runtime. + :type catalog_info: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. + Possible values include: 'BasePrice', 'LicenseIncluded' + :type license_type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for + a managed dedicated integration runtime. + :type custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. 
Possible + values include: 'Standard', 'Enterprise' + :type edition: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_info = kwargs.get('catalog_info', None) + self.license_type = kwargs.get('license_type', None) + self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) + self.edition = kwargs.get('edition', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py new file mode 100644 index 000000000000..f75775e29a7f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_ssis_properties_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeSsisProperties(Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration + runtime. + :type catalog_info: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. + Possible values include: 'BasePrice', 'LicenseIncluded' + :type license_type: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for + a managed dedicated integration runtime. + :type custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed + dedicated integration runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. 
Possible
+     values include: 'Standard', 'Enterprise'
+    :type edition: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
+        'license_type': {'key': 'licenseType', 'type': 'str'},
+        'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
+        'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
+        'edition': {'key': 'edition', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None:
+        super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.catalog_info = catalog_info
+        self.license_type = license_type
+        self.custom_setup_script_properties = custom_setup_script_properties
+        self.data_proxy_properties = data_proxy_properties
+        self.edition = edition
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py
new file mode 100644
index 000000000000..64da6347f9ed
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatus(Model):
+    """Integration runtime status.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: SelfHostedIntegrationRuntimeStatus,
+    ManagedIntegrationRuntimeStatus
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of the integration runtime. Possible values
+     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.data_factory_name = None + self.state = None + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py new file mode 100644 index 000000000000..9382b4b08fde --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusListResponse(Model): + """A list of integration runtime status. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtime status. + :type value: + list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py new file mode 100644 index 000000000000..bed71f74ffc6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_list_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
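+#
+# Usage sketch (editorial note, not AutoRest output). This model mirrors
+# the raw REST list payload: ``value`` is required and ``next_link``, when
+# set, points at the next page of results (the element below is
+# illustrative):
+#
+#     page = IntegrationRuntimeStatusListResponse(value=[status_response])
+#     has_more = page.next_link is not None
+#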
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatusListResponse(Model):
+    """A list of integration runtime status.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of integration runtime status.
+    :type value:
+     list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]
+    :param next_link: The link to the next page of results, if any remaining
+     results exist.
+    :type next_link: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(self, *, value, next_link: str=None, **kwargs) -> None:
+        super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py
new file mode 100644
index 000000000000..8541e04dc679
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_py3.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class IntegrationRuntimeStatus(Model):
+    """Integration runtime status.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: SelfHostedIntegrationRuntimeStatus,
+    ManagedIntegrationRuntimeStatus
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of the integration runtime. Possible values
+     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_factory_name = None + self.state = None + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py new file mode 100644 index 000000000000..901b4d8b7442 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusResponse(Model): + """Integration runtime status response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The integration runtime name. + :vartype name: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + self.name = None + self.properties = kwargs.get('properties', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py new file mode 100644 index 000000000000..64d84a1e4f19 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_status_response_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
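+#
+# Usage sketch (editorial note, assuming the generated
+# ``integration_runtimes`` operations group; resource names are
+# illustrative):
+#
+#     resp = client.integration_runtimes.get_status(
+#         'myResourceGroup', 'myFactory', 'mySelfHostedIr')
+#     print(resp.name, resp.properties.state)
+#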
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeStatusResponse(Model): + """Integration runtime status response. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar name: The integration runtime name. + :vartype name: str + :param properties: Required. Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus + """ + + _validation = { + 'name': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) + self.name = None + self.properties = properties diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py new file mode 100644 index 000000000000..752b5b99eb60 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeVNetProperties(Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will + join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.v_net_id = kwargs.get('v_net_id', None) + self.subnet = kwargs.get('subnet', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py new file mode 100644 index 000000000000..32e8beb31ea1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/integration_runtime_vnet_properties_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
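+#
+# Usage sketch (editorial note, not AutoRest output). Joining a managed
+# runtime to a virtual network takes the VNet ID and a subnet name (the
+# values below are illustrative):
+#
+#     vnet = IntegrationRuntimeVNetProperties(
+#         v_net_id='12345678-0000-0000-0000-000000000000',
+#         subnet='default')
+#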
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class IntegrationRuntimeVNetProperties(Model): + """VNet properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param v_net_id: The ID of the VNet that this integration runtime will + join. + :type v_net_id: str + :param subnet: The name of the subnet this integration runtime will join. + :type subnet: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'v_net_id': {'key': 'vNetId', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, **kwargs) -> None: + super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.v_net_id = v_net_id + self.subnet = subnet diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py new file mode 100644 index 000000000000..517cdd63caa5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class JiraLinkedService(LinkedService): + """Jira Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Jira service. + (e.g. jira.example.com) + :type host: object + :param port: The TCP port that the Jira server uses to listen for client + connections. The default value is 443 if connecting through HTTPS, or 8080 + if connecting through HTTP. + :type port: object + :param username: Required. The user name that you use to access Jira + Service. + :type username: object + :param password: The password corresponding to the user name that you + provided in the username field. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(JiraLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Jira' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py new file mode 100644 index 000000000000..82dc8d578da3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
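+#
+# Illustrative usage sketch (not generated output; the host and credentials
+# below are placeholders, and SecureString is the SecretBase implementation
+# shipped in this models package):
+#
+#     from azure.mgmt.datafactory.models import SecureString
+#     jira = JiraLinkedService(
+#         host='jira.example.com',
+#         username='build-user',
+#         password=SecureString(value='<password>'))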
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class JiraLinkedService(LinkedService):
+ """Jira Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the Jira service
+ (e.g. jira.example.com).
+ :type host: object
+ :param port: The TCP port that the Jira server uses to listen for client
+ connections. The default value is 443 if connecting through HTTPS, or 8080
+ if connecting through HTTP.
+ :type port: object
+ :param username: Required. The user name that you use to access Jira
+ Service.
+ :type username: object
+ :param password: The password corresponding to the user name that you
+ provided in the username field.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ 'username': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'port': {'key': 'typeProperties.port', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+ super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.host = host
+ self.port = port
+ self.username = username
+ self.password = password
+ self.use_encrypted_endpoints = use_encrypted_endpoints
+ self.use_host_verification = use_host_verification
+ self.use_peer_verification = use_peer_verification
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Jira'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py
new file mode 100644
index 000000000000..1c2b12c18e15
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class JiraObjectDataset(Dataset):
+ """Jira Service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset.
Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JiraObjectDataset, self).__init__(**kwargs)
+ self.table_name = kwargs.get('table_name', None)
+ self.type = 'JiraObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py
new file mode 100644
index 000000000000..3c061b238cde
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class JiraObjectDataset(Dataset):
+ """Jira Service dataset.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param table_name: The table name. Type: string (or Expression with
+ resultType string).
+ :type table_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+ super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.table_name = table_name
+ self.type = 'JiraObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py
new file mode 100644
index 000000000000..709da0ce1205
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class JiraSource(CopySource):
+ """A copy activity Jira Service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param query: A query to retrieve data from source. Type: string (or
+ Expression with resultType string).
+ :type query: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'query': {'key': 'query', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JiraSource, self).__init__(**kwargs)
+ self.query = kwargs.get('query', None)
+ self.type = 'JiraSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py
new file mode 100644
index 000000000000..c958c8351bb3
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/jira_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class JiraSource(CopySource):
+ """A copy activity Jira Service source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'JiraSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py new file mode 100644 index 000000000000..80f4ff0aaf8b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class JsonFormat(DatasetStorageFormat): + """The data stored in JSON format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param file_pattern: File pattern of JSON. To be more specific, the way of + separating a collection of JSON objects. The default value is + 'setOfObjects'. It is case-sensitive. + :type file_pattern: object + :param nesting_separator: The character used to separate nesting levels. + Default value is '.' (dot). Type: string (or Expression with resultType + string). + :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not + provided, the default value is 'utf-8', unless the byte order mark (BOM) + denotes another Unicode encoding. The full list of supported values can be + found in the 'Name' column of the table of encodings in the following + reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be + flattened. Example: "$.ArrayPath". Type: string (or Expression with + resultType string). 
+ :type json_node_reference: object
+ :param json_path_definition: The JSONPath definition for each column
+ mapping with a customized column name to extract data from JSON file. For
+ fields under root object, start with "$"; for fields inside the array
+ chosen by jsonNodeReference property, start from the array element.
+ Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}.
+ Type: object (or Expression with resultType object).
+ :type json_path_definition: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
+ 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
+ 'encoding_name': {'key': 'encodingName', 'type': 'object'},
+ 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
+ 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(JsonFormat, self).__init__(**kwargs)
+ self.file_pattern = kwargs.get('file_pattern', None)
+ self.nesting_separator = kwargs.get('nesting_separator', None)
+ self.encoding_name = kwargs.get('encoding_name', None)
+ self.json_node_reference = kwargs.get('json_node_reference', None)
+ self.json_path_definition = kwargs.get('json_path_definition', None)
+ self.type = 'JsonFormat'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py
new file mode 100644
index 000000000000..2fdb44cc3b7f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/json_format_py3.py
@@ -0,0 +1,82 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_storage_format_py3 import DatasetStorageFormat
+
+
+class JsonFormat(DatasetStorageFormat):
+ """The data stored in JSON format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param serializer: Serializer. Type: string (or Expression with resultType
+ string).
+ :type serializer: object
+ :param deserializer: Deserializer. Type: string (or Expression with
+ resultType string).
+ :type deserializer: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param file_pattern: File pattern of JSON. To be more specific, the way of
+ separating a collection of JSON objects. The default value is
+ 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
+ :param nesting_separator: The character used to separate nesting levels.
+ Default value is '.' (dot). Type: string (or Expression with resultType
+ string).
+ :type nesting_separator: object + :param encoding_name: The code page name of the preferred encoding. If not + provided, the default value is 'utf-8', unless the byte order mark (BOM) + denotes another Unicode encoding. The full list of supported values can be + found in the 'Name' column of the table of encodings in the following + reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string + (or Expression with resultType string). + :type encoding_name: object + :param json_node_reference: The JSONPath of the JSON array element to be + flattened. Example: "$.ArrayPath". Type: string (or Expression with + resultType string). + :type json_node_reference: object + :param json_path_definition: The JSONPath definition for each column + mapping with a customized column name to extract data from JSON file. For + fields under root object, start with "$"; for fields inside the array + chosen by jsonNodeReference property, start from the array element. + Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. + Type: object (or Expression with resultType object). + :type json_path_definition: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, + 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, + 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None: + super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.file_pattern = file_pattern + self.nesting_separator = nesting_separator + self.encoding_name = encoding_name + self.json_node_reference = json_node_reference + self.json_path_definition = json_path_definition + self.type = 'JsonFormat' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py new file mode 100644 index 000000000000..f4a4e7eb8bf0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntime(Model): + """The linked integration runtime information. + + Variables are only populated by the server, and will be ignored when + sending a request. 
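+
+ All fields below are therefore read-only; a deserialized instance is
+ inspected through attributes such as ``name`` and ``create_time`` rather
+ than constructed with values by the caller.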
+
+ :ivar name: The name of the linked integration runtime.
+ :vartype name: str
+ :ivar subscription_id: The subscription ID that the linked integration
+ runtime belongs to.
+ :vartype subscription_id: str
+ :ivar data_factory_name: The name of the data factory that the linked
+ integration runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar data_factory_location: The location of the data factory that the
+ linked integration runtime belongs to.
+ :vartype data_factory_location: str
+ :ivar create_time: The time at which the linked integration runtime was
+ created.
+ :vartype create_time: datetime
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'subscription_id': {'readonly': True},
+ 'data_factory_name': {'readonly': True},
+ 'data_factory_location': {'readonly': True},
+ 'create_time': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+ 'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+ }
+
+ def __init__(self, **kwargs):
+ super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+ self.name = None
+ self.subscription_id = None
+ self.data_factory_name = None
+ self.data_factory_location = None
+ self.create_time = None
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py
new file mode 100644
index 000000000000..b7be47e8f096
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+ """The key authorization type integration runtime.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param authorization_type: Required. Constant filled by server.
+ :type authorization_type: str
+ :param key: Required. The key used for authorization.
+ :type key: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.key = kwargs.get('key', None) + self.authorization_type = 'Key' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py new file mode 100644 index 000000000000..4a2ebd8d1003 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_key_authorization_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + + +class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): + """The key authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. + :type key: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'SecureString'}, + } + + def __init__(self, *, key, **kwargs) -> None: + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.key = key + self.authorization_type = 'Key' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py new file mode 100644 index 000000000000..6c831ab5f511 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntime(Model): + """The linked integration runtime information. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The name of the linked integration runtime. 
+ :vartype name: str
+ :ivar subscription_id: The subscription ID that the linked integration
+ runtime belongs to.
+ :vartype subscription_id: str
+ :ivar data_factory_name: The name of the data factory that the linked
+ integration runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar data_factory_location: The location of the data factory that the
+ linked integration runtime belongs to.
+ :vartype data_factory_location: str
+ :ivar create_time: The time at which the linked integration runtime was
+ created.
+ :vartype create_time: datetime
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'subscription_id': {'readonly': True},
+ 'data_factory_name': {'readonly': True},
+ 'data_factory_location': {'readonly': True},
+ 'create_time': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+ 'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+ }
+
+ def __init__(self, **kwargs) -> None:
+ super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+ self.name = None
+ self.subscription_id = None
+ self.data_factory_name = None
+ self.data_factory_location = None
+ self.create_time = None
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py
new file mode 100644
index 000000000000..3fbc8dd9cac2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_integration_runtime_type import LinkedIntegrationRuntimeType
+
+
+class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType):
+ """The role based access control (RBAC) authorization type integration
+ runtime.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param authorization_type: Required. Constant filled by server.
+ :type authorization_type: str
+ :param resource_id: Required. The resource identifier of the integration
+ runtime to be shared.
+ :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.resource_id = kwargs.get('resource_id', None) + self.authorization_type = 'RBAC' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py new file mode 100644 index 000000000000..055b64809e18 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_rbac_authorization_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_integration_runtime_type_py3 import LinkedIntegrationRuntimeType + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration + runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration + runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, *, resource_id: str, **kwargs) -> None: + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.resource_id = resource_id + self.authorization_type = 'RBAC' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py new file mode 100644 index 000000000000..807757332b3e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntimeRequest(Model): + """Data factory name for linked integration runtime request. 
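+
+ An illustrative construction sketch (the factory name is a placeholder):
+ ``LinkedIntegrationRuntimeRequest(linked_factory_name='myLinkedFactory')``.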
+ + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked + integration runtime. + :type linked_factory_name: str + """ + + _validation = { + 'linked_factory_name': {'required': True}, + } + + _attribute_map = { + 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = kwargs.get('linked_factory_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py new file mode 100644 index 000000000000..45362ab63ba3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_request_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntimeRequest(Model): + """Data factory name for linked integration runtime request. + + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked + integration runtime. + :type linked_factory_name: str + """ + + _validation = { + 'linked_factory_name': {'required': True}, + } + + _attribute_map = { + 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + } + + def __init__(self, *, linked_factory_name: str, **kwargs) -> None: + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = linked_factory_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py new file mode 100644 index 000000000000..446395bb9cbf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntimeType(Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, + LinkedIntegrationRuntimeKeyAuthorization + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. 
+ :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} + } + + def __init__(self, **kwargs): + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py new file mode 100644 index 000000000000..79468dc450d2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_integration_runtime_type_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedIntegrationRuntimeType(Model): + """The base definition of a linked integration runtime. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, + LinkedIntegrationRuntimeKeyAuthorization + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. Constant filled by server. + :type authorization_type: str + """ + + _validation = { + 'authorization_type': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + } + + _subtype_map = { + 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} + } + + def __init__(self, **kwargs) -> None: + super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) + self.authorization_type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py new file mode 100644 index 000000000000..3d4660d72e89 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedService(Model): + """The Azure Data Factory nested object which contains the information and + credential which can be used to connect with related store or compute + resource. + + You probably want to use the sub-classes and not this class directly. 
Known
+ sub-classes are: AzureFunctionLinkedService,
+ AzureDataExplorerLinkedService, GoogleAdWordsLinkedService,
+ OracleServiceCloudLinkedService, DynamicsAXLinkedService,
+ ResponsysLinkedService, AzureDatabricksLinkedService,
+ AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService,
+ SalesforceMarketingCloudLinkedService, NetezzaLinkedService,
+ VerticaLinkedService, ZohoLinkedService, XeroLinkedService,
+ SquareLinkedService, SparkLinkedService, ShopifyLinkedService,
+ ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService,
+ PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService,
+ MariaDBLinkedService, MagentoLinkedService, JiraLinkedService,
+ ImpalaLinkedService, HubspotLinkedService, HiveLinkedService,
+ HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService,
+ EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService,
+ ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService,
+ SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService,
+ FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService,
+ CustomDataSourceLinkedService, AmazonRedshiftLinkedService,
+ AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService,
+ SapEccLinkedService, SapCloudForCustomerLinkedService,
+ SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService,
+ AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService,
+ MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService,
+ WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService,
+ AzureMLLinkedService, TeradataLinkedService, Db2LinkedService,
+ SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService,
+ AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService,
+ HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService,
+ AzureKeyVaultLinkedService, AzureBatchLinkedService,
+ AzureSqlDatabaseLinkedService, SqlServerLinkedService,
+ AzureSqlDWLinkedService, AzureTableStorageLinkedService,
+ AzureBlobStorageLinkedService, AzureStorageLinkedService
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 
'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, **kwargs): + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.connect_via = kwargs.get('connect_via', None) + self.description = kwargs.get('description', None) + self.parameters = kwargs.get('parameters', None) + self.annotations = kwargs.get('annotations', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py new file mode 100644 index 000000000000..eadf4030e132 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedService(Model): + """The Azure Data Factory nested object which contains the information and + credential which can be used to connect with related store or compute + resource. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AzureFunctionLinkedService, + AzureDataExplorerLinkedService, GoogleAdWordsLinkedService, + OracleServiceCloudLinkedService, DynamicsAXLinkedService, + ResponsysLinkedService, AzureDatabricksLinkedService, + AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, + SalesforceMarketingCloudLinkedService, NetezzaLinkedService, + VerticaLinkedService, ZohoLinkedService, XeroLinkedService, + SquareLinkedService, SparkLinkedService, ShopifyLinkedService, + ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, + PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, + MariaDBLinkedService, MagentoLinkedService, JiraLinkedService, + ImpalaLinkedService, HubspotLinkedService, HiveLinkedService, + HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService, + EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService, + ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService, + SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService, + FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService, + CustomDataSourceLinkedService, AmazonRedshiftLinkedService, + AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, + SapEccLinkedService, SapCloudForCustomerLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, + AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, + MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, + SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, + AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, + HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + AzureKeyVaultLinkedService, AzureBatchLinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 
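+ # Editorial sketch (illustrative; the payload values are hypothetical):
+ # given {'type': 'AzureBlobStorage', ...}, LinkedService.deserialize(payload)
+ # would return an AzureBlobStorageLinkedService instance via this mapping.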
'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(LinkedService, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.connect_via = connect_via + self.description = description + self.parameters = parameters + self.annotations = annotations + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py new file mode 100644 index 000000000000..28ffeda7d01a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, **kwargs): + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py new file mode 100644 index 000000000000..b6238130bdb6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
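+# Editorial note: a minimal usage sketch, assuming a linked service named
+# 'MyStorage' already exists in the factory (the name is hypothetical):
+#
+#     ref = LinkedServiceReference(reference_name='MyStorage')
+#
+# 'type' is a class-level constant ("LinkedServiceReference") and is not
+# passed to the constructor; only reference_name is required.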
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LinkedServiceReference(Model): + """Linked service reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Linked service reference type. Default value: + "LinkedServiceReference" . + :vartype type: str + :param reference_name: Required. Reference LinkedService name. + :type reference_name: str + :param parameters: Arguments for LinkedService. + :type parameters: dict[str, object] + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "LinkedServiceReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(LinkedServiceReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py new file mode 100644 index 000000000000..75828718f589 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service. 
+ :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, **kwargs): + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py new file mode 100644 index 000000000000..af0a57170e56 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class LinkedServiceResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`LinkedServiceResource <azure.mgmt.datafactory.models.LinkedServiceResource>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[LinkedServiceResource]'} + } + + def __init__(self, *args, **kwargs): + + super(LinkedServiceResourcePaged, self).__init__(*args, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py new file mode 100644 index 000000000000..1fa964b51f57 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_resource_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class LinkedServiceResource(SubResource): + """Linked service resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of linked service.
+ :type properties: ~azure.mgmt.datafactory.models.LinkedService + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LinkedService'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(LinkedServiceResource, self).__init__(**kwargs) + self.properties = properties diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py new file mode 100644 index 000000000000..81b4e7ca619e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LogStorageSettings(Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py new file mode 100644 index 000000000000..4850b7adacdf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
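+# Editorial note: a minimal construction sketch (the names are hypothetical):
+#
+#     settings = LogStorageSettings(
+#         linked_service_name=LinkedServiceReference(reference_name='LogStore'),
+#         path='adfjobs/logs')
+#
+# Only linked_service_name is required; path may be a plain string or an ADF
+# expression object.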
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class LogStorageSettings(Model): + """Log storage settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Log storage linked service + reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity + execution. Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(LogStorageSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py new file mode 100644 index 000000000000..62584b2f704a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. 
Default + value is true. Type: boolean (or Expression with resultType boolean). + :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(LookupActivity, self).__init__(**kwargs) + self.source = kwargs.get('source', None) + self.dataset = kwargs.get('dataset', None) + self.first_row_only = kwargs.get('first_row_only', None) + self.type = 'Lookup' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py new file mode 100644 index 000000000000..41061675ebbe --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/lookup_activity_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class LookupActivity(ExecutionActivity): + """Lookup activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param source: Required. Dataset-specific source properties, same as copy + activity source. + :type source: ~azure.mgmt.datafactory.models.CopySource + :param dataset: Required. Lookup activity dataset reference. + :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + :param first_row_only: Whether to return first row or all rows. Default + value is true. 
Type: boolean (or Expression with resultType boolean). + :type first_row_only: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'source': {'required': True}, + 'dataset': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, + 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'}, + } + + def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None: + super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.source = source + self.dataset = dataset + self.first_row_only = first_row_only + self.type = 'Lookup' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py new file mode 100644 index 000000000000..9d65437b5daa --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MagentoLinkedService(LinkedService): + """Magento server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Magento instance. (i.e. + 192.168.222.110/magento3) + :type host: object + :param access_token: The access token from Magento. 
+ :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Magento' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py new file mode 100644 index 000000000000..74de1573118b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MagentoLinkedService(LinkedService): + """Magento server linked service. + + All required parameters must be populated in order to send to Azure. 
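+
+ Illustrative example (editorial addition, not generator output; the host
+ matches the placeholder above, the token is hypothetical, and SecureString
+ is this package's inline secret model)::
+
+     service = MagentoLinkedService(
+         host='192.168.222.110/magento3',
+         access_token=SecureString(value='<access-token>'))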
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Magento instance. (i.e. + 192.168.222.110/magento3) + :type host: object + :param access_token: The access token from Magento. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Magento' diff 
--git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py new file mode 100644 index 000000000000..ad540093ca55 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MagentoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py new file mode 100644 index 000000000000..481732bb688a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MagentoObjectDataset(Dataset): + """Magento server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MagentoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py new file mode 100644 index 000000000000..df49fe63a544 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MagentoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MagentoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py new file mode 100644 index 000000000000..15efcc12a054 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/magento_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MagentoSource(CopySource): + """A copy activity Magento server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MagentoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py new file mode 100644 index 000000000000..9cbc9e94e7c3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime import IntegrationRuntime + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed + dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :ivar state: Integration runtime state, only valid for managed dedicated + integration runtime. Possible values include: 'Initial', 'Stopped', + 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', + 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration + runtime. + :type compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. 
+ :type ssis_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntime, self).__init__(**kwargs) + self.state = None + self.compute_properties = kwargs.get('compute_properties', None) + self.ssis_properties = kwargs.get('ssis_properties', None) + self.type = 'Managed' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py new file mode 100644 index 000000000000..c70323697fdf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeError(Model): + """Error definition for managed integration runtime. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. 
+ :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.time = None + self.code = None + self.parameters = None + self.message = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py new file mode 100644 index 000000000000..1668c5196537 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error_py3.py @@ -0,0 +1,55 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeError(Model): + """Error definition for managed integration runtime. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. 
+ :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.time = None + self.code = None + self.parameters = None + self.message = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py new file mode 100644 index 000000000000..e9c0169cf6c5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeNode(Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values + include: 'Starting', 'Available', 'Recycling', 'Unavailable' + :vartype status: str or + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. 
+ :type errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ + + _validation = { + 'node_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_id = None + self.status = None + self.errors = kwargs.get('errors', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py new file mode 100644 index 000000000000..0e8104d0de05 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeNode(Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values + include: 'Starting', 'Available', 'Recycling', 'Unavailable' + :vartype status: str or + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. 
+ :type errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ + + _validation = { + 'node_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + } + + def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_id = None + self.status = None + self.errors = errors diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py new file mode 100644 index 000000000000..2329f7a2ba36 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeOperationResult(Model): + """Properties of managed integration runtime operation result. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar type: The operation type. Could be start or stop. + :vartype type: str + :ivar start_time: The start time of the operation. + :vartype start_time: datetime + :ivar result: The operation result. + :vartype result: str + :ivar error_code: The error code. + :vartype error_code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar activity_id: The activity id for the operation request. 
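Both node variants above follow the same readonly pattern: :ivar fields are ignored on requests and only filled in when msrest deserializes a service response, while errors is the one writable field. A small illustrative sketch:

    from azure.mgmt.datafactory.models import ManagedIntegrationRuntimeNode

    node = ManagedIntegrationRuntimeNode()
    assert node.node_id is None and node.status is None  # server-populated
    for error in node.errors or []:  # errors is the only writable field
        print(error.code, error.message)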
+ :vartype activity_id: str + """ + + _validation = { + 'type': {'readonly': True}, + 'start_time': {'readonly': True}, + 'result': {'readonly': True}, + 'error_code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'activity_id': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'result': {'key': 'result', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'activity_id': {'key': 'activityId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + self.start_time = None + self.result = None + self.error_code = None + self.parameters = None + self.activity_id = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py new file mode 100644 index 000000000000..58a80c0e600e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ManagedIntegrationRuntimeOperationResult(Model): + """Properties of managed integration runtime operation result. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar type: The operation type. Could be start or stop. + :vartype type: str + :ivar start_time: The start time of the operation. + :vartype start_time: datetime + :ivar result: The operation result. + :vartype result: str + :ivar error_code: The error code. + :vartype error_code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar activity_id: The activity id for the operation request. 
+ :vartype activity_id: str + """ + + _validation = { + 'type': {'readonly': True}, + 'start_time': {'readonly': True}, + 'result': {'readonly': True}, + 'error_code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'activity_id': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'result': {'key': 'result', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'activity_id': {'key': 'activityId', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + self.start_time = None + self.result = None + self.error_code = None + self.parameters = None + self.activity_id = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py new file mode 100644 index 000000000000..0e71d8b09f4e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime_py3 import IntegrationRuntime + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed + dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :ivar state: Integration runtime state, only valid for managed dedicated + integration runtime. Possible values include: 'Initial', 'Stopped', + 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', + 'Limited', 'Offline', 'AccessDenied' + :vartype state: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration + runtime. + :type compute_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. 
+ :type ssis_properties:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'},
+ 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'},
+ }
+
+ def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None:
+ super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
+ self.state = None
+ self.compute_properties = compute_properties
+ self.ssis_properties = ssis_properties
+ self.type = 'Managed'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py
new file mode 100644
index 000000000000..17d21775f09f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_status import IntegrationRuntimeStatus
+
+
+class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Managed integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar data_factory_name: The data factory name which the integration
+ runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar state: The state of the integration runtime. Possible values
+ include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar create_time: The time at which the integration runtime was created,
+ in ISO8601 format.
+ :vartype create_time: datetime
+ :ivar nodes: The list of nodes for managed integration runtime.
+ :vartype nodes:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+ :ivar other_errors: The errors that occurred on this integration runtime.
+ :vartype other_errors:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ :ivar last_operation: The last operation result that occurred on this
+ integration runtime.
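With the Python 3 constructor above, creating a managed runtime client-side looks roughly like the sketch below; the compute values are placeholders, and IntegrationRuntimeComputeProperties is assumed to come from the same models package:

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeComputeProperties,
        ManagedIntegrationRuntime,
    )

    runtime = ManagedIntegrationRuntime(
        description='Managed IR for SSIS packages',
        compute_properties=IntegrationRuntimeComputeProperties(
            location='WestEurope',          # placeholder region
            node_size='Standard_D2_v3',     # placeholder node size
            number_of_nodes=1,
        ),
    )
    assert runtime.type == 'Managed'  # discriminator fixed by the constructor
    assert runtime.state is None      # readonly; reported by the service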
+ :vartype last_operation:
+ ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult
+ """
+
+ _validation = {
+ 'data_factory_name': {'readonly': True},
+ 'state': {'readonly': True},
+ 'type': {'required': True},
+ 'create_time': {'readonly': True},
+ 'nodes': {'readonly': True},
+ 'other_errors': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+ 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'},
+ 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'},
+ 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs)
+ self.create_time = None
+ self.nodes = None
+ self.other_errors = None
+ self.last_operation = None
+ self.type = 'Managed'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py
new file mode 100644
index 000000000000..03d9451045bd
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_status_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_status_py3 import IntegrationRuntimeStatus
+
+
+class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Managed integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :ivar data_factory_name: The data factory name which the integration
+ runtime belongs to.
+ :vartype data_factory_name: str
+ :ivar state: The state of the integration runtime. Possible values
+ include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+ 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+ :vartype state: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar create_time: The time at which the integration runtime was created,
+ in ISO8601 format.
+ :vartype create_time: datetime
+ :ivar nodes: The list of nodes for managed integration runtime.
+ :vartype nodes:
+ list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+ :ivar other_errors: The errors that occurred on this integration runtime.
+ :vartype other_errors: + list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + :ivar last_operation: The last operation result that occurred on this + integration runtime. + :vartype last_operation: + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult + """ + + _validation = { + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'type': {'required': True}, + 'create_time': {'readonly': True}, + 'nodes': {'readonly': True}, + 'other_errors': {'readonly': True}, + 'last_operation': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, + 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, + 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) + self.create_time = None + self.nodes = None + self.other_errors = None + self.last_operation = None + self.type = 'Managed' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py new file mode 100644 index 000000000000..3bbe048d4877 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MariaDBLinkedService(LinkedService): + """MariaDB server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. 
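The status model just completed is read-only end to end, so in practice it is obtained from the service rather than constructed. A hedged sketch, assuming an already-authenticated DataFactoryManagementClient named client and placeholder resource names:

    # IntegrationRuntimesOperations.get_status returns a response object whose
    # .properties is the ManagedIntegrationRuntimeStatus for a managed IR.
    response = client.integration_runtimes.get_status(
        'my-resource-group', 'my-factory', 'my-managed-ir')
    status = response.properties
    print(status.state, status.create_time)
    for node in status.nodes or []:
        print(node.node_id, node.status)
    if status.last_operation:
        print(status.last_operation.type, status.last_operation.result)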
+ :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MariaDBLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MariaDB' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py new file mode 100644 index 000000000000..475284d56038 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MariaDBLinkedService(LinkedService): + """MariaDB server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'MariaDB' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py new file mode 100644 index 000000000000..a744c1c5ff8f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MariaDBSource(CopySource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
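Putting the connection-string and pwd fields of the MariaDB linked service together, a configuration that keeps the password out of the connection string might look like this sketch; the server, secret, and linked service names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference,
        LinkedServiceReference,
        MariaDBLinkedService,
    )

    linked_service = MariaDBLinkedService(
        # Connection string without the password; it is resolved at run time
        # from Key Vault through an existing AzureKeyVault linked service.
        connection_string='Server=mariadb.example.com;Port=3306;Database=orders;UID=loader',
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVaultLinkedService'),
            secret_name='mariadb-password',
        ),
    )
    assert linked_service.type == 'MariaDB'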
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MariaDBSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MariaDBSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py new file mode 100644 index 000000000000..472877b8f0bb --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MariaDBSource(CopySource): + """A copy activity MariaDB server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MariaDBSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py new file mode 100644 index 000000000000..66dc9c8ea9b7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
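A copy-activity source built from the MariaDBSource model above is just the query plus the retry knobs shared by all sources; the query text is a placeholder and the wait string follows the documented hh:mm:ss pattern:

    from azure.mgmt.datafactory.models import MariaDBSource

    source = MariaDBSource(
        query='SELECT id, total FROM orders',  # placeholder query
        source_retry_count=3,
        source_retry_wait='00:00:30',
    )
    assert source.type == 'MariaDBSource'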
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MariaDBTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MariaDBTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py new file mode 100644 index 000000000000..ac3c8cf2ea72 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/maria_db_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MariaDBTableDataset(Dataset): + """MariaDB server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MariaDBTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py new file mode 100644 index 000000000000..2a9e76446122 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MarketoLinkedService(LinkedService): + """Marketo server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Marketo server. (i.e. + 123-ABC-321.mktorest.com) + :type endpoint: object + :param client_id: Required. The client Id of your Marketo service. + :type client_id: object + :param client_secret: The client secret of your Marketo service. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. 
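The MariaDB dataset completed above needs only a reference to the linked service plus the optional table name; roughly, with placeholder names:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        MariaDBTableDataset,
    )

    dataset = MariaDBTableDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MyMariaDBLinkedService'),
        table_name='orders',
    )
    assert dataset.type == 'MariaDBTable'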
The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Marketo' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py new file mode 100644 index 000000000000..dc326f24acd5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_linked_service_py3.py @@ -0,0 +1,90 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MarketoLinkedService(LinkedService): + """Marketo server linked service. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Marketo server. (i.e. + 123-ABC-321.mktorest.com) + :type endpoint: object + :param client_id: Required. The client Id of your Marketo service. + :type client_id: object + :param client_secret: The client secret of your Marketo service. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Marketo' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py new file mode 100644 index 000000000000..63daa10047b9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. 
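With endpoint and client_id required, a minimal Marketo linked service from the model above is sketched below; SecureString is assumed to be one of the SecretBase implementations in this package, and all values are placeholders:

    from azure.mgmt.datafactory.models import MarketoLinkedService, SecureString

    linked_service = MarketoLinkedService(
        endpoint='123-ABC-321.mktorest.com',
        client_id='my-marketo-client-id',
        client_secret=SecureString(value='<client-secret>'),
    )
    assert linked_service.type == 'Marketo'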
Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MarketoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py new file mode 100644 index 000000000000..7179d5af53dd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MarketoObjectDataset(Dataset): + """Marketo server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MarketoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py new file mode 100644 index 000000000000..6d2061ef0dee --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MarketoSource(CopySource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
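The Marketo dataset completed above mirrors the MariaDB one: a linked-service reference plus an optional object name (placeholders below):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        MarketoObjectDataset,
    )

    dataset = MarketoObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MyMarketoLinkedService'),
        table_name='Activities_SendEmail',  # placeholder Marketo object
    )
    assert dataset.type == 'MarketoObject'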
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MarketoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MarketoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py new file mode 100644 index 000000000000..573dc0439754 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/marketo_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MarketoSource(CopySource): + """A copy activity Marketo server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MarketoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py new file mode 100644 index 000000000000..796c5e14eaca --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. The table name of the MongoDB database. + Type: string (or Expression with resultType string). 
+ :type collection_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) + self.type = 'MongoDbCollection' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py new file mode 100644 index 000000000000..68fe2affb0e4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbCollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection_name: Required. The table name of the MongoDB database. + Type: string (or Expression with resultType string). 
+ :type collection_name: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.collection_name = collection_name
+ self.type = 'MongoDbCollection'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py
new file mode 100644
index 000000000000..a2d2127d1397
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class MongoDbCursorMethodsProperties(Model):
+ """Cursor methods for a MongoDB query.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param project: Specifies the fields to return in the documents that match
+ the query filter. To return all fields in the matching documents, omit
+ this parameter. Type: string (or Expression with resultType string).
+ :type project: object
+ :param sort: Specifies the order in which the query returns matching
+ documents. Type: string (or Expression with resultType string).
+ :type sort: object
+ :param skip: Specifies how many documents to skip and where MongoDB
+ begins returning results. This approach may be useful for implementing
+ paginated results. Type: integer (or Expression with resultType integer).
+ :type skip: object
+ :param limit: Specifies the maximum number of documents the server
+ returns. limit() is analogous to the LIMIT statement in a SQL database.
+ Type: integer (or Expression with resultType integer).
+ :type limit: object
+ """
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'project': {'key': 'project', 'type': 'object'},
+ 'sort': {'key': 'sort', 'type': 'object'},
+ 'skip': {'key': 'skip', 'type': 'object'},
+ 'limit': {'key': 'limit', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MongoDbCursorMethodsProperties, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.project = kwargs.get('project', None)
+ self.sort = kwargs.get('sort', None)
+ self.skip = kwargs.get('skip', None)
+ self.limit = kwargs.get('limit', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
new file mode 100644
index 000000000000..e1e3f50d1539
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_cursor_methods_properties_py3.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class MongoDbCursorMethodsProperties(Model):
+ """Cursor methods for a MongoDB query.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param project: Specifies the fields to return in the documents that match
+ the query filter. To return all fields in the matching documents, omit
+ this parameter. Type: string (or Expression with resultType string).
+ :type project: object
+ :param sort: Specifies the order in which the query returns matching
+ documents. Type: string (or Expression with resultType string).
+ :type sort: object
+ :param skip: Specifies how many documents to skip and where MongoDB
+ begins returning results. This approach may be useful for implementing
+ paginated results. Type: integer (or Expression with resultType integer).
+ :type skip: object
+ :param limit: Specifies the maximum number of documents the server
+ returns. limit() is analogous to the LIMIT statement in a SQL database.
+ Type: integer (or Expression with resultType integer).
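A minimal usage sketch for these cursor options (the field values are illustrative, and the class is assumed to be importable from azure.mgmt.datafactory.models):

    from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

    # Illustrative: return only name/age, sort by age descending,
    # skip the first 100 documents, and cap each result set at 50.
    cursor_methods = MongoDbCursorMethodsProperties(
        project='{"name": 1, "age": 1, "_id": 0}',
        sort='{"age": -1}',
        skip=100,
        limit=50,
    )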
+ :type limit: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'project': {'key': 'project', 'type': 'object'}, + 'sort': {'key': 'sort', 'type': 'object'}, + 'skip': {'key': 'skip', 'type': 'object'}, + 'limit': {'key': 'limit', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.project = project + self.sort = sort + self.skip = skip + self.limit = limit diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py new file mode 100644 index 000000000000..76d162b0ff70 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. 
Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.database_name = kwargs.get('database_name', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.auth_source = kwargs.get('auth_source', None) + self.port = kwargs.get('port', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MongoDb' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py new file mode 100644 index 000000000000..95308b6ea8f0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_linked_service_py3.py @@ -0,0 +1,109 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MongoDbLinkedService(LinkedService): + """Linked service for MongoDb data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. The IP address or server name of the MongoDB + server. Type: string (or Expression with resultType string). + :type server: object + :param authentication_type: The authentication type to be used to connect + to the MongoDB database. Possible values include: 'Basic', 'Anonymous' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.MongoDbAuthenticationType + :param database_name: Required. The name of the MongoDB database that you + want to access. Type: string (or Expression with resultType string). + :type database_name: object + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param auth_source: Database to verify the username and password. Type: + string (or Expression with resultType string). + :type auth_source: object + :param port: The TCP port number that the MongoDB server uses to listen + for client connections. The default value is 27017. Type: integer (or + Expression with resultType integer), minimum: 0. + :type port: object + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. Type: boolean (or + Expression with resultType boolean). + :type enable_ssl: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + Type: boolean (or Expression with resultType boolean). + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
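A minimal construction sketch for this linked service (host, database, and credentials are placeholders; SecureString is the SecretBase implementation assumed here):

    from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

    mongo_ls = MongoDbLinkedService(
        server='mongo.contoso.example',           # placeholder host
        database_name='sales',                    # placeholder database
        authentication_type='Basic',
        username='reader',
        password=SecureString(value='<secret>'),  # any SecretBase works
        port=27017,                               # the documented default
        enable_ssl=True,
    )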
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.authentication_type = authentication_type + self.database_name = database_name + self.username = username + self.password = password + self.auth_source = auth_source + self.port = port + self.enable_ssl = enable_ssl + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'MongoDb' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py new file mode 100644 index 000000000000..3da4b931f5e5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. 
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MongoDbSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py new file mode 100644 index 000000000000..ab3e5b6e0cc9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MongoDbSource(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Should be a SQL-92 query expression. Type: + string (or Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MongoDbSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py new file mode 100644 index 000000000000..17089373d4c5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
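A one-line sketch for the query parameter above (the table and predicate are hypothetical):

    from azure.mgmt.datafactory.models import MongoDbSource

    # SQL-92 text, as the docstring requires.
    source = MongoDbSource(query='SELECT * FROM orders WHERE status = 1')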
+ :type collection: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) + self.collection = kwargs.get('collection', None) + self.type = 'MongoDbV2Collection' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py new file mode 100644 index 000000000000..ad1e5c538645 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_collection_dataset_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MongoDbV2CollectionDataset(Dataset): + """The MongoDB database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param collection: Required. The collection name of the MongoDB database. + Type: string (or Expression with resultType string). 
+ :type collection: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'collection': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'collection': {'key': 'typeProperties.collection', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+ super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.collection = collection
+ self.type = 'MongoDbV2Collection'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
new file mode 100644
index 000000000000..bb29fc767420
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+ """Linked service for MongoDB data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The MongoDB connection string. Type:
+ string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the MongoDB database that you want
+ to access. Type: string (or Expression with resultType string).
+ :type database: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'connection_string': {'required': True},
+ 'database': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+ 'database': {'key': 'typeProperties.database', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MongoDbV2LinkedService, self).__init__(**kwargs)
+ self.connection_string = kwargs.get('connection_string', None)
+ self.database = kwargs.get('database', None)
+ self.type = 'MongoDbV2'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
new file mode 100644
index 000000000000..d1388ce797a5
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_linked_service_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class MongoDbV2LinkedService(LinkedService):
+ """Linked service for MongoDB data source.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param connection_string: Required. The MongoDB connection string. Type:
+ string, SecureString or AzureKeyVaultSecretReference.
+ :type connection_string: object
+ :param database: Required. The name of the MongoDB database that you want
+ to access. Type: string (or Expression with resultType string).
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'MongoDbV2' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py new file mode 100644 index 000000000000..295b74228b9a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MongoDbV2Source(CopySource): + """A copy activity source for a MongoDB database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param filter: Specifies selection filter using query operators. To return + all documents in a collection, omit this parameter or pass an empty + document ({}). Type: string (or Expression with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query + :type cursor_methods: + ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each + batch of the response from MongoDB instance. 
In most cases, modifying the
+ batch size will not affect the user or the application. This property's
+ main purpose is to avoid hitting the limit on response size. Type:
+ integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(MongoDbV2Source, self).__init__(**kwargs)
+ self.filter = kwargs.get('filter', None)
+ self.cursor_methods = kwargs.get('cursor_methods', None)
+ self.batch_size = kwargs.get('batch_size', None)
+ self.type = 'MongoDbV2Source'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
new file mode 100644
index 000000000000..872b060a49bb
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/mongo_db_v2_source_py3.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class MongoDbV2Source(CopySource):
+ """A copy activity source for a MongoDB database.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param source_retry_count: Source retry count. Type: integer (or
+ Expression with resultType integer).
+ :type source_retry_count: object
+ :param source_retry_wait: Source retry wait. Type: string (or Expression
+ with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type source_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the source data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param filter: Specifies a selection filter using query operators. To return
+ all documents in a collection, omit this parameter or pass an empty
+ document ({}). Type: string (or Expression with resultType string).
+ :type filter: object
+ :param cursor_methods: Cursor methods for a MongoDB query.
+ :type cursor_methods:
+ ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties
+ :param batch_size: Specifies the number of documents to return in each
+ batch of the response from the MongoDB instance.
In most cases, modifying the
+ batch size will not affect the user or the application. This property's
+ main purpose is to avoid hitting the limit on response size. Type:
+ integer (or Expression with resultType integer).
+ :type batch_size: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'filter': {'key': 'filter', 'type': 'object'},
+ 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
+ 'batch_size': {'key': 'batchSize', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None:
+ super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.filter = filter
+ self.cursor_methods = cursor_methods
+ self.batch_size = batch_size
+ self.type = 'MongoDbV2Source'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py
new file mode 100644
index 000000000000..1be28aa1b6ab
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .trigger import Trigger
+
+
+class MultiplePipelineTrigger(Trigger):
+ """Base class for all triggers that support a one-to-many model from
+ trigger to pipeline.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Trigger description.
+ :type description: str
+ :ivar runtime_state: Indicates if trigger is running or not. Updated when
+ Start/Stop APIs are called on the Trigger. Possible values include:
+ 'Started', 'Stopped', 'Disabled'
+ :vartype runtime_state: str or
+ ~azure.mgmt.datafactory.models.TriggerRuntimeState
+ :param annotations: List of tags that can be used for describing the
+ trigger.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param pipelines: Pipelines that need to be started.
+ :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + } + + def __init__(self, **kwargs): + super(MultiplePipelineTrigger, self).__init__(**kwargs) + self.pipelines = kwargs.get('pipelines', None) + self.type = 'MultiplePipelineTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py new file mode 100644 index 000000000000..206ab74ef419 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/multiple_pipeline_trigger_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to + pipeline. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. 
+ :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipelines = pipelines + self.type = 'MultiplePipelineTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py new file mode 100644 index 000000000000..ec85b0136714 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MySqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MySql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py new file mode 100644 index 000000000000..b8038df22fd6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/my_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MySqlLinkedService(LinkedService): + """Linked service for MySQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. + :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MySql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py new file mode 100644 index 000000000000..5d94bdecaf62 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. 
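For the MySqlLinkedService pair above, a minimal construction sketch. Note that in this version `connection_string` is a `SecretBase`, so it is wrapped in `SecureString`; the server, database, and secret names are placeholders, and the sketch assumes the common pattern of resolving the password from Key Vault:

```python
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    MySqlLinkedService,
    SecureString,
)

# The inline connection string carries everything except the password, which
# is fetched from Key Vault at runtime (placeholder names throughout).
mysql_ls = MySqlLinkedService(
    connection_string=SecureString(
        value='Server=myserver;Port=3306;Database=mydb;UID=myuser;SSLMode=1'),
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='mysql-password',
    ),
)
```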
Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Netezza' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py new file mode 100644 index 000000000000..2fcc288fd5b7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class NetezzaLinkedService(LinkedService): + """Netezza linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Netezza' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py new file mode 100644 index 000000000000..caf73f9ef81d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class NetezzaSource(CopySource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
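The NetezzaLinkedService pair above differs from the MySQL one in that `connection_string` is a plain object (an ODBC-style string) and the Key Vault hook is the dedicated `pwd` property. A sketch with placeholder server and secret names:

```python
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    NetezzaLinkedService,
)

# The password portion is pulled from Key Vault via 'pwd' rather than being
# inlined in the connection string (placeholder names).
netezza_ls = NetezzaLinkedService(
    connection_string='Driver={NetezzaSQL};servername=my-nps;port=5480;database=SALES',
    pwd=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVaultLS'),
        secret_name='netezza-pwd',
    ),
)
```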
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'NetezzaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py new file mode 100644 index 000000000000..101a1f26a74d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class NetezzaSource(CopySource): + """A copy activity Netezza source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'NetezzaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py new file mode 100644 index 000000000000..cf3b9205846c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class NetezzaTableDataset(Dataset): + """Netezza dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
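A NetezzaSource, as defined above, is only meaningful as the `source` of a copy activity. A sketch of that wiring — the dataset names are hypothetical, and the query is an inline SQL string (it could equally be an Expression object):

```python
from azure.mgmt.datafactory.models import (
    BlobSink,
    CopyActivity,
    DatasetReference,
    NetezzaSource,
)

# Copy the result of a Netezza query into a blob dataset (placeholder names).
copy = CopyActivity(
    name='CopyFromNetezza',
    inputs=[DatasetReference(reference_name='NetezzaInput')],
    outputs=[DatasetReference(reference_name='BlobOutput')],
    source=NetezzaSource(query='SELECT * FROM SALES.ORDERS'),
    sink=BlobSink(),
)
```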
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(NetezzaTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'NetezzaTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py new file mode 100644 index 000000000000..39de0032e8c9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class NetezzaTableDataset(Dataset): + """Netezza dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'NetezzaTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py new file mode 100644 index 000000000000..01db8d71e924 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ODataLinkedService(LinkedService): + """Open Data Protocol (OData) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The URL of the OData service endpoint. Type: string + (or Expression with resultType string). + :type url: object + :param authentication_type: Type of authentication used to connect to the + OData service. 
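For the NetezzaTableDataset pair above, only the linked service reference is required; `table_name` is optional because a copy activity source can instead supply a query. A minimal sketch with a placeholder linked service name:

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    NetezzaTableDataset,
)

# 'NetezzaLS' refers to a previously created Netezza linked service.
netezza_ds = NetezzaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='NetezzaLS'),
    table_name='SALES.ORDERS',
)
```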
+ Possible values include: 'Basic', 'Anonymous', 'Windows',
+ 'AadServicePrincipal', 'ManagedServiceIdentity'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ODataAuthenticationType
+ :param user_name: User name of the OData service. Type: string (or
+ Expression with resultType string).
+ :type user_name: object
+ :param password: Password of the OData service.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Specify the tenant information (domain name or tenant ID)
+ under which your application resides. Type: string (or Expression with
+ resultType string).
+ :type tenant: object
+ :param service_principal_id: Specify the application id of your
+ application registered in Azure Active Directory. Type: string (or
+ Expression with resultType string).
+ :type service_principal_id: object
+ :param aad_resource_id: Specify the resource you are requesting
+ authorization to use. Type: string (or Expression with resultType
+ string).
+ :type aad_resource_id: object
+ :param aad_service_principal_credential_type: Specify the credential type
+ (key or cert) that is used for the service principal. Possible values
+ include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+ :type aad_service_principal_credential_type: str or
+ ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+ :param service_principal_key: Specify the secret of your application
+ registered in Azure Active Directory. Type: string (or Expression with
+ resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert: Specify the base64 encoded
+ certificate of your application registered in Azure Active Directory.
+ Type: string (or Expression with resultType string).
+ :type service_principal_embedded_cert:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert_password: Specify the password of
+ your certificate if your certificate has a password and you are using
+ AadServicePrincipal authentication. Type: string (or Expression with
+ resultType string).
+ :type service_principal_embedded_cert_password:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.tenant = kwargs.get('tenant', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.service_principal_embedded_cert = kwargs.get('service_principal_embedded_cert', None) + self.service_principal_embedded_cert_password = kwargs.get('service_principal_embedded_cert_password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OData' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py new file mode 100644 index 000000000000..fcf2d8bb9819 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_linked_service_py3.py @@ -0,0 +1,127 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ODataLinkedService(LinkedService): + """Open Data Protocol (OData) linked service. 
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param url: Required. The URL of the OData service endpoint. Type: string
+ (or Expression with resultType string).
+ :type url: object
+ :param authentication_type: Type of authentication used to connect to the
+ OData service. Possible values include: 'Basic', 'Anonymous', 'Windows',
+ 'AadServicePrincipal', 'ManagedServiceIdentity'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.ODataAuthenticationType
+ :param user_name: User name of the OData service. Type: string (or
+ Expression with resultType string).
+ :type user_name: object
+ :param password: Password of the OData service.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param tenant: Specify the tenant information (domain name or tenant ID)
+ under which your application resides. Type: string (or Expression with
+ resultType string).
+ :type tenant: object
+ :param service_principal_id: Specify the application id of your
+ application registered in Azure Active Directory. Type: string (or
+ Expression with resultType string).
+ :type service_principal_id: object
+ :param aad_resource_id: Specify the resource you are requesting
+ authorization to use. Type: string (or Expression with resultType
+ string).
+ :type aad_resource_id: object
+ :param aad_service_principal_credential_type: Specify the credential type
+ (key or cert) that is used for the service principal. Possible values
+ include: 'ServicePrincipalKey', 'ServicePrincipalCert'
+ :type aad_service_principal_credential_type: str or
+ ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType
+ :param service_principal_key: Specify the secret of your application
+ registered in Azure Active Directory. Type: string (or Expression with
+ resultType string).
+ :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert: Specify the base64 encoded
+ certificate of your application registered in Azure Active Directory.
+ Type: string (or Expression with resultType string).
+ :type service_principal_embedded_cert:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param service_principal_embedded_cert_password: Specify the password of
+ your certificate if your certificate has a password and you are using
+ AadServicePrincipal authentication. Type: string (or Expression with
+ resultType string).
+ :type service_principal_embedded_cert_password:
+ ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, + 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: + super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.tenant = tenant + self.service_principal_id = service_principal_id + self.aad_resource_id = aad_resource_id + self.aad_service_principal_credential_type = aad_service_principal_credential_type + self.service_principal_key = service_principal_key + self.service_principal_embedded_cert = service_principal_embedded_cert + self.service_principal_embedded_cert_password = service_principal_embedded_cert_password + self.encrypted_credential = encrypted_credential + self.type = 'OData' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py new file mode 100644 index 000000000000..658cf40c8d2b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
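The ODataLinkedService pair above adds the AAD service-principal surface (tenant, application ID, key or embedded certificate). A sketch of the key-based variant — all tenant, application, and resource values are placeholders:

```python
from azure.mgmt.datafactory.models import (
    ODataLinkedService,
    SecureString,
)

# Service-principal-key authentication against an OData endpoint; every
# identifier below is illustrative, not a real tenant or app.
odata_ls = ODataLinkedService(
    url='https://services.odata.org/V4/Northwind/Northwind.svc',
    authentication_type='AadServicePrincipal',
    tenant='contoso.onmicrosoft.com',
    service_principal_id='00000000-0000-0000-0000-000000000000',
    aad_service_principal_credential_type='ServicePrincipalKey',
    service_principal_key=SecureString(value='<app-secret>'),
    aad_resource_id='https://my-protected-resource',
)
```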
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: The OData resource path. Type: string (or Expression with + resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ODataResourceDataset, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.type = 'ODataResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py new file mode 100644 index 000000000000..5951a2cf6d80 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odata_resource_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ODataResourceDataset(Dataset): + """The Open Data Protocol (OData) resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param path: The OData resource path. Type: string (or Expression with + resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'path': {'key': 'typeProperties.path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: + super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.path = path + self.type = 'ODataResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py new file mode 100644 index 000000000000..53d21dee2def --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
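For the ODataResourceDataset pair above, `path` selects the entity set exposed by the service. A minimal sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import (
    DatasetFolder,
    LinkedServiceReference,
    ODataResourceDataset,
)

# 'ODataLS' refers to a previously created OData linked service; 'Orders'
# is the entity set to read.
odata_ds = ODataResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='ODataLS'),
    path='Orders',
    folder=DatasetFolder(name='odata'),
)
```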
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OdbcLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Odbc' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py new file mode 100644 index 000000000000..2e376d23c67a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OdbcLinkedService(LinkedService): + """Open Database Connectivity (ODBC) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + ODBC data store. Possible values are: Anonymous and Basic. Type: string + (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Odbc' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py new file mode 100644 index 000000000000..ced7e1dbd9e4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
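As the OdbcLinkedService docstrings above describe, the non-credential portion of the DSN goes in `connection_string`, while credentials can be supplied either as a driver-specific `credential` secret or as Basic `user_name`/`password`. A sketch of the Basic variant with placeholder values:

```python
from azure.mgmt.datafactory.models import (
    OdbcLinkedService,
    SecureString,
)

# Placeholder driver and host; the password is kept out of the DSN and
# supplied as a secret for Basic authentication.
odbc_ls = OdbcLinkedService(
    connection_string='Driver={ProgressOpenEdge};Host=my-host;Port=5555;DB=mydb',
    authentication_type='Basic',
    user_name='etl_user',
    password=SecureString(value='<password>'),
)
```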
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class OdbcSink(CopySink):
+    """A copy activity ODBC sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pre_copy_script: A query to execute before starting the copy. Type:
+     string (or Expression with resultType string).
+    :type pre_copy_script: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OdbcSink, self).__init__(**kwargs)
+        self.pre_copy_script = kwargs.get('pre_copy_script', None)
+        self.type = 'OdbcSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py
new file mode 100644
index 000000000000..9a181f8df7e9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/odbc_sink_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class OdbcSink(CopySink):
+    """A copy activity ODBC sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pre_copy_script: A query to execute before starting the copy. Type:
+     string (or Expression with resultType string).
+    :type pre_copy_script: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
+        super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.pre_copy_script = pre_copy_script
+        self.type = 'OdbcSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py
new file mode 100644
index 000000000000..baa90666d669
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset.py
@@ -0,0 +1,79 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class Office365Dataset(Dataset):
+    """The Office365 account.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: Required. Name of the dataset to extract from Office + 365. Type: string (or Expression with resultType string). + :type table_name: object + :param predicate: A predicate expression that can be used to filter the + specific rows to extract from Office 365. Type: string (or Expression with + resultType string). + :type predicate: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'table_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(Office365Dataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.predicate = kwargs.get('predicate', None) + self.type = 'Office365Table' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py new file mode 100644 index 000000000000..5517f7daf9e3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_dataset_py3.py @@ -0,0 +1,79 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class Office365Dataset(Dataset): + """The Office365 account. + + All required parameters must be populated in order to send to Azure. 
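+
+    Example (a minimal sketch; the linked service reference name and the
+    Office 365 table name below are illustrative placeholders, not values
+    taken from this model)::
+
+        dataset = Office365Dataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='<office365-linked-service>'),
+            table_name='BasicDataSet_v0.Message_v0')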
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: Required. Name of the dataset to extract from Office
+     365. Type: string (or Expression with resultType string).
+    :type table_name: object
+    :param predicate: A predicate expression that can be used to filter the
+     specific rows to extract from Office 365. Type: string (or Expression with
+     resultType string).
+    :type predicate: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'table_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+        'predicate': {'key': 'typeProperties.predicate', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None:
+        super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.predicate = predicate
+        self.type = 'Office365Table'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py
new file mode 100644
index 000000000000..2dc98897482a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class Office365LinkedService(LinkedService):
+    """Office365 linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param office365_tenant_id: Required. Azure tenant ID to which the Office
+     365 account belongs. Type: string (or Expression with resultType string).
+    :type office365_tenant_id: object
+    :param service_principal_tenant_id: Required. Specify the tenant
+     information under which your Azure AD web application resides. Type:
+     string (or Expression with resultType string).
+    :type service_principal_tenant_id: object
+    :param service_principal_id: Required. Specify the application's client
+     ID. Type: string (or Expression with resultType string).
+    :type service_principal_id: object
+    :param service_principal_key: Required. Specify the application's key.
+    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'office365_tenant_id': {'required': True},
+        'service_principal_tenant_id': {'required': True},
+        'service_principal_id': {'required': True},
+        'service_principal_key': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'},
+        'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'},
+        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(Office365LinkedService, self).__init__(**kwargs)
+        self.office365_tenant_id = kwargs.get('office365_tenant_id', None)
+        self.service_principal_tenant_id = kwargs.get('service_principal_tenant_id', None)
+        self.service_principal_id = kwargs.get('service_principal_id', None)
+        self.service_principal_key = kwargs.get('service_principal_key', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'Office365'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py
new file mode 100644
index 000000000000..5a69c0d895fa
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_linked_service_py3.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class Office365LinkedService(LinkedService):
+    """Office365 linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param office365_tenant_id: Required. Azure tenant ID to which the Office
+     365 account belongs.
Type: string (or Expression with resultType string). + :type office365_tenant_id: object + :param service_principal_tenant_id: Required. Specify the tenant + information under which your Azure AD web application resides. Type: + string (or Expression with resultType string). + :type service_principal_tenant_id: object + :param service_principal_id: Required. Specify the application's client + ID. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. Specify the application's key. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'office365_tenant_id': {'required': True}, + 'service_principal_tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, + 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: + super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.office365_tenant_id = office365_tenant_id + self.service_principal_tenant_id = service_principal_tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + self.type = 'Office365' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py new file mode 100644 index 000000000000..8dff7a01ccaa --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
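+#
+# Usage sketch (illustrative): Office365Source adds no properties beyond the
+# common CopySource ones, so a copy activity source can be as simple as
+#
+#     source = Office365Source()
+#
+# paired with an Office365Dataset as the copy activity input.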
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class Office365Source(CopySource):
+    """A copy activity source for an Office365 service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(Office365Source, self).__init__(**kwargs)
+        self.type = 'Office365Source'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py
new file mode 100644
index 000000000000..25ae6340ae01
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/office365_source_py3.py
@@ -0,0 +1,52 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class Office365Source(CopySource):
+    """A copy activity source for an Office365 service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'Office365Source' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py new file mode 100644 index 000000000000..db8cde8db784 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Operation(Model): + """Azure Data Factory API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param origin: The intended executor of the operation. + :type origin: str + :param display: Metadata associated with the operation. + :type display: ~azure.mgmt.datafactory.models.OperationDisplay + :param service_specification: Details about a service operation. + :type service_specification: + ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + } + + def __init__(self, **kwargs): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.origin = kwargs.get('origin', None) + self.display = kwargs.get('display', None) + self.service_specification = kwargs.get('service_specification', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py new file mode 100644 index 000000000000..1d96541c0581 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc. + :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationDisplay, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py new file mode 100644 index 000000000000..dfbb782627f4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_display_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationDisplay(Model): + """Metadata associated with the operation. + + :param description: The description of the operation. + :type description: str + :param provider: The name of the provider. + :type provider: str + :param resource: The name of the resource type on which the operation is + performed. + :type resource: str + :param operation: The type of operation: get, read, delete, etc. 
+ :type operation: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: + super(OperationDisplay, self).__init__(**kwargs) + self.description = description + self.provider = provider + self.resource = resource + self.operation = operation diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py new file mode 100644 index 000000000000..93bfaf4ed0de --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. + :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.blob_duration = kwargs.get('blob_duration', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py new file mode 100644 index 000000000000..2cdd941fab7b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_log_specification_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationLogSpecification(Model): + """Details about an operation related to logs. + + :param name: The name of the log category. + :type name: str + :param display_name: Localized display name. + :type display_name: str + :param blob_duration: Blobs created in the customer storage account, per + hour. 
+ :type blob_duration: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationLogSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.blob_duration = blob_duration diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py new file mode 100644 index 000000000000..974e0cbf4b0b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. + :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. + :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = kwargs.get('time_grain', None) + self.blob_duration = kwargs.get('blob_duration', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py new file mode 100644 index 000000000000..312b83a23701 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_availability_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricAvailability(Model): + """Defines how often data for a metric becomes available. + + :param time_grain: The granularity for the metric. + :type time_grain: str + :param blob_duration: Blob created in the customer storage account, per + hour. 
+ :type blob_duration: str + """ + + _attribute_map = { + 'time_grain': {'key': 'timeGrain', 'type': 'str'}, + 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, + } + + def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: + super(OperationMetricAvailability, self).__init__(**kwargs) + self.time_grain = time_grain + self.blob_duration = blob_duration diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py new file mode 100644 index 000000000000..24232e7b5470 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. + :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.to_be_exported_for_shoebox = kwargs.get('to_be_exported_for_shoebox', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py new file mode 100644 index 000000000000..1d8610b7fab8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_dimension_py3.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricDimension(Model): + """Defines the metric dimension. + + :param name: The name of the dimension for the metric. + :type name: str + :param display_name: The display name of the metric dimension. + :type display_name: str + :param to_be_exported_for_shoebox: Whether the dimension should be + exported to Azure Monitor. 
+ :type to_be_exported_for_shoebox: bool + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: + super(OperationMetricDimension, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.to_be_exported_for_shoebox = to_be_exported_for_shoebox diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py new file mode 100644 index 000000000000..77f533fdcebf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. + :type display_name: str + :param display_description: The description of the metric. + :type display_description: str + :param unit: The unit that the metric is measured in. + :type unit: str + :param aggregation_type: The type of metric aggregation. + :type aggregation_type: str + :param enable_regional_mdm_account: Whether or not the service is using + regional MDM accounts. + :type enable_regional_mdm_account: str + :param source_mdm_account: The name of the MDM account. + :type source_mdm_account: str + :param source_mdm_namespace: The name of the MDM namespace. + :type source_mdm_namespace: str + :param availabilities: Defines how often data for metrics becomes + available. + :type availabilities: + list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :param dimensions: Defines the metric dimension. 
+ :type dimensions: + list[~azure.mgmt.datafactory.models.OperationMetricDimension] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, + 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, + 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + } + + def __init__(self, **kwargs): + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.display_description = kwargs.get('display_description', None) + self.unit = kwargs.get('unit', None) + self.aggregation_type = kwargs.get('aggregation_type', None) + self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) + self.source_mdm_account = kwargs.get('source_mdm_account', None) + self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None) + self.availabilities = kwargs.get('availabilities', None) + self.dimensions = kwargs.get('dimensions', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py new file mode 100644 index 000000000000..c1cc4ad39e72 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_metric_specification_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationMetricSpecification(Model): + """Details about an operation related to metrics. + + :param name: The name of the metric. + :type name: str + :param display_name: Localized display name of the metric. + :type display_name: str + :param display_description: The description of the metric. + :type display_description: str + :param unit: The unit that the metric is measured in. + :type unit: str + :param aggregation_type: The type of metric aggregation. + :type aggregation_type: str + :param enable_regional_mdm_account: Whether or not the service is using + regional MDM accounts. + :type enable_regional_mdm_account: str + :param source_mdm_account: The name of the MDM account. + :type source_mdm_account: str + :param source_mdm_namespace: The name of the MDM namespace. + :type source_mdm_namespace: str + :param availabilities: Defines how often data for metrics becomes + available. + :type availabilities: + list[~azure.mgmt.datafactory.models.OperationMetricAvailability] + :param dimensions: Defines the metric dimension. 
+    :type dimensions:
+     list[~azure.mgmt.datafactory.models.OperationMetricDimension]
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'display_description': {'key': 'displayDescription', 'type': 'str'},
+        'unit': {'key': 'unit', 'type': 'str'},
+        'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
+        'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'},
+        'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'},
+        'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'},
+        'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'},
+        'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'},
+    }
+
+    def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None:
+        super(OperationMetricSpecification, self).__init__(**kwargs)
+        self.name = name
+        self.display_name = display_name
+        self.display_description = display_description
+        self.unit = unit
+        self.aggregation_type = aggregation_type
+        self.enable_regional_mdm_account = enable_regional_mdm_account
+        self.source_mdm_account = source_mdm_account
+        self.source_mdm_namespace = source_mdm_namespace
+        self.availabilities = availabilities
+        self.dimensions = dimensions
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py
new file mode 100644
index 000000000000..d6eea01bbdb9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_paged.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class OperationPaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`Operation
+    <azure.mgmt.datafactory.models.Operation>` objects
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[Operation]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+
+        super(OperationPaged, self).__init__(*args, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py
new file mode 100644
index 000000000000..23305038a090
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_py3.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
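+#
+# Consumption sketch for the OperationPaged container defined above
+# (illustrative; assumes an already-authenticated DataFactoryManagementClient
+# named `client` exposing the standard operations.list() call):
+#
+#     for op in client.operations.list():
+#         print(op.name)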
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Operation(Model): + """Azure Data Factory API operation definition. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param origin: The intended executor of the operation. + :type origin: str + :param display: Metadata associated with the operation. + :type display: ~azure.mgmt.datafactory.models.OperationDisplay + :param service_specification: Details about a service operation. + :type service_specification: + ~azure.mgmt.datafactory.models.OperationServiceSpecification + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, + } + + def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: + super(Operation, self).__init__(**kwargs) + self.name = name + self.origin = origin + self.display = display + self.service_specification = service_specification diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py new file mode 100644 index 000000000000..82622a44af5a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationServiceSpecification(Model): + """Details about a service operation. + + :param log_specifications: Details about operations related to logs. + :type log_specifications: + list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :param metric_specifications: Details about operations related to metrics. 
+ :type metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ + + _attribute_map = { + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, **kwargs): + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = kwargs.get('log_specifications', None) + self.metric_specifications = kwargs.get('metric_specifications', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py new file mode 100644 index 000000000000..4215dac6eb7f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/operation_service_specification_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class OperationServiceSpecification(Model): + """Details about a service operation. + + :param log_specifications: Details about operations related to logs. + :type log_specifications: + list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :param metric_specifications: Details about operations related to metrics. + :type metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ + + _attribute_map = { + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = log_specifications + self.metric_specifications = metric_specifications diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py new file mode 100644 index 000000000000..19f715dfd9e2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class OracleLinkedService(LinkedService): + """Oracle database. + + All required parameters must be populated in order to send to Azure. 
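+
+    Example (a minimal sketch; the connection string and the key vault
+    reference names below are illustrative placeholders)::
+
+        ls = OracleLinkedService(
+            connection_string='host=<server>;port=1521;sid=<sid>;user id=<user>',
+            password=AzureKeyVaultSecretReference(
+                store=LinkedServiceReference(reference_name='<akv-service>'),
+                secret_name='<oracle-password>'))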
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Oracle' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py new file mode 100644 index 000000000000..a46f0463afb5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_linked_service_py3.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class OracleLinkedService(LinkedService): + """Oracle database. + + All required parameters must be populated in order to send to Azure. 
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param connection_string: Required. The connection string. Type: string,
+     SecureString or AzureKeyVaultSecretReference.
+    :type connection_string: object
+    :param password: The Azure Key Vault secret reference of the password in
+     the connection string.
+    :type password:
+     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None:
+        super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.connection_string = connection_string
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Oracle'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py
new file mode 100644
index 000000000000..87f69763a470
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class OraclePartitionSettings(Model):
+    """The settings that will be used for Oracle source partitioning.
+
+    :param partition_names: Names of the physical partitions of the Oracle
+     table.
+    :type partition_names: list[object]
+    :param partition_column_name: The name of the integer-type column that
+     will be used for range partitioning. Type: string (or Expression with
+     resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of the column specified
+     in partitionColumnName that will be used for range partitioning.
+     Type: string (or Expression with resultType string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of the column specified
+     in partitionColumnName that will be used for range partitioning.
+     Type: string (or Expression with resultType string).
+    :type partition_lower_bound: object
+    """
+
+    _attribute_map = {
+        'partition_names': {'key': 'partitionNames', 'type': '[object]'},
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OraclePartitionSettings, self).__init__(**kwargs)
+        self.partition_names = kwargs.get('partition_names', None)
+        self.partition_column_name = kwargs.get('partition_column_name', None)
+        self.partition_upper_bound = kwargs.get('partition_upper_bound', None)
+        self.partition_lower_bound = kwargs.get('partition_lower_bound', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
new file mode 100644
index 000000000000..6a13a4d647e8
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_partition_settings_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class OraclePartitionSettings(Model):
+    """The settings that will be used for Oracle source partitioning.
+
+    :param partition_names: Names of the physical partitions of the Oracle
+     table.
+    :type partition_names: list[object]
+    :param partition_column_name: The name of the integer-type column that
+     will be used for range partitioning. Type: string (or Expression with
+     resultType string).
+    :type partition_column_name: object
+    :param partition_upper_bound: The maximum value of the column specified
+     in partitionColumnName that will be used for range partitioning.
+     Type: string (or Expression with resultType string).
+    :type partition_upper_bound: object
+    :param partition_lower_bound: The minimum value of the column specified
+     in partitionColumnName that will be used for range partitioning.
+     Type: string (or Expression with resultType string).
+    :type partition_lower_bound: object
+    """
+
+    _attribute_map = {
+        'partition_names': {'key': 'partitionNames', 'type': '[object]'},
+        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
+        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
+        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
+    }
+
+    def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
+        super(OraclePartitionSettings, self).__init__(**kwargs)
+        self.partition_names = partition_names
+        self.partition_column_name = partition_column_name
+        self.partition_upper_bound = partition_upper_bound
+        self.partition_lower_bound = partition_lower_bound
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py
new file mode 100644
index 000000000000..44ce000868b7
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class OracleServiceCloudLinkedService(LinkedService):
+    """Oracle Service Cloud linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The URL of the Oracle Service Cloud instance.
+    :type host: object
+    :param username: Required. The user name that you use to access the
+     Oracle Service Cloud server.
+    :type username: object
+    :param password: Required. The password corresponding to the user name
+     that you provided in the username key.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true. Type:
+     boolean (or Expression with resultType boolean).
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true. Type: boolean (or
+     Expression with resultType boolean).
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true. Type:
+     boolean (or Expression with resultType boolean).
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+        'username': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OracleServiceCloudLinkedService, self).__init__(**kwargs)
+        self.host = kwargs.get('host', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+        self.use_host_verification = kwargs.get('use_host_verification', None)
+        self.use_peer_verification = kwargs.get('use_peer_verification', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'OracleServiceCloud'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
new file mode 100644
index 000000000000..8732e2e82ca0
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_linked_service_py3.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class OracleServiceCloudLinkedService(LinkedService):
+    """Oracle Service Cloud linked service.
+
+    All required parameters must be populated in order to send to Azure.
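+
+    Example (an illustrative usage sketch, not generator output; host and
+    credentials are placeholders)::
+
+        from azure.mgmt.datafactory.models import (
+            OracleServiceCloudLinkedService, SecureString)
+
+        osc_ls = OracleServiceCloudLinkedService(
+            host='https://<instance>.custhelp.com',
+            username='<user>',
+            # SecureString keeps the secret out of factory definitions
+            # returned by the service.
+            password=SecureString(value='<password>'))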
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The URL of the Oracle Service Cloud instance.
+    :type host: object
+    :param username: Required. The user name that you use to access the
+     Oracle Service Cloud server.
+    :type username: object
+    :param password: Required. The password corresponding to the user name
+     that you provided in the username key.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true. Type:
+     boolean (or Expression with resultType boolean).
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true. Type: boolean (or
+     Expression with resultType boolean).
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true. Type:
+     boolean (or Expression with resultType boolean).
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+        'username': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+        super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.username = username
+        self.password = password
+        self.use_encrypted_endpoints = use_encrypted_endpoints
+        self.use_host_verification = use_host_verification
+        self.use_peer_verification = use_peer_verification
+        self.encrypted_credential = encrypted_credential
+        self.type = 'OracleServiceCloud'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py
new file mode 100644
index 000000000000..35ce3439d8a0
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class OracleServiceCloudObjectDataset(Dataset):
+    """Oracle Service Cloud dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OracleServiceCloudObjectDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'OracleServiceCloudObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
new file mode 100644
index 000000000000..a478e1abc828
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class OracleServiceCloudObjectDataset(Dataset):
+    """Oracle Service Cloud dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'OracleServiceCloudObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py
new file mode 100644
index 000000000000..f42291941393
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class OracleServiceCloudSource(CopySource):
+    """A copy activity Oracle Service Cloud source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from the source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OracleServiceCloudSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'OracleServiceCloudSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
new file mode 100644
index 000000000000..1fa5d6eb3748
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_service_cloud_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class OracleServiceCloudSource(CopySource):
+    """A copy activity Oracle Service Cloud source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from the source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'OracleServiceCloudSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py
new file mode 100644
index 000000000000..1f6c747c49db
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class OracleSink(CopySink):
+    """A copy activity Oracle sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the sink data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
+     with resultType string).
+    :type pre_copy_script: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OracleSink, self).__init__(**kwargs)
+        self.pre_copy_script = kwargs.get('pre_copy_script', None)
+        self.type = 'OracleSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py
new file mode 100644
index 000000000000..3a571c66732a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_sink_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class OracleSink(CopySink):
+    """A copy activity Oracle sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the sink data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
+     with resultType string).
+    :type pre_copy_script: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
+        super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.pre_copy_script = pre_copy_script
+        self.type = 'OracleSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py
new file mode 100644
index 000000000000..84ad79ed19c7
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class OracleSource(CopySource):
+    """A copy activity Oracle source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param oracle_reader_query: Oracle reader query. Type: string (or
+     Expression with resultType string).
+    :type oracle_reader_query: object
+    :param query_timeout: Query timeout. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param partition_option: The partition mechanism that will be used for
+     Oracle read in parallel.
+    :type partition_option: object
+    :param partition_settings: The settings that will be used for Oracle
+     source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.OraclePartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'object'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OracleSource, self).__init__(**kwargs)
+        self.oracle_reader_query = kwargs.get('oracle_reader_query', None)
+        self.query_timeout = kwargs.get('query_timeout', None)
+        self.partition_option = kwargs.get('partition_option', None)
+        self.partition_settings = kwargs.get('partition_settings', None)
+        self.type = 'OracleSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py
new file mode 100644
index 000000000000..dfcbd2e0330d
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_source_py3.py
@@ -0,0 +1,74 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class OracleSource(CopySource):
+    """A copy activity Oracle source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection
+     count for the source data store. Type: integer (or Expression with
+     resultType integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param oracle_reader_query: Oracle reader query. Type: string (or
+     Expression with resultType string).
+    :type oracle_reader_query: object
+    :param query_timeout: Query timeout. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param partition_option: The partition mechanism that will be used for
+     Oracle read in parallel.
+    :type partition_option: object
+    :param partition_settings: The settings that will be used for Oracle
+     source partitioning.
+    :type partition_settings:
+     ~azure.mgmt.datafactory.models.OraclePartitionSettings
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+        'partition_option': {'key': 'partitionOption', 'type': 'object'},
+        'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None:
+        super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.oracle_reader_query = oracle_reader_query
+        self.query_timeout = query_timeout
+        self.partition_option = partition_option
+        self.partition_settings = partition_settings
+        self.type = 'OracleSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py
new file mode 100644
index 000000000000..af51100cd88e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class OracleTableDataset(Dataset):
+    """The on-premises Oracle database dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name of the on-premises Oracle database.
+     Type: string (or Expression with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OracleTableDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'OracleTable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py
new file mode 100644
index 000000000000..563371653de8
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/oracle_table_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class OracleTableDataset(Dataset):
+    """The on-premises Oracle database dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset.
+     Type: array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name of the on-premises Oracle database.
+     Type: string (or Expression with resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'OracleTable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py
new file mode 100644
index 000000000000..8f0a0322062c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_storage_format import DatasetStorageFormat
+
+
+class OrcFormat(DatasetStorageFormat):
+    """The data stored in Optimized Row Columnar (ORC) format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param serializer: Serializer. Type: string (or Expression with
+     resultType string).
+    :type serializer: object
+    :param deserializer: Deserializer. Type: string (or Expression with
+     resultType string).
+    :type deserializer: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'serializer': {'key': 'serializer', 'type': 'object'},
+        'deserializer': {'key': 'deserializer', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(OrcFormat, self).__init__(**kwargs)
+        self.type = 'OrcFormat'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py
new file mode 100644
index 000000000000..40a0e389ccc3
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/orc_format_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_storage_format_py3 import DatasetStorageFormat
+
+
+class OrcFormat(DatasetStorageFormat):
+    """The data stored in Optimized Row Columnar (ORC) format.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param serializer: Serializer. Type: string (or Expression with
+     resultType string).
+    :type serializer: object
+    :param deserializer: Deserializer. Type: string (or Expression with
+     resultType string).
+    :type deserializer: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'serializer': {'key': 'serializer', 'type': 'object'},
+        'deserializer': {'key': 'deserializer', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None:
+        super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs)
+        self.type = 'OrcFormat'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py
new file mode 100644
index 000000000000..aef855d955f0
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ParameterSpecification(Model):
+    """Definition of a single parameter for an entity.
+
+    All required parameters must be populated in order to send to Azure.
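+
+    Example (an illustrative usage sketch, not generator output; the
+    parameter name 'SchemaName' is a placeholder)::
+
+        from azure.mgmt.datafactory.models import ParameterSpecification
+
+        # Datasets and linked services take a mapping of parameter name to
+        # specification.
+        parameters = {
+            'SchemaName': ParameterSpecification(
+                type='String', default_value='dbo')}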
+
+    :param type: Required. Parameter type. Possible values include: 'Object',
+     'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString'
+    :type type: str or ~azure.mgmt.datafactory.models.ParameterType
+    :param default_value: Default value of parameter.
+    :type default_value: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'default_value': {'key': 'defaultValue', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ParameterSpecification, self).__init__(**kwargs)
+        self.type = kwargs.get('type', None)
+        self.default_value = kwargs.get('default_value', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py
new file mode 100644
index 000000000000..d5b6f981d365
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parameter_specification_py3.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class ParameterSpecification(Model):
+    """Definition of a single parameter for an entity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Parameter type. Possible values include: 'Object',
+     'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString'
+    :type type: str or ~azure.mgmt.datafactory.models.ParameterType
+    :param default_value: Default value of parameter.
+    :type default_value: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'default_value': {'key': 'defaultValue', 'type': 'object'},
+    }
+
+    def __init__(self, *, type, default_value=None, **kwargs) -> None:
+        super(ParameterSpecification, self).__init__(**kwargs)
+        self.type = type
+        self.default_value = default_value
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py
new file mode 100644
index 000000000000..ffaf8e1f6d93
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class ParquetDataset(Dataset):
+    """Parquet dataset.
+
+    All required parameters must be populated in order to send to Azure.
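+
+    Example (an illustrative usage sketch, not generator output; it assumes
+    a blob storage linked service named 'BlobStorageLS', and that
+    AzureBlobStorageLocation -- added elsewhere in this patch -- accepts
+    'container' and 'folder_path' arguments)::
+
+        from azure.mgmt.datafactory.models import (
+            AzureBlobStorageLocation, LinkedServiceReference, ParquetDataset)
+
+        dataset = ParquetDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='BlobStorageLS'),
+            location=AzureBlobStorageLocation(
+                container='data', folder_path='raw/events'))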
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetDataset, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.compression_codec = kwargs.get('compression_codec', None) + self.type = 'Parquet' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py new file mode 100644 index 000000000000..4d754450ce15 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_dataset_py3.py @@ -0,0 +1,76 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ParquetDataset(Dataset): + """Parquet dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param location: Required. The location of the parquet storage. + :type location: ~azure.mgmt.datafactory.models.DatasetLocation + :param compression_codec: + :type compression_codec: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: + super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.location = location + self.compression_codec = compression_codec + self.type = 'Parquet' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py new file mode 100644 index 000000000000..d742ff24b522 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ParquetFormat, self).__init__(**kwargs) + self.type = 'ParquetFormat' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py new file mode 100644 index 000000000000..36a6f5c88c4d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_format_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format_py3 import DatasetStorageFormat + + +class ParquetFormat(DatasetStorageFormat): + """The data stored in Parquet format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. 
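
A sketch of building the new ParquetDataset against a blob location. The LinkedServiceReference and AzureBlobStorageLocation signatures are assumptions drawn from their own files in this patch rather than from the hunks above, and all names are placeholders:

    from azure.mgmt.datafactory.models import (
        AzureBlobStorageLocation, LinkedServiceReference, ParquetDataset)

    dataset = ParquetDataset(
        linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
        # location serializes under typeProperties.location per the
        # attribute map above.
        location=AzureBlobStorageLocation(
            container='data', folder_path='raw', file_name='events.parquet'),
        compression_codec='snappy')
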
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: + super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.type = 'ParquetFormat' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py new file mode 100644 index 000000000000..38c634ed10dd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
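
ParquetFormat, like the OrcFormat model earlier in this patch, is discriminator-only: beyond the inherited serializer/deserializer pair, the constructor just pins the type constant, as a quick sketch shows:

    from azure.mgmt.datafactory.models import OrcFormat, ParquetFormat

    # The server-filled 'type' constant is also set client-side in __init__.
    assert ParquetFormat().type == 'ParquetFormat'
    assert OrcFormat().type == 'OrcFormat'
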
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSink, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py new file mode 100644 index 000000000000..96c0c1b57926 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_sink_py3.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class ParquetSink(CopySink): + """A copy activity Parquet sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorWriteSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorWriteSetting'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py new file mode 100644 index 000000000000..02e74641d506 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
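
A sketch of wiring store settings into the new ParquetSink; ConnectorWriteSetting is defined elsewhere in this patch, and its 'type' and copy_behavior keywords are assumptions here:

    from azure.mgmt.datafactory.models import ConnectorWriteSetting, ParquetSink

    sink = ParquetSink(
        # storeSettings carries the connector-specific write behaviour.
        store_settings=ConnectorWriteSetting(
            type='AzureBlobStorageWriteSetting',
            copy_behavior='PreserveHierarchy'),
        write_batch_size=10000)
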
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, **kwargs): + super(ParquetSource, self).__init__(**kwargs) + self.store_settings = kwargs.get('store_settings', None) + self.type = 'ParquetSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py new file mode 100644 index 000000000000..bfe077dd9999 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/parquet_source_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ParquetSource(CopySource): + """A copy activity Parquet source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param store_settings: Parquet store settings. 
+ :type store_settings: ~azure.mgmt.datafactory.models.ConnectorReadSetting + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'store_settings': {'key': 'storeSettings', 'type': 'ConnectorReadSetting'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.store_settings = store_settings + self.type = 'ParquetSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py new file mode 100644 index 000000000000..d7ae0bc075e7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) + :type host: object + :param client_id: Required. The client ID associated with your PayPal + application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. 
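
The source side mirrors the sink: ParquetSource adds only store_settings on top of the CopySource retry knobs. A sketch, with the ConnectorReadSetting keywords again assumed from its own file in this patch:

    from azure.mgmt.datafactory.models import ConnectorReadSetting, ParquetSource

    source = ParquetSource(
        store_settings=ConnectorReadSetting(type='AzureBlobStorageReadSetting'),
        max_concurrent_connections=4)
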
+ :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Paypal' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py new file mode 100644 index 000000000000..c11cda7a52f3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_linked_service_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PaypalLinkedService(LinkedService): + """Paypal Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the PayPal instance. (i.e. + api.sandbox.paypal.com) + :type host: object + :param client_id: Required. The client ID associated with your PayPal + application. + :type client_id: object + :param client_secret: The client secret associated with your PayPal + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Paypal' diff --git 
a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py new file mode 100644 index 000000000000..d0fdc678841b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
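
A sketch of the new PaypalLinkedService; host and client_id are the two required type properties, and SecureString (the stock SecretBase implementation in this models package) is assumed for the secret. Credential values are placeholders:

    from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

    paypal_ls = PaypalLinkedService(
        host='api.sandbox.paypal.com',
        client_id='<client-id>',
        client_secret=SecureString(value='<client-secret>'))
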
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'PaypalObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py new file mode 100644 index 000000000000..55df7c97166d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PaypalObjectDataset(Dataset): + """Paypal Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PaypalObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py new file mode 100644 index 000000000000..94cdbccae6ee --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
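
A sketch of the new PaypalObjectDataset; the linked service reference name and table name are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, PaypalObjectDataset)

    dataset = PaypalObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='PaypalLS'),
        # Serialized under typeProperties.tableName per the attribute map.
        table_name='Payment_Transactions')
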
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PaypalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PaypalSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py new file mode 100644 index 000000000000..05730d0ae067 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/paypal_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PaypalSource(CopySource): + """A copy activity Paypal Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PaypalSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py new file mode 100644 index 000000000000..308a8e4cf592 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. 
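
PaypalSource adds only a query on top of the base CopySource fields, as a short sketch shows (the query text is a placeholder):

    from azure.mgmt.datafactory.models import PaypalSource

    # 'query' is typed object so it can also hold an ADF expression.
    source = PaypalSource(query='SELECT * FROM Payment_Transactions')
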
+ :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. + :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.http_path = kwargs.get('http_path', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', 
None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Phoenix' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py new file mode 100644 index 000000000000..de8210c2cc89 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_linked_service_py3.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PhoenixLinkedService(LinkedService): + """Phoenix server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The IP address or host name of the Phoenix server. + (i.e. 192.168.222.160) + :type host: object + :param port: The TCP port that the Phoenix server uses to listen for + client connections. The default value is 8765. + :type port: object + :param http_path: The partial URL corresponding to the Phoenix server. + (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix + if using WindowsAzureHDInsightService. + :type http_path: object + :param authentication_type: Required. The authentication mechanism used to + connect to the Phoenix server. Possible values include: 'Anonymous', + 'UsernameAndPassword', 'WindowsAzureHDInsightService' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.PhoenixAuthenticationType + :param username: The user name used to connect to the Phoenix server. + :type username: object + :param password: The password corresponding to the user name. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param enable_ssl: Specifies whether the connections to the server are + encrypted using SSL. The default value is false. + :type enable_ssl: object + :param trusted_cert_path: The full path of the .pem file containing + trusted CA certificates for verifying the server when connecting over SSL. + This property can only be set when using SSL on self-hosted IR. The + default value is the cacerts.pem file installed with the IR. 
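
A sketch of the new PhoenixLinkedService, using the keyword signature of the py3 variant that follows; host and authentication_type are required, and the host, user, and credential values are placeholders:

    from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

    phoenix_ls = PhoenixLinkedService(
        host='192.168.222.160',
        authentication_type='UsernameAndPassword',
        username='loader',
        password=SecureString(value='<password>'),
        enable_ssl=True)
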
+ :type trusted_cert_path: object + :param use_system_trust_store: Specifies whether to use a CA certificate + from the system trust store or from a specified PEM file. The default + value is false. + :type use_system_trust_store: object + :param allow_host_name_cn_mismatch: Specifies whether to require a + CA-issued SSL certificate name to match the host name of the server when + connecting over SSL. The default value is false. + :type allow_host_name_cn_mismatch: object + :param allow_self_signed_server_cert: Specifies whether to allow + self-signed certificates from the server. The default value is false. + :type allow_self_signed_server_cert: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: + super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.port = port + self.http_path = http_path + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.encrypted_credential = encrypted_credential + self.type = 'Phoenix' diff --git 
a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py new file mode 100644 index 000000000000..2d9cd5dcd581 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'PhoenixObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py new file mode 100644 index 000000000000..32c6e5f9836f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PhoenixObjectDataset(Dataset): + """Phoenix server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PhoenixObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py new file mode 100644 index 000000000000..30171c6177ff --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PhoenixSource(CopySource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
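# Editor's note: a hedged sketch of the PhoenixObjectDataset defined above, tying a
# table name to a linked service; 'PhoenixLinkedService1' is an assumed reference name.
from azure.mgmt.datafactory.models import PhoenixObjectDataset, LinkedServiceReference

dataset = PhoenixObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='PhoenixLinkedService1'),
    table_name='WEB_LOGS',   # Type: string (or Expression with resultType string)
)
assert dataset.type == 'PhoenixObject'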
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PhoenixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PhoenixSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py new file mode 100644 index 000000000000..1384f59e1aa4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/phoenix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PhoenixSource(CopySource): + """A copy activity Phoenix server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
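# Editor's note: a minimal sketch of the PhoenixSource defined above; the query text
# is an assumption for illustration only.
from azure.mgmt.datafactory.models import PhoenixSource

source = PhoenixSource(query='SELECT * FROM WEB_LOGS LIMIT 10')
assert source.type == 'PhoenixSource'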
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PhoenixSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py new file mode 100644 index 000000000000..bebc05cb1824 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py new file mode 100644 index 000000000000..02c9b8dbbff1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_folder_py3.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineFolder(Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear + at the root level. + + :param name: The name of the folder that this Pipeline is in. 
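# Editor's note: the *_py3 modules above mirror their **kwargs counterparts but
# declare keyword-only, type-annotated constructors, so required fields fail fast
# on Python 3. A hedged illustration (behaviour depends on which module variant
# the package loads at import time):
from azure.mgmt.datafactory.models import PhoenixSource

src = PhoenixSource(query='SELECT 1')   # 'query' is optional in both variants
# Under the py3 module, PhoenixLinkedService() without host/authentication_type
# raises TypeError immediately; under the **kwargs module the omission is only
# caught by msrest validation when the model is serialized.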
+ :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(PipelineFolder, self).__init__(**kwargs) + self.name = name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py new file mode 100644 index 000000000000..aa8b23e62932 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. + :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, **kwargs): + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.name = kwargs.get('name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py new file mode 100644 index 000000000000..ce63f06092d1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_reference_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineReference(Model): + """Pipeline reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Pipeline reference type. Default value: + "PipelineReference" . + :vartype type: str + :param reference_name: Required. Reference pipeline name. + :type reference_name: str + :param name: Reference name. 
+ :type name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + type = "PipelineReference" + + def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None: + super(PipelineReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.name = name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py new file mode 100644 index 000000000000..a39deaccc87b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. + :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. 
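# Editor's note: a sketch of the PipelineReference model above. 'type' is a
# class-level constant, so only reference_name (required) and name are supplied.
from azure.mgmt.datafactory.models import PipelineReference

ref = PipelineReference(reference_name='CopyWebLogsPipeline')
assert ref.type == 'PipelineReference'   # constant filled client-side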
+ :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, **kwargs): + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.activities = kwargs.get('activities', None) + self.parameters = kwargs.get('parameters', None) + self.variables = kwargs.get('variables', None) + self.concurrency = kwargs.get('concurrency', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py new file mode 100644 index 000000000000..a7c7ed553c07 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_paged.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.paging import Paged + + +class PipelineResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`PipelineResource ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[PipelineResource]'} + } + + def __init__(self, *args, **kwargs): + + super(PipelineResourcePaged, self).__init__(*args, **kwargs) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py new file mode 100644 index 000000000000..8299cdb73887 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_resource_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
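# Editor's note: a hedged sketch assembling the PipelineResource defined above.
# The description, folder name, and empty activity list are assumptions.
from azure.mgmt.datafactory.models import PipelineResource, PipelineFolder

pipeline = PipelineResource(
    description='Copies Phoenix web logs to blob storage',
    activities=[],                          # list of Activity models
    concurrency=1,                          # minimum allowed value is 1
    folder=PipelineFolder(name='ingestion'),
)
# id/name/type/etag are read-only and stay None until returned by the service.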
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource_py3 import SubResource + + +class PipelineResource(SubResource): + """Pipeline resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: The description of the pipeline. + :type description: str + :param activities: List of activities in pipeline. + :type activities: list[~azure.mgmt.datafactory.models.Activity] + :param parameters: List of parameters for pipeline. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param variables: List of variables for pipeline. + :type variables: dict[str, + ~azure.mgmt.datafactory.models.VariableSpecification] + :param concurrency: The max number of concurrent runs for the pipeline. + :type concurrency: int + :param annotations: List of tags that can be used for describing the + Pipeline. + :type annotations: list[object] + :param folder: The folder that this Pipeline is in. If not specified, + Pipeline will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.PipelineFolder + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'concurrency': {'minimum': 1}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'activities': {'key': 'properties.activities', 'type': '[Activity]'}, + 'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'}, + 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, + 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, + 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + } + + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: + super(PipelineResource, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.activities = activities + self.parameters = parameters + self.variables = variables + self.concurrency = concurrency + self.annotations = annotations + self.folder = folder diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py new file mode 100644 index 000000000000..a2407bd9835f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRun(Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. 
+ :vartype message: str + """ + + _validation = { + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py new file mode 100644 index 000000000000..acefb80fd078 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunInvokedBy(Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. 
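# Editor's note: every field on PipelineRun above is read-only, so instances are
# only meaningful when deserialized from a service response. A hedged sketch,
# assuming a DataFactoryManagementClient named 'client' and that the pipeline_runs
# operation group exposes get() with this signature (an assumption, not confirmed
# by this patch):
run = client.pipeline_runs.get('my-rg', 'my-factory', 'example-run-id')
print(run.status, run.duration_in_ms)   # populated by the server, not settable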
+ :vartype invoked_by_type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py new file mode 100644 index 000000000000..c954a18b8a67 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_invoked_by_py3.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunInvokedBy(Model): + """Provides entity name and id that started the pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: Name of the entity that started the pipeline run. + :vartype name: str + :ivar id: The ID of the entity that started the run. + :vartype id: str + :ivar invoked_by_type: The type of the entity that started the run. + :vartype invoked_by_type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'invoked_by_type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'invoked_by_type': {'key': 'invokedByType', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(PipelineRunInvokedBy, self).__init__(**kwargs) + self.name = None + self.id = None + self.invoked_by_type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py new file mode 100644 index 000000000000..33e0f23f24ac --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_run_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRun(Model): + """Information about a pipeline run. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar run_id: Identifier of a run. + :vartype run_id: str + :ivar run_group_id: Identifier that correlates all the recovery runs of a + pipeline run. + :vartype run_group_id: str + :ivar is_latest: Indicates if the recovered pipeline run is the latest in + its group. + :vartype is_latest: bool + :ivar pipeline_name: The pipeline name. + :vartype pipeline_name: str + :ivar parameters: The full or partial list of parameter name, value pair + used in the pipeline run. + :vartype parameters: dict[str, str] + :ivar invoked_by: Entity that started the pipeline run. + :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy + :ivar last_updated: The last updated timestamp for the pipeline run event + in ISO8601 format. + :vartype last_updated: datetime + :ivar run_start: The start time of a pipeline run in ISO8601 format. + :vartype run_start: datetime + :ivar run_end: The end time of a pipeline run in ISO8601 format. + :vartype run_end: datetime + :ivar duration_in_ms: The duration of a pipeline run. + :vartype duration_in_ms: int + :ivar status: The status of a pipeline run. + :vartype status: str + :ivar message: The message from a pipeline run. + :vartype message: str + """ + + _validation = { + 'run_id': {'readonly': True}, + 'run_group_id': {'readonly': True}, + 'is_latest': {'readonly': True}, + 'pipeline_name': {'readonly': True}, + 'parameters': {'readonly': True}, + 'invoked_by': {'readonly': True}, + 'last_updated': {'readonly': True}, + 'run_start': {'readonly': True}, + 'run_end': {'readonly': True}, + 'duration_in_ms': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + 'run_group_id': {'key': 'runGroupId', 'type': 'str'}, + 'is_latest': {'key': 'isLatest', 'type': 'bool'}, + 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, + 'run_end': {'key': 'runEnd', 'type': 'iso-8601'}, + 'duration_in_ms': {'key': 'durationInMs', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(PipelineRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.run_id = None + self.run_group_id = None + self.is_latest = None + self.pipeline_name = None + self.parameters = None + self.invoked_by = None + self.last_updated = None + self.run_start = None + self.run_end = None + self.duration_in_ms = None + self.status = None + self.message = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py new file mode 100644 index 000000000000..c4591c5467ba --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft 
Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunsQueryResponse(Model): + """A list pipeline runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py new file mode 100644 index 000000000000..fbc689ec1632 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/pipeline_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PipelineRunsQueryResponse(Model): + """A list pipeline runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of pipeline runs. + :type value: list[~azure.mgmt.datafactory.models.PipelineRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PipelineRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(PipelineRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py new file mode 100644 index 000000000000..5a261d8fea84 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
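# Editor's note: PipelineRunsQueryResponse above carries a continuation token rather
# than a next link, so callers page manually. A hedged polling sketch; the 'client'
# object, RunFilterParameters model, and query_by_factory signature are assumptions
# drawn from the surrounding SDK, not from this patch.
import datetime
from azure.mgmt.datafactory.models import RunFilterParameters

filters = RunFilterParameters(
    last_updated_after=datetime.datetime(2019, 5, 1),
    last_updated_before=datetime.datetime(2019, 5, 29),
)
runs, token = [], None
while True:
    filters.continuation_token = token
    page = client.pipeline_runs.query_by_factory('my-rg', 'my-factory', filters)
    runs.extend(page.value)                 # PipelineRunsQueryResponse.value
    token = page.continuation_token
    if not token:                           # null token means no more pages
        break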
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PolybaseSettings(Model): + """PolyBase settings. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). + :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reject_type = kwargs.get('reject_type', None) + self.reject_value = kwargs.get('reject_value', None) + self.reject_sample_value = kwargs.get('reject_sample_value', None) + self.use_type_default = kwargs.get('use_type_default', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py new file mode 100644 index 000000000000..baae78b14c5f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/polybase_settings_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class PolybaseSettings(Model): + """PolyBase settings. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param reject_type: Reject type. 
Possible values include: 'value', + 'percentage' + :type reject_type: str or + ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType + :param reject_value: Specifies the value or the percentage of rows that + can be rejected before the query fails. Type: number (or Expression with + resultType number), minimum: 0. + :type reject_value: object + :param reject_sample_value: Determines the number of rows to attempt to + retrieve before the PolyBase recalculates the percentage of rejected rows. + Type: integer (or Expression with resultType integer), minimum: 0. + :type reject_sample_value: object + :param use_type_default: Specifies how to handle missing values in + delimited text files when PolyBase retrieves data from the text file. + Type: boolean (or Expression with resultType boolean). + :type use_type_default: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'reject_type': {'key': 'rejectType', 'type': 'str'}, + 'reject_value': {'key': 'rejectValue', 'type': 'object'}, + 'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'}, + 'use_type_default': {'key': 'useTypeDefault', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None: + super(PolybaseSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reject_type = reject_type + self.reject_value = reject_value + self.reject_sample_value = reject_sample_value + self.use_type_default = use_type_default diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py new file mode 100644 index 000000000000..f8ce5bd0803e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. 
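# Editor's note: a sketch of the PolybaseSettings model above using the
# 'percentage' reject type; the threshold values are illustrative.
from azure.mgmt.datafactory.models import PolybaseSettings

polybase = PolybaseSettings(
    reject_type='percentage',
    reject_value=5.0,          # fail once more than 5% of rows are rejected
    reject_sample_value=1000,  # recalculate the percentage every 1000 rows
    use_type_default=True,     # substitute type defaults for missing values
)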
+ :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PostgreSqlLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'PostgreSql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py new file mode 100644 index 000000000000..0221aa620064 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/postgre_sql_linked_service_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class PostgreSqlLinkedService(LinkedService): + """Linked service for PostgreSQL data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. 
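# Editor's note: the connection string above is typed as SecretBase, so it must be
# wrapped in a secret model; SecureString and the connection-string text here are
# assumptions drawn from the wider SDK rather than from this patch.
from azure.mgmt.datafactory.models import PostgreSqlLinkedService, SecureString

pg_ls = PostgreSqlLinkedService(
    connection_string=SecureString(
        value='host=pg.example.com port=5432 dbname=logs user=loader'),
)
assert pg_ls.type == 'PostgreSql'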
+ :type connection_string: ~azure.mgmt.datafactory.models.SecretBase + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'PostgreSql' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py new file mode 100644 index 000000000000..21f18f07b262 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service.py @@ -0,0 +1,132 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class PrestoLinkedService(LinkedService): + """Presto server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. 
Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the Presto server.
+ (i.e. 192.168.222.160)
+ :type host: object
+ :param server_version: Required. The version of the Presto server. (i.e.
+ 0.148-t)
+ :type server_version: object
+ :param catalog: Required. The catalog context for all requests against the
+ server.
+ :type catalog: object
+ :param port: The TCP port that the Presto server uses to listen for client
+ connections. The default value is 8080.
+ :type port: object
+ :param authentication_type: Required. The authentication mechanism used to
+ connect to the Presto server. Possible values include: 'Anonymous', 'LDAP'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.PrestoAuthenticationType
+ :param username: The user name used to connect to the Presto server.
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param time_zone_id: The local time zone used by the connection. Valid
+ values for this option are specified in the IANA Time Zone Database. The
+ default value is the system time zone.
+ :type time_zone_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.server_version = kwargs.get('server_version', None) + self.catalog = kwargs.get('catalog', None) + self.port = kwargs.get('port', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.time_zone_id = kwargs.get('time_zone_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Presto' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py new file mode 100644 index 000000000000..75ab99d5a58f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_linked_service_py3.py @@ -0,0 +1,132 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
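+#
+# Illustrative usage (editor's hedged sketch, not AutoRest output): the
+# keyword-only __init__ below requires host, server_version, catalog and
+# authentication_type; every value in this example is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import PrestoLinkedService
+#   ls = PrestoLinkedService(host='192.168.222.160',
+#                            server_version='0.148-t',
+#                            catalog='hive',
+#                            authentication_type='Anonymous')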
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class PrestoLinkedService(LinkedService):
+ """Presto server linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The IP address or host name of the Presto server.
+ (i.e. 192.168.222.160)
+ :type host: object
+ :param server_version: Required. The version of the Presto server. (i.e.
+ 0.148-t)
+ :type server_version: object
+ :param catalog: Required. The catalog context for all requests against the
+ server.
+ :type catalog: object
+ :param port: The TCP port that the Presto server uses to listen for client
+ connections. The default value is 8080.
+ :type port: object
+ :param authentication_type: Required. The authentication mechanism used to
+ connect to the Presto server. Possible values include: 'Anonymous', 'LDAP'
+ :type authentication_type: str or
+ ~azure.mgmt.datafactory.models.PrestoAuthenticationType
+ :param username: The user name used to connect to the Presto server.
+ :type username: object
+ :param password: The password corresponding to the user name.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param enable_ssl: Specifies whether the connections to the server are
+ encrypted using SSL. The default value is false.
+ :type enable_ssl: object
+ :param trusted_cert_path: The full path of the .pem file containing
+ trusted CA certificates for verifying the server when connecting over SSL.
+ This property can only be set when using SSL on self-hosted IR. The
+ default value is the cacerts.pem file installed with the IR.
+ :type trusted_cert_path: object
+ :param use_system_trust_store: Specifies whether to use a CA certificate
+ from the system trust store or from a specified PEM file. The default
+ value is false.
+ :type use_system_trust_store: object
+ :param allow_host_name_cn_mismatch: Specifies whether to require a
+ CA-issued SSL certificate name to match the host name of the server when
+ connecting over SSL. The default value is false.
+ :type allow_host_name_cn_mismatch: object
+ :param allow_self_signed_server_cert: Specifies whether to allow
+ self-signed certificates from the server. The default value is false.
+ :type allow_self_signed_server_cert: object
+ :param time_zone_id: The local time zone used by the connection. Valid
+ values for this option are specified in the IANA Time Zone Database. The
+ default value is the system time zone.
+ :type time_zone_id: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'server_version': {'required': True}, + 'catalog': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'}, + 'catalog': {'key': 'typeProperties.catalog', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None: + super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.server_version = server_version + self.catalog = catalog + self.port = port + self.authentication_type = authentication_type + self.username = username + self.password = password + self.enable_ssl = enable_ssl + self.trusted_cert_path = trusted_cert_path + self.use_system_trust_store = use_system_trust_store + self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch + self.allow_self_signed_server_cert = allow_self_signed_server_cert + self.time_zone_id = time_zone_id + self.encrypted_credential = encrypted_credential + self.type = 'Presto' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py new file mode 100644 index 000000000000..35ceaa1389a7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'PrestoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py new file mode 100644 index 000000000000..193004e2c381 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
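+#
+# Illustrative usage (editor's hedged sketch, not AutoRest output): only
+# linked_service_name is required; the reference name and table name below
+# are placeholders.
+#
+#   from azure.mgmt.datafactory.models import (LinkedServiceReference,
+#                                              PrestoObjectDataset)
+#   ref = LinkedServiceReference(reference_name='myPrestoLinkedService')
+#   ds = PrestoObjectDataset(linked_service_name=ref, table_name='nation')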
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class PrestoObjectDataset(Dataset): + """Presto server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'PrestoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py new file mode 100644 index 000000000000..9b7274011265 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
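+#
+# Illustrative usage (editor's hedged sketch, not AutoRest output): query
+# is optional and the SQL text below is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import PrestoSource
+#   src = PrestoSource(query='SELECT * FROM tpch.tiny.nation')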
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class PrestoSource(CopySource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(PrestoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'PrestoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py new file mode 100644 index 000000000000..47fe3eb5f790 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/presto_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class PrestoSource(CopySource): + """A copy activity Presto server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'PrestoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py new file mode 100644 index 000000000000..6353c1cda96a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. 
The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.company_id = kwargs.get('company_id', None) + self.consumer_key = kwargs.get('consumer_key', None) + self.consumer_secret = kwargs.get('consumer_secret', None) + self.access_token = kwargs.get('access_token', None) + self.access_token_secret = kwargs.get('access_token_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'QuickBooks' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py new file mode 100644 index 000000000000..be12fc5cfba5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_linked_service_py3.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
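+#
+# Illustrative usage (editor's hedged sketch, not AutoRest output): all six
+# required OAuth 1.0 properties must be supplied; SecureString is one
+# SecretBase implementation and every value below is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import (QuickBooksLinkedService,
+#                                              SecureString)
+#   ls = QuickBooksLinkedService(
+#       endpoint='quickbooks.api.intuit.com',
+#       company_id='<company-id>',
+#       consumer_key='<consumer-key>',
+#       consumer_secret=SecureString(value='<consumer-secret>'),
+#       access_token=SecureString(value='<access-token>'),
+#       access_token_secret=SecureString(value='<token-secret>'))
+#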
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class QuickBooksLinkedService(LinkedService): + """QuickBooks server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. + quickbooks.api.intuit.com) + :type endpoint: object + :param company_id: Required. The company ID of the QuickBooks company to + authorize. + :type company_id: object + :param consumer_key: Required. The consumer key for OAuth 1.0 + authentication. + :type consumer_key: object + :param consumer_secret: Required. The consumer secret for OAuth 1.0 + authentication. + :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase + :param access_token: Required. The access token for OAuth 1.0 + authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param access_token_secret: Required. The access token secret for OAuth + 1.0 authentication. + :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'company_id': {'required': True}, + 'consumer_key': {'required': True}, + 'consumer_secret': {'required': True}, + 'access_token': {'required': True}, + 'access_token_secret': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, + 'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, company_id, consumer_key, consumer_secret, access_token, access_token_secret, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None: + super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.company_id = company_id + self.consumer_key = consumer_key + self.consumer_secret = consumer_secret + self.access_token = access_token + self.access_token_secret = access_token_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.encrypted_credential = encrypted_credential + self.type = 'QuickBooks' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py new file mode 100644 index 000000000000..73446d0ed938 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'QuickBooksObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py new file mode 100644 index 000000000000..65f67d2b20af --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class QuickBooksObjectDataset(Dataset): + """QuickBooks server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'QuickBooksObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py new file mode 100644 index 000000000000..cce0a026ae5a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. 
Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(QuickBooksSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'QuickBooksSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py new file mode 100644 index 000000000000..a00f35d4e1c1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/quick_books_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class QuickBooksSource(CopySource): + """A copy activity QuickBooks server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'QuickBooksSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py new file mode 100644 index 000000000000..f23d452392b0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
+ :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, **kwargs): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py new file mode 100644 index 000000000000..bbbe1fa28f17 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py new file mode 100644 index 000000000000..10aea1f00163 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_occurrence_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
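+#
+# Illustrative usage (editor's hedged sketch, not AutoRest output): "the
+# third Friday of every month" expressed as an occurrence; both values are
+# placeholders.
+#
+#   from azure.mgmt.datafactory.models import RecurrenceScheduleOccurrence
+#   occ = RecurrenceScheduleOccurrence(day='Friday', occurrence=3)
+#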
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceScheduleOccurrence(Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: 'Sunday', + 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'DayOfWeek'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.day = day + self.occurrence = occurrence diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py new file mode 100644 index 000000000000..fbe44fa3f021 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/recurrence_schedule_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
+ :type monthly_occurrences: + list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.minutes = minutes + self.hours = hours + self.week_days = week_days + self.month_days = month_days + self.monthly_occurrences = monthly_occurrences diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py new file mode 100644 index 000000000000..a2e3bddb9425 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). 
+ :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py new file mode 100644 index 000000000000..b47878ef4354 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redirect_incompatible_row_settings_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedirectIncompatibleRowSettings(Model): + """Redirect incompatible row settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Name of the Azure Storage, Storage + SAS, or Azure Data Lake Store linked service used for redirecting + incompatible row. Must be specified if redirectIncompatibleRowSettings is + specified. Type: string (or Expression with resultType string). + :type linked_service_name: object + :param path: The path for storing the redirect incompatible row data. + Type: string (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: + super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py new file mode 100644 index 000000000000..7114b85e10db --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
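Reviewer note: per the docstring above, `linked_service_name` and `path` are typed `object` so they can carry either a plain string or an ADF expression. A sketch with hypothetical names:

    from azure.mgmt.datafactory.models import RedirectIncompatibleRowSettings

    # Route rejected rows to a blob folder; a plain string works here,
    # but an Expression with resultType string is equally valid.
    redirect = RedirectIncompatibleRowSettings(
        linked_service_name='AzureBlobStagingLS',
        path='copyactivity/incompatible-rows',
    )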
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from Amazon Redshift source + will be unloaded into S3 first and then copied into the targeted sink from + the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). + :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = kwargs.get('s3_linked_service_name', None) + self.bucket_name = kwargs.get('bucket_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py new file mode 100644 index 000000000000..a40d014a32f9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/redshift_unload_settings_py3.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RedshiftUnloadSettings(Model): + """The Amazon S3 settings needed for the interim Amazon S3 when copying from + Amazon Redshift with unload. With this, data from Amazon Redshift source + will be unloaded into S3 first and then copied into the targeted sink from + the interim S3. + + All required parameters must be populated in order to send to Azure. + + :param s3_linked_service_name: Required. The name of the Amazon S3 linked + service which will be used for the unload operation when copying from the + Amazon Redshift source. + :type s3_linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param bucket_name: Required. The bucket of the interim Amazon S3 which + will be used to store the unloaded data from Amazon Redshift source. The + bucket must be in the same region as the Amazon Redshift source. Type: + string (or Expression with resultType string). 
+ :type bucket_name: object + """ + + _validation = { + 's3_linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + } + + def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: + super(RedshiftUnloadSettings, self).__init__(**kwargs) + self.s3_linked_service_name = s3_linked_service_name + self.bucket_name = bucket_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py new file mode 100644 index 000000000000..2450f31222df --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
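Reviewer note: both constructor variants above require `s3_linked_service_name` and `bucket_name`. A sketch, assuming `LinkedServiceReference` takes a `reference_name` keyword as it does elsewhere in this SDK:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        RedshiftUnloadSettings,
    )

    unload = RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(reference_name='InterimS3'),
        bucket_name='redshift-unload-staging',  # must be in the same region as the Redshift source
    )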
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RelationalSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'RelationalSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py new file mode 100644 index 000000000000..f88383cbd729 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class RelationalSource(CopySource): + """A copy activity source for various relational databases. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'RelationalSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py new file mode 100644 index 000000000000..e5dd2e0786c8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). 
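Reviewer note: a sketch of the source above as it would be configured for a copy activity; the retry-wait string has to match the timespan pattern in the docstring:

    from azure.mgmt.datafactory.models import RelationalSource

    source = RelationalSource(
        query='SELECT * FROM dbo.Orders',
        source_retry_count=3,
        source_retry_wait='00:00:30',  # hh:mm:ss, per the documented pattern
    )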
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RelationalTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'RelationalTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py new file mode 100644 index 000000000000..3c85d95f8033 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/relational_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RelationalTableDataset(Dataset): + """The relational table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The relational table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'RelationalTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py new file mode 100644 index 000000000000..8de6a70ecc99 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class RerunTriggerResource(SubResource): + """RerunTrigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the rerun trigger. 
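Reviewer note: a matching dataset sketch (linked service name hypothetical). Note from the attribute map that `table_name` serializes under `typeProperties.tableName`:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        RelationalTableDataset,
    )

    dataset = RelationalTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='MyDbLS'),
        table_name='dbo.Orders',
    )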
+    :type properties:
+     ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger
+    """
+
+    _validation = {
+        'id': {'readonly': True},
+        'name': {'readonly': True},
+        'type': {'readonly': True},
+        'etag': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'},
+    }
+
+    def __init__(self, **kwargs):
+        super(RerunTriggerResource, self).__init__(**kwargs)
+        self.properties = kwargs.get('properties', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py
new file mode 100644
index 000000000000..23d971c1082e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_paged.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class RerunTriggerResourcePaged(Paged):
+    """
+    A paging container for iterating over a list of :class:`RerunTriggerResource <azure.mgmt.datafactory.models.RerunTriggerResource>` object
+    """
+
+    _attribute_map = {
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+        'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'}
+    }
+
+    def __init__(self, *args, **kwargs):
+
+        super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
new file mode 100644
index 000000000000..19814ad0d76f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_trigger_resource_py3.py
@@ -0,0 +1,54 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .sub_resource_py3 import SubResource
+
+
+class RerunTriggerResource(SubResource):
+    """RerunTrigger resource type.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: The resource identifier.
+    :vartype id: str
+    :ivar name: The resource name.
+    :vartype name: str
+    :ivar type: The resource type.
+    :vartype type: str
+    :ivar etag: Etag identifies change in the resource.
+    :vartype etag: str
+    :param properties: Required. Properties of the rerun trigger.
+ :type properties: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(RerunTriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py new file mode 100644 index 000000000000..8c5ca2d67f3c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
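Reviewer note: the paged container above is what list operations return, and iterating it transparently follows `nextLink`. The operations-group call below is illustrative only; this patch does not show the operations classes, so the method name is an assumption:

    # client: an authenticated DataFactoryManagementClient (construction not shown).
    # The rerun_triggers.list_by_trigger operation is assumed, not shown in this patch.
    for rerun_trigger in client.rerun_triggers.list_by_trigger(
            'my-rg', 'my-factory', 'my-trigger'):
        print(rerun_trigger.name, rerun_trigger.properties.max_concurrency)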
+ :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.parent_trigger = kwargs.get('parent_trigger', None) + self.requested_start_time = kwargs.get('requested_start_time', None) + self.requested_end_time = kwargs.get('requested_end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.type = 'RerunTumblingWindowTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py new file mode 100644 index 000000000000..4b87f070b6be --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.max_concurrency = kwargs.get('max_concurrency', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py new file mode 100644 index 000000000000..6fadecca588b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_action_parameters_py3.py @@ -0,0 +1,47 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RerunTumblingWindowTriggerActionParameters(Model): + """Rerun tumbling window trigger Parameters. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. The start time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type start_time: datetime + :param end_time: Required. The end time for the time period for which + restatement is initiated. Only UTC time is currently supported. + :type end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. + :type max_concurrency: int + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: + super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.max_concurrency = max_concurrency diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..4a7a20759c1b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rerun_tumbling_window_trigger_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows + from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param parent_trigger: The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period + for which restatement is initiated. Only UTC time is currently supported. + :type requested_start_time: datetime + :param requested_end_time: Required. The end time for the time period for + which restatement is initiated. Only UTC time is currently supported. + :type requested_end_time: datetime + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a rerun is triggered. 
+ :type max_concurrency: int + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + } + + def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.max_concurrency = max_concurrency + self.type = 'RerunTumblingWindowTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py new file mode 100644 index 000000000000..f6b2d7d3b512 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. 
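Reviewer note: a sketch putting the rerun models together. Per the docstrings, the requested times must be UTC, and the validation map caps `max_concurrency` at 1-50:

    from datetime import datetime, timedelta
    from azure.mgmt.datafactory.models import (
        RerunTriggerResource,
        RerunTumblingWindowTrigger,
    )

    now = datetime.utcnow()
    rerun = RerunTumblingWindowTrigger(
        requested_start_time=now - timedelta(days=7),
        requested_end_time=now,
        max_concurrency=10,  # validated: 1 <= value <= 50
    )
    resource = RerunTriggerResource(properties=rerun)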
+ :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.e_tag = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py new file mode 100644 index 000000000000..cfc0e4b09aa5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/resource_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Resource(Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.e_tag = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py new file mode 100644 index 000000000000..16d1af502787 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
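Reviewer note: because `id`, `name`, `type`, and `e_tag` are read-only, msrest drops them when serializing a request body; only `location` and `tags` survive, as in this sketch (assuming msrest's default Model.serialize behavior):

    from azure.mgmt.datafactory.models import Resource

    res = Resource(location='eastus', tags={'env': 'dev'})
    body = res.serialize()  # {'location': 'eastus', 'tags': {'env': 'dev'}}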
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Responsys' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py new file mode 100644 index 000000000000..6d8a74a0a34b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_linked_service_py3.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ResponsysLinkedService(LinkedService): + """Responsys linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. 
The endpoint of the Responsys server. + :type endpoint: object + :param client_id: Required. The client ID associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_id: object + :param client_secret: The client secret associated with the Responsys + application. Type: string (or Expression with resultType string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Responsys' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py 
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py new file mode 100644 index 000000000000..f459e69113a1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ResponsysObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py new file mode 100644 index 000000000000..c5f375910aaf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ResponsysObjectDataset(Dataset): + """Responsys dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
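Reviewer note: a sketch wiring the Responsys models together. `SecureString` is assumed here as the usual SecretBase implementation for inline secrets; the endpoint and names are placeholders:

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        ResponsysLinkedService,
        ResponsysObjectDataset,
        SecureString,
    )

    linked_service = ResponsysLinkedService(
        endpoint='https://<instance>.responsys.ocs.oraclecloud.com',  # placeholder
        client_id='my-client-id',
        client_secret=SecureString(value='<client-secret>'),
    )
    dataset = ResponsysObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='ResponsysLS'),
        table_name='Contacts',
    )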
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ResponsysObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py new file mode 100644 index 000000000000..fd25b8e71377 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ResponsysSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ResponsysSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py new file mode 100644 index 000000000000..8d5e4ac091f7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/responsys_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ResponsysSource(CopySource): + """A copy activity Responsys source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
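+
+    Example (an illustrative sketch only; the query text is an assumption,
+     not part of the generated model):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import ResponsysSource
+
+        # Hypothetical query against a Responsys object.
+        source = ResponsysSource(query='SELECT * FROM Contacts')
+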
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ResponsysSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py new file mode 100644 index 000000000000..9a5d41858e54 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). 
+ :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestResourceDataset, self).__init__(**kwargs) + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.pagination_rules = kwargs.get('pagination_rules', None) + self.type = 'RestResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py new file mode 100644 index 000000000000..99f39c97f373 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_resource_dataset_py3.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class RestResourceDataset(Dataset): + """A Rest service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
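+
+    Example (an illustrative sketch only; the linked service name and the
+     resource path below are assumptions):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, RestResourceDataset)
+
+        dataset = RestResourceDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='MyRestService'),  # assumed name
+            relative_url='api/v1/orders',         # hypothetical path
+            request_method='GET')
+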
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param relative_url: The relative URL to the resource that the RESTful API + provides. Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method used to call the RESTful API. The + default is GET. Type: string (or Expression with resultType string). + :type request_method: object + :param request_body: The HTTP request body to the RESTful API if + requestMethod is POST. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The additional HTTP headers in the request to + the RESTful API. Type: string (or Expression with resultType string). + :type additional_headers: object + :param pagination_rules: The pagination rules to compose next page + requests. Type: string (or Expression with resultType string). + :type pagination_rules: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: + super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.pagination_rules = 
pagination_rules + self.type = 'RestResource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py new file mode 100644 index 000000000000..0fbb15654438 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(RestServiceLinkedService, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.enable_server_certificate_validation = kwargs.get('enable_server_certificate_validation', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.aad_resource_id = kwargs.get('aad_resource_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'RestService' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py new file mode 100644 index 000000000000..9af9f609e52b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_service_linked_service_py3.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class RestServiceLinkedService(LinkedService): + """Rest Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param url: Required. The base URL of the REST service. + :type url: object + :param enable_server_certificate_validation: Whether to validate server + side SSL certificate when connecting to the endpoint.The default value is + true. Type: boolean (or Expression with resultType boolean). + :type enable_server_certificate_validation: object + :param authentication_type: Required. Type of authentication used to + connect to the REST service. Possible values include: 'Anonymous', + 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.RestServiceAuthenticationType + :param user_name: The user name used in Basic authentication type. + :type user_name: object + :param password: The password used in Basic authentication type. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param service_principal_id: The application's client ID used in + AadServicePrincipal authentication type. + :type service_principal_id: object + :param service_principal_key: The application's key used in + AadServicePrincipal authentication type. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The tenant information (domain name or tenant ID) used in + AadServicePrincipal authentication type under which your application + resides. + :type tenant: object + :param aad_resource_id: The resource you are requesting authorization to + use. + :type aad_resource_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
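+
+    Example (an illustrative sketch only; the URL is a placeholder):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import RestServiceLinkedService
+
+        # Anonymous access; see authentication_type above for other modes.
+        linked_service = RestServiceLinkedService(
+            url='https://example.com/api',
+            authentication_type='Anonymous')
+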
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: + super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.url = url + self.enable_server_certificate_validation = enable_server_certificate_validation + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.aad_resource_id = aad_resource_id + self.encrypted_credential = encrypted_credential + self.type = 'RestService' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py new file mode 100644 index 000000000000..a8c7efca21e3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class RestSource(CopySource): + """A copy activity Rest service source. + + All required parameters must be populated in order to send to Azure. 
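+
+    Example (an illustrative sketch; the parameters used here are documented
+     below, and both values are assumptions -- request_interval in particular
+     has no documented format here):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import RestSource
+
+        source = RestSource(
+            http_request_timeout='00:01:40',  # the documented default
+            request_interval='00:00:01')      # assumed pause between pages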
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param http_request_timeout: The timeout (TimeSpan) to get an HTTP
+     response. It is the timeout to get a response, not the timeout to read
+     response data. Default value: 00:01:40. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type http_request_timeout: object
+    :param request_interval: The time to wait before sending the next page
+     request.
+    :type request_interval: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
+        'request_interval': {'key': 'requestInterval', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(RestSource, self).__init__(**kwargs)
+        self.http_request_timeout = kwargs.get('http_request_timeout', None)
+        self.request_interval = kwargs.get('request_interval', None)
+        self.type = 'RestSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py
new file mode 100644
index 000000000000..cf0878e050e0
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/rest_source_py3.py
@@ -0,0 +1,65 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class RestSource(CopySource):
+    """A copy activity Rest service source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param http_request_timeout: The timeout (TimeSpan) to get an HTTP
+     response. It is the timeout to get a response, not the timeout to read
+     response data. Default value: 00:01:40. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type http_request_timeout: object
+    :param request_interval: The time to wait before sending the next page
+     request.
+    :type request_interval: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
+        'request_interval': {'key': 'requestInterval', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, request_interval=None, **kwargs) -> None:
+        super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.http_request_timeout = http_request_timeout
+        self.request_interval = request_interval
+        self.type = 'RestSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py
new file mode 100644
index 000000000000..e6f5b1876259
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class RetryPolicy(Model):
+    """Execution policy for an activity.
+
+    :param count: Maximum ordinary retry attempts. Default is 0. Type: integer
+     (or Expression with resultType integer), minimum: 0.
+    :type count: object
+    :param interval_in_seconds: Interval between retries in seconds. Default
+     is 30.
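+
+    Example (an illustrative sketch; values chosen to satisfy the validation
+     rules below):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import RetryPolicy
+
+        # Retry up to three times, waiting 30 seconds (the minimum allowed)
+        # between attempts.
+        policy = RetryPolicy(count=3, interval_in_seconds=30)
+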
+ :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(RetryPolicy, self).__init__(**kwargs) + self.count = kwargs.get('count', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py new file mode 100644 index 000000000000..b51b87a49938 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/retry_policy_py3.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RetryPolicy(Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer + (or Expression with resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default + is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py new file mode 100644 index 000000000000..9271f7adf029 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunFilterParameters(Model): + """Query parameters for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param last_updated_after: Required. The time at or after which the run + event was updated in 'ISO 8601' format. + :type last_updated_after: datetime + :param last_updated_before: Required. 
The time at or before which the run
+     event was updated in 'ISO 8601' format.
+    :type last_updated_before: datetime
+    :param filters: List of filters.
+    :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter]
+    :param order_by: List of OrderBy options.
+    :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy]
+    """
+
+    _validation = {
+        'last_updated_after': {'required': True},
+        'last_updated_before': {'required': True},
+    }
+
+    _attribute_map = {
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'},
+        'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'},
+        'filters': {'key': 'filters', 'type': '[RunQueryFilter]'},
+        'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(RunFilterParameters, self).__init__(**kwargs)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.last_updated_after = kwargs.get('last_updated_after', None)
+        self.last_updated_before = kwargs.get('last_updated_before', None)
+        self.filters = kwargs.get('filters', None)
+        self.order_by = kwargs.get('order_by', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py
new file mode 100644
index 000000000000..c96e64eb63b3
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_filter_parameters_py3.py
@@ -0,0 +1,54 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class RunFilterParameters(Model):
+    """Query parameters for listing runs.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param continuation_token: The continuation token for getting the next
+     page of results. Null for first page.
+    :type continuation_token: str
+    :param last_updated_after: Required. The time at or after which the run
+     event was updated in 'ISO 8601' format.
+    :type last_updated_after: datetime
+    :param last_updated_before: Required. The time at or before which the run
+     event was updated in 'ISO 8601' format.
+    :type last_updated_before: datetime
+    :param filters: List of filters.
+    :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter]
+    :param order_by: List of OrderBy options.
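+
+    Example (an illustrative sketch; it also shows RunQueryFilter and
+     RunQueryOrderBy, which are documented elsewhere in this same patch):
+
+    .. code-block:: python
+
+        from datetime import datetime, timedelta
+        from azure.mgmt.datafactory.models import (
+            RunFilterParameters, RunQueryFilter, RunQueryOrderBy)
+
+        # Failed pipeline runs from the last day, newest first.
+        params = RunFilterParameters(
+            last_updated_after=datetime.utcnow() - timedelta(days=1),
+            last_updated_before=datetime.utcnow(),
+            filters=[RunQueryFilter(
+                operand='Status', operator='Equals', values=['Failed'])],
+            order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')])
+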
+ :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] + """ + + _validation = { + 'last_updated_after': {'required': True}, + 'last_updated_before': {'required': True}, + } + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, + 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, + 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, + 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, + } + + def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: + super(RunFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.last_updated_after = last_updated_after + self.last_updated_before = last_updated_before + self.filters = filters + self.order_by = order_by diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py new file mode 100644 index 000000000000..7d54150a6815 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. 
+ :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = kwargs.get('operand', None) + self.operator = kwargs.get('operator', None) + self.values = kwargs.get('values', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py new file mode 100644 index 000000000000..814e7a4b499b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_filter_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryFilter(Model): + """Query filter option for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param operand: Required. Parameter name to be used for filter. The + allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd + and Status; to query activity runs are ActivityName, ActivityRunStart, + ActivityRunEnd, ActivityType and Status, and to query trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', + 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' + :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand + :param operator: Required. Operator to be used for filter. Possible values + include: 'Equals', 'NotEquals', 'In', 'NotIn' + :type operator: str or + ~azure.mgmt.datafactory.models.RunQueryFilterOperator + :param values: Required. List of filter values. + :type values: list[str] + """ + + _validation = { + 'operand': {'required': True}, + 'operator': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'operand': {'key': 'operand', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + } + + def __init__(self, *, operand, operator, values, **kwargs) -> None: + super(RunQueryFilter, self).__init__(**kwargs) + self.operand = operand + self.operator = operator + self.values = values diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py new file mode 100644 index 000000000000..21afabcf215f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. Possible values + include: 'ASC', 'DESC' + :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ + + _validation = { + 'order_by': {'required': True}, + 'order': {'required': True}, + } + + _attribute_map = { + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = kwargs.get('order_by', None) + self.order = kwargs.get('order', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py new file mode 100644 index 000000000000..a3ddc8854d47 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/run_query_order_by_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class RunQueryOrderBy(Model): + """An object to provide order by options for listing runs. + + All required parameters must be populated in order to send to Azure. + + :param order_by: Required. Parameter name to be used for order by. The + allowed parameters to order by for pipeline runs are PipelineName, + RunStart, RunEnd and Status; for activity runs are ActivityName, + ActivityRunStart, ActivityRunEnd and Status; for trigger runs are + TriggerName, TriggerRunTimestamp and Status. Possible values include: + 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', + 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' + :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField + :param order: Required. Sorting order of the parameter. 
Possible values + include: 'ASC', 'DESC' + :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder + """ + + _validation = { + 'order_by': {'required': True}, + 'order': {'required': True}, + } + + _attribute_map = { + 'order_by': {'key': 'orderBy', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__(self, *, order_by, order, **kwargs) -> None: + super(RunQueryOrderBy, self).__init__(**kwargs) + self.order_by = order_by + self.order = order diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py new file mode 100644 index 000000000000..c644ac664831 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, + for example, 'https://[domain].my.salesforce.com'. Type: string (or + Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
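+
+    Example (an illustrative sketch; the credentials are placeholders and
+     SecureString is assumed to be available from the same models package):
+
+    .. code-block:: python
+
+        from azure.mgmt.datafactory.models import (
+            SalesforceLinkedService, SecureString)
+
+        # Sandbox environment, per the environment_url note above.
+        linked_service = SalesforceLinkedService(
+            environment_url='https://test.salesforce.com',
+            username='user@example.com',
+            password=SecureString(value='<password>'),
+            security_token=SecureString(value='<security-token>'))
+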
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Salesforce' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py new file mode 100644 index 000000000000..05fcea7a3990 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_linked_service_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceLinkedService(LinkedService): + """Linked service for Salesforce. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce instance. Default is + 'https://login.salesforce.com'. To copy data from sandbox, specify + 'https://test.salesforce.com'. To copy data from custom domain, specify, + for example, 'https://[domain].my.salesforce.com'. Type: string (or + Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. 
Type: string (or Expression with resultType string).
+ :type username: object
+ :param password: The password for Basic authentication of the Salesforce
+ instance.
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ :param security_token: The security token required for remote access to
+ the Salesforce instance.
+ :type security_token: ~azure.mgmt.datafactory.models.SecretBase
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'},
+ 'username': {'key': 'typeProperties.username', 'type': 'object'},
+ 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+ 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None:
+ super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+ self.environment_url = environment_url
+ self.username = username
+ self.password = password
+ self.security_token = security_token
+ self.encrypted_credential = encrypted_credential
+ self.type = 'Salesforce'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py
new file mode 100644
index 000000000000..93b4fcdb3d1f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service.py
@@ -0,0 +1,91 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SalesforceMarketingCloudLinkedService(LinkedService):
+ """Salesforce Marketing Cloud linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SalesforceMarketingCloud' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py new file mode 100644 index 000000000000..d7e09e27a43f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceMarketingCloudLinkedService(LinkedService): + """Salesforce Marketing Cloud linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param client_id: Required. 
The client ID associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_id: object + :param client_secret: The client secret associated with the Salesforce + Marketing Cloud application. Type: string (or Expression with resultType + string). + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. Type: boolean (or + Expression with resultType boolean). + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. Type: + boolean (or Expression with resultType boolean). + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.client_id = client_id + self.client_secret = client_secret + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceMarketingCloud' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py new file mode 100644 index 000000000000..20f581ce1c50 --- /dev/null +++ 
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
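[Illustrative aside, not part of the generated patch: a minimal sketch of how the SalesforceMarketingCloudObjectDataset defined in this hunk might be constructed once the models ship. The linked-service name 'SFMCLinkedService' and the table 'Campaign' are placeholder assumptions.]

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference,
        SalesforceMarketingCloudObjectDataset,
    )

    # Reference an existing Salesforce Marketing Cloud linked service by name.
    ls_ref = LinkedServiceReference(reference_name='SFMCLinkedService')

    # table_name is typed 'object', so it accepts either a literal string or
    # an ADF expression such as {'type': 'Expression', 'value': '@dataset().tbl'}.
    dataset = SalesforceMarketingCloudObjectDataset(
        linked_service_name=ls_ref,
        table_name='Campaign',
    )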
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SalesforceMarketingCloudObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..526ac806649f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceMarketingCloudObjectDataset(Dataset): + """Salesforce Marketing Cloud dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SalesforceMarketingCloudObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py new file mode 100644 index 000000000000..09a0eca1758e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SalesforceMarketingCloudSource(CopySource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
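[Illustrative aside, not part of the generated patch: a sketch of the SalesforceMarketingCloudSource defined in this hunk, as it might be attached to a copy activity; the query text is an assumed example.]

    from azure.mgmt.datafactory.models import SalesforceMarketingCloudSource

    # 'query' is typed 'object': pass a literal query string or an ADF
    # expression. The retry settings inherited from CopySource are optional.
    source = SalesforceMarketingCloudSource(
        query='SELECT * FROM Campaign',
        source_retry_count=3,
    )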
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceMarketingCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SalesforceMarketingCloudSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py new file mode 100644 index 000000000000..9b898af0c3a1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_marketing_cloud_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceMarketingCloudSource(CopySource): + """A copy activity Salesforce Marketing Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SalesforceMarketingCloudSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py new file mode 100644 index 000000000000..10cfce97fe0f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce object API name. Type: string (or + Expression with resultType string). 
+ :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceObjectDataset, self).__init__(**kwargs) + self.object_api_name = kwargs.get('object_api_name', None) + self.type = 'SalesforceObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py new file mode 100644 index 000000000000..3c3f75d6059e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceObjectDataset(Dataset): + """The Salesforce object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce object API name. Type: string (or + Expression with resultType string). 
+ :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py new file mode 100644 index 000000000000..4d1a93c08915 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink.py @@ -0,0 +1,83 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SalesforceSink(CopySink): + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. 
Default is
+ Insert.
+ :type write_behavior: object
+ :param external_id_field_name: The name of the external ID field for the
+ upsert operation. Default value is the 'Id' column. Type: string (or
+ Expression with resultType string).
+ :type external_id_field_name: object
+ :param ignore_null_values: The flag indicating whether or not to ignore
+ null values from the input dataset (except key fields) during a write
+ operation. Default value is false. If set to true, ADF leaves the data in
+ the destination object unchanged when doing an upsert/update operation and
+ inserts the defined default value when doing an insert operation; if set
+ to false, ADF updates the data in the destination object to NULL when
+ doing an upsert/update operation and inserts a NULL value when doing an
+ insert operation. Type: boolean (or Expression with resultType boolean).
+ :type ignore_null_values: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+ 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(SalesforceSink, self).__init__(**kwargs)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.external_id_field_name = kwargs.get('external_id_field_name', None)
+ self.ignore_null_values = kwargs.get('ignore_null_values', None)
+ self.type = 'SalesforceSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
new file mode 100644
index 000000000000..ed7591fbb59b
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_sink_py3.py
@@ -0,0 +1,83 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class SalesforceSink(CopySink):
+ """A copy activity Salesforce sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param write_batch_size: Write batch size. Type: integer (or Expression
+ with resultType integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or
+ Expression with resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression
+ with resultType integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+ resultType string), pattern:
+ ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count
+ for the sink data store. Type: integer (or Expression with resultType
+ integer).
+ :type max_concurrent_connections: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param write_behavior: The write behavior for the operation. Default is
+ Insert.
+ :type write_behavior: object
+ :param external_id_field_name: The name of the external ID field for the
+ upsert operation. Default value is the 'Id' column. Type: string (or
+ Expression with resultType string).
+ :type external_id_field_name: object
+ :param ignore_null_values: The flag indicating whether or not to ignore
+ null values from the input dataset (except key fields) during a write
+ operation. Default value is false. If set to true, ADF leaves the data in
+ the destination object unchanged when doing an upsert/update operation and
+ inserts the defined default value when doing an insert operation; if set
+ to false, ADF updates the data in the destination object to NULL when
+ doing an upsert/update operation and inserts a NULL value when doing an
+ insert operation. Type: boolean (or Expression with resultType boolean).
+ :type ignore_null_values: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
+ 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
+ }
+
+ def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None:
+ super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.write_behavior = write_behavior
+ self.external_id_field_name = external_id_field_name
+ self.ignore_null_values = ignore_null_values
+ self.type = 'SalesforceSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py
new file mode 100644
index 000000000000..57a10411f487
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source.py
@@ -0,0 +1,62 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SalesforceSource(CopySource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. + :type read_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) + self.type = 'SalesforceSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py new file mode 100644 index 000000000000..08e6776f5f98 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_source_py3.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceSource(CopySource): + """A copy activity Salesforce source. + + All required parameters must be populated in order to send to Azure. 
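[Illustrative aside, not part of the generated patch: a sketch of the SalesforceSource defined in this hunk. The SOQL text is an assumed example, and 'QueryAll' is the service's mode that also returns soft-deleted rows; 'Query' is the default read behavior.]

    from azure.mgmt.datafactory.models import SalesforceSource

    # Both fields are typed 'object' and accept literal strings or ADF
    # expressions; read_behavior may be omitted to use the default, Query.
    source = SalesforceSource(
        query='SELECT Id, Name FROM Account',
        read_behavior='QueryAll',
    )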
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. + :type read_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py new file mode 100644 index 000000000000..a57164c7215d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapBWLinkedService(LinkedService): + """SAP Business Warehouse Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance. Type: string + (or Expression with resultType string). + :type server: object + :param system_number: Required. System number of the BW system. (Usually a + two-digit decimal number represented as a string.) Type: string (or + Expression with resultType string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system. + (Usually a three-digit decimal number represented as a string) Type: + string (or Expression with resultType string). + :type client_id: object + :param user_name: Username to access the SAP BW server. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP BW server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapBWLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapBW' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py new file mode 100644 index 000000000000..92aef25dc215 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_bw_linked_service_py3.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SapBWLinkedService(LinkedService):
+    """SAP Business Warehouse Linked Service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param server: Required. Host name of the SAP BW instance. Type: string
+     (or Expression with resultType string).
+    :type server: object
+    :param system_number: Required. System number of the BW system. (Usually a
+     two-digit decimal number represented as a string.) Type: string (or
+     Expression with resultType string).
+    :type system_number: object
+    :param client_id: Required. Client ID of the client on the BW system.
+     (Usually a three-digit decimal number represented as a string.) Type:
+     string (or Expression with resultType string).
+    :type client_id: object
+    :param user_name: Username to access the SAP BW server. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password to access the SAP BW server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'server': {'required': True},
+        'system_number': {'required': True},
+        'client_id': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'server': {'key': 'typeProperties.server', 'type': 'object'},
+        'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
+        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
+        super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.server = server
+        self.system_number = system_number
+        self.client_id = client_id
+        self.user_name = user_name
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'SapBW'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py
new file mode 100644
index 000000000000..53d47ab8ae41
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SapCloudForCustomerLinkedService(LinkedService):
+    """Linked service for SAP Cloud for Customer.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of SAP Cloud for Customer OData API. For
+     example, 'https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1'. Type:
+     string (or Expression with resultType string).
+    :type url: object
+    :param username: The username for Basic authentication. Type: string (or
+     Expression with resultType string).
+    :type username: object
+    :param password: The password for Basic authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Either encryptedCredential or username/password must
+     be provided. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'url': {'key': 'typeProperties.url', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapCloudForCustomerLinkedService, self).__init__(**kwargs)
+        self.url = kwargs.get('url', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'SapCloudForCustomer'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py
new file mode 100644
index 000000000000..9e47fd696503
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_linked_service_py3.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SapCloudForCustomerLinkedService(LinkedService):
+    """Linked service for SAP Cloud for Customer.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of SAP Cloud for Customer OData API. For
+     example, 'https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1'. Type:
+     string (or Expression with resultType string).
+    :type url: object
+    :param username: The username for Basic authentication. Type: string (or
+     Expression with resultType string).
+    :type username: object
+    :param password: The password for Basic authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Either encryptedCredential or username/password must
+     be provided. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'url': {'key': 'typeProperties.url', 'type': 'object'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None:
+        super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.url = url
+        self.username = username
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'SapCloudForCustomer'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py
new file mode 100644
index 000000000000..436b251207a4
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class SapCloudForCustomerResourceDataset(Dataset):
+    """The path of the SAP Cloud for Customer OData entity.
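+
+    Usage sketch (illustrative only, not produced by the generator; the
+    reference name and entity path below are placeholders)::
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, SapCloudForCustomerResourceDataset)
+
+        dataset = SapCloudForCustomerResourceDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='MyC4CLinkedService'),
+            path='LeadCollection')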
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param path: Required. The path of the SAP Cloud for Customer OData
+     entity. Type: string (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'path': {'key': 'typeProperties.path', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapCloudForCustomerResourceDataset, self).__init__(**kwargs)
+        self.path = kwargs.get('path', None)
+        self.type = 'SapCloudForCustomerResource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py
new file mode 100644
index 000000000000..455bad7c9095
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_resource_dataset_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class SapCloudForCustomerResourceDataset(Dataset):
+    """The path of the SAP Cloud for Customer OData entity.
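+
+    Usage sketch (illustrative only; under Python 3 the same construction
+    goes through the keyword-only constructor, placeholders as above)::
+
+        from azure.mgmt.datafactory.models import (
+            LinkedServiceReference, SapCloudForCustomerResourceDataset)
+
+        dataset = SapCloudForCustomerResourceDataset(
+            linked_service_name=LinkedServiceReference(
+                reference_name='MyC4CLinkedService'),
+            path='LeadCollection')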
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param path: Required. The path of the SAP Cloud for Customer OData
+     entity. Type: string (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'path': {'key': 'typeProperties.path', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.path = path
+        self.type = 'SapCloudForCustomerResource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py
new file mode 100644
index 000000000000..ae99093f277e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink import CopySink
+
+
+class SapCloudForCustomerSink(CopySink):
+    """A copy activity SAP Cloud for Customer sink.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+     'Insert'.
+    :type write_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapCloudForCustomerSink, self).__init__(**kwargs)
+        self.write_behavior = kwargs.get('write_behavior', None)
+        self.type = 'SapCloudForCustomerSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py
new file mode 100644
index 000000000000..bdbc2cefcbd1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_sink_py3.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_sink_py3 import CopySink
+
+
+class SapCloudForCustomerSink(CopySink):
+    """A copy activity SAP Cloud for Customer sink.
+
+    All required parameters must be populated in order to send to Azure.
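+
+    Usage sketch (illustrative only; 'Update' is shown in place of the
+    default 'Insert' write behavior)::
+
+        from azure.mgmt.datafactory.models import SapCloudForCustomerSink
+
+        sink = SapCloudForCustomerSink(write_behavior='Update')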
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param write_batch_size: Write batch size. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type write_batch_size: object
+    :param write_batch_timeout: Write batch timeout. Type: string (or
+     Expression with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type write_batch_timeout: object
+    :param sink_retry_count: Sink retry count. Type: integer (or Expression
+     with resultType integer).
+    :type sink_retry_count: object
+    :param sink_retry_wait: Sink retry wait. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param write_behavior: The write behavior for the operation. Default is
+     'Insert'.
+    :type write_behavior: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
+        super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.write_behavior = write_behavior
+        self.type = 'SapCloudForCustomerSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py
new file mode 100644
index 000000000000..561c1b342f93
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SapCloudForCustomerSource(CopySource):
+    """A copy activity source for SAP Cloud for Customer.
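+
+    Usage sketch (illustrative only; the OData query is a placeholder)::
+
+        from azure.mgmt.datafactory.models import SapCloudForCustomerSource
+
+        source = SapCloudForCustomerSource(query='$top=10')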
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: SAP Cloud for Customer OData query. For example, "$top=1".
+     Type: string (or Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapCloudForCustomerSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'SapCloudForCustomerSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py
new file mode 100644
index 000000000000..e9dab6ad1899
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_cloud_for_customer_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class SapCloudForCustomerSource(CopySource):
+    """A copy activity source for SAP Cloud for Customer.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: SAP Cloud for Customer OData query. For example, "$top=1".
+     Type: string (or Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'SapCloudForCustomerSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py
new file mode 100644
index 000000000000..0ca69242055f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SapEccLinkedService(LinkedService):
+    """Linked service for SAP ERP Central Component (SAP ECC).
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of SAP ECC OData API. For example,
+     'https://hostname:port/sap/opu/odata/sap/servicename/'. Type: string (or
+     Expression with resultType string).
+    :type url: str
+    :param username: The username for Basic authentication. Type: string (or
+     Expression with resultType string).
+    :type username: str
+    :param password: The password for Basic authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Either encryptedCredential or username/password must
+     be provided. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'url': {'key': 'typeProperties.url', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'str'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapEccLinkedService, self).__init__(**kwargs)
+        self.url = kwargs.get('url', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'SapEcc'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py
new file mode 100644
index 000000000000..7afd76b8fe09
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_linked_service_py3.py
@@ -0,0 +1,76 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SapEccLinkedService(LinkedService):
+    """Linked service for SAP ERP Central Component (SAP ECC).
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param url: Required. The URL of SAP ECC OData API. For example,
+     'https://hostname:port/sap/opu/odata/sap/servicename/'. Type: string (or
+     Expression with resultType string).
+    :type url: str
+    :param username: The username for Basic authentication. Type: string (or
+     Expression with resultType string).
+    :type username: str
+    :param password: The password for Basic authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Either encryptedCredential or username/password must
+     be provided. Type: string (or Expression with resultType string).
+    :type encrypted_credential: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'url': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'url': {'key': 'typeProperties.url', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'str'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+    }
+
+    def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None:
+        super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.url = url
+        self.username = username
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.type = 'SapEcc'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py
new file mode 100644
index 000000000000..f79367f49b3d
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class SapEccResourceDataset(Dataset):
+    """The path of the SAP ECC OData entity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param path: Required. The path of the SAP ECC OData entity. Type: string
+     (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'path': {'key': 'typeProperties.path', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapEccResourceDataset, self).__init__(**kwargs)
+        self.path = kwargs.get('path', None)
+        self.type = 'SapEccResource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py
new file mode 100644
index 000000000000..76aaeb9bb9f2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_resource_dataset_py3.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class SapEccResourceDataset(Dataset):
+    """The path of the SAP ECC OData entity.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param path: Required. The path of the SAP ECC OData entity. Type: string
+     (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'path': {'key': 'typeProperties.path', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
+        super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.path = path
+        self.type = 'SapEccResource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py
new file mode 100644
index 000000000000..6379c33713d4
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SapEccSource(CopySource):
+    """A copy activity source for SAP ECC.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: SAP ECC OData query. For example, "$top=1". Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapEccSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'SapEccSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py
new file mode 100644
index 000000000000..4412cac39960
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_ecc_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class SapEccSource(CopySource):
+    """A copy activity source for SAP ECC.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: SAP ECC OData query. For example, "$top=1". Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'SapEccSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py
new file mode 100644
index 000000000000..391bd79f8c28
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SapHanaLinkedService(LinkedService):
+    """SAP HANA Linked Service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param server: Required. Host name of the SAP HANA server. Type: string
+     (or Expression with resultType string).
+    :type server: object
+    :param authentication_type: The authentication type to be used to connect
+     to the SAP HANA server. Possible values include: 'Basic', 'Windows'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SapHanaAuthenticationType
+    :param user_name: Username to access the SAP HANA server. Type: string (or
+     Expression with resultType string).
+    :type user_name: object
+    :param password: Password to access the SAP HANA server.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'server': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'server': {'key': 'typeProperties.server', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SapHanaLinkedService, self).__init__(**kwargs)
+        self.server = kwargs.get('server', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'SapHana'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py
new file mode 100644
index 000000000000..bbf307d1bede
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_hana_linked_service_py3.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SapHanaLinkedService(LinkedService):
+    """SAP HANA Linked Service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param server: Required. Host name of the SAP HANA server. Type: string
+     (or Expression with resultType string).
+    :type server: object
+    :param authentication_type: The authentication type to be used to connect
+     to the SAP HANA server. Possible values include: 'Basic', 'Windows'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SapHanaAuthenticationType
+    :param user_name: Username to access the SAP HANA server.
Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password to access the SAP HANA server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.authentication_type = authentication_type + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapHana' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py new file mode 100644 index 000000000000..bfe9c323d302 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.system_number = kwargs.get('system_number', None) + self.client_id = kwargs.get('client_id', None) + self.language = kwargs.get('language', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SapOpenHub' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py new 
file mode 100644 index 000000000000..eddc50b0f1c5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_linked_service_py3.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SapOpenHubLinkedService(LinkedService): + """SAP Business Warehouse Open Hub Destination Linked Service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Host name of the SAP BW instance where the open + hub destination is located. Type: string (or Expression with resultType + string). + :type server: object + :param system_number: Required. System number of the BW system where the + open hub destination is located. (Usually a two-digit decimal number + represented as a string.) Type: string (or Expression with resultType + string). + :type system_number: object + :param client_id: Required. Client ID of the client on the BW system where + the open hub destination is located. (Usually a three-digit decimal number + represented as a string) Type: string (or Expression with resultType + string). + :type client_id: object + :param language: Language of the BW system where the open hub destination + is located. The default value is EN. Type: string (or Expression with + resultType string). + :type language: object + :param user_name: Username to access the SAP BW server where the open hub + destination is located. Type: string (or Expression with resultType + string). + :type user_name: object + :param password: Password to access the SAP BW server where the open hub + destination is located. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'system_number': {'required': True}, + 'client_id': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, language=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.system_number = system_number + self.client_id = client_id + self.language = language + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SapOpenHub' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py new file mode 100644 index 000000000000..ea98207a18cf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
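# --- Illustrative usage (not part of the generated patch): constructing the
# SapOpenHubLinkedService above. All connection values are placeholders; note
# that system_number and client_id travel as strings per the docstring.
from azure.mgmt.datafactory.models import SapOpenHubLinkedService, SecureString

open_hub_ls = SapOpenHubLinkedService(
    server='mybwserver',      # hypothetical BW host
    system_number='00',       # two-digit system number, as a string
    client_id='800',          # three-digit client, as a string
    language='EN',            # optional; EN is the documented default
    user_name='bw_user',
    password=SecureString(value='<password>'),
)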
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubSource, self).__init__(**kwargs) + self.type = 'SapOpenHubSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py new file mode 100644 index 000000000000..9cfa4e5243b6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_source_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SapOpenHubSource(CopySource): + """A copy activity source for SAP Business Warehouse Open Hub Destination + source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SapOpenHubSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py new file mode 100644 index 000000000000..2682969c5016 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. 
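# --- Illustrative usage (not part of the generated patch): SapOpenHubSource
# adds no fields beyond CopySource and is meant to serve as the source of a
# copy activity; the retry values below are arbitrary examples.
from azure.mgmt.datafactory.models import SapOpenHubSource

open_hub_source = SapOpenHubSource(
    source_retry_count=3,
    source_retry_wait='00:00:30',  # timespan string per the documented pattern
    max_concurrent_connections=1,
)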
Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SapOpenHubTableDataset, self).__init__(**kwargs) + self.open_hub_destination_name = kwargs.get('open_hub_destination_name', None) + self.exclude_last_request = kwargs.get('exclude_last_request', None) + self.base_request_id = kwargs.get('base_request_id', None) + self.type = 'SapOpenHubTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py new file mode 100644 index 000000000000..b06a53c10db3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sap_open_hub_table_dataset_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SapOpenHubTableDataset(Dataset): + """Sap Business Warehouse Open Hub Destination Table properties. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param open_hub_destination_name: Required. The name of the Open Hub + Destination with destination type as Database Table. Type: string (or + Expression with resultType string). + :type open_hub_destination_name: object + :param exclude_last_request: Whether to exclude the records of the last + request. The default value is true. Type: boolean (or Expression with + resultType boolean). + :type exclude_last_request: object + :param base_request_id: The ID of request for delta loading. Once it is + set, only data with requestId larger than the value of this property will + be retrieved. The default value is 0. Type: integer (or Expression with + resultType integer ). + :type base_request_id: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + 'open_hub_destination_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'}, + 'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'}, + 'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.open_hub_destination_name = open_hub_destination_name + self.exclude_last_request = exclude_last_request + self.base_request_id = base_request_id + self.type = 'SapOpenHubTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py new file mode 100644 index 000000000000..b9ea331b8c6e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
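# --- Illustrative usage (not part of the generated patch): a dataset pointing
# at an Open Hub destination through a linked-service reference. The reference
# and destination names are hypothetical.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SapOpenHubTableDataset)

open_hub_ds = SapOpenHubTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapOpenHubLS'),
    open_hub_destination_name='MyOpenHubDestination',
    exclude_last_request=True,  # matches the documented default
    base_request_id=0,          # only requests with a larger id are read
)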
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger import MultiplePipelineTrigger + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__(self, **kwargs): + super(ScheduleTrigger, self).__init__(**kwargs) + self.recurrence = kwargs.get('recurrence', None) + self.type = 'ScheduleTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py new file mode 100644 index 000000000000..f13f01c7fa13 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_py3.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .multiple_pipeline_trigger_py3 import MultiplePipelineTrigger + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipelines: Pipelines that need to be started. + :type pipelines: + list[~azure.mgmt.datafactory.models.TriggerPipelineReference] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None: + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.recurrence = recurrence + self.type = 'ScheduleTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py new file mode 100644 index 000000000000..85408c45547b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScheduleTriggerRecurrence(Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: 'NotSpecified', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: datetime + :param end_time: The end time. 
+ :type end_time: datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, **kwargs): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py new file mode 100644 index 000000000000..a9b6eded7b96 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/schedule_trigger_recurrence_py3.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScheduleTriggerRecurrence(Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: 'NotSpecified', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: datetime + :param end_time: The end time. + :type end_time: datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. 
+ :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None: + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py new file mode 100644 index 000000000000..50bc0131a5cf --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScriptAction(Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should + be executed. + :type roles: object + :param parameters: The parameters for the script action. + :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ScriptAction, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.uri = kwargs.get('uri', None) + self.roles = kwargs.get('roles', None) + self.parameters = kwargs.get('parameters', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py new file mode 100644 index 000000000000..c0e278073219 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/script_action_py3.py @@ -0,0 +1,49 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
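# --- Illustrative usage (not part of the generated patch): wiring the
# recurrence model into the ScheduleTrigger defined above. The pipeline name
# and schedule values are assumptions.
from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, ScheduleTrigger, ScheduleTriggerRecurrence,
    TriggerPipelineReference)

hourly_trigger = ScheduleTrigger(
    recurrence=ScheduleTriggerRecurrence(
        frequency='Hour',                 # a documented RecurrenceFrequency value
        interval=1,
        start_time=datetime(2019, 6, 1),
        time_zone='UTC',
    ),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'))],
)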
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class ScriptAction(Model): + """Custom script action to run on HDI ondemand cluster once it's up. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The user provided name of the script action. + :type name: str + :param uri: Required. The URI for the script action. + :type uri: str + :param roles: Required. The node types on which the script action should + be executed. + :type roles: object + :param parameters: The parameters for the script action. + :type parameters: str + """ + + _validation = { + 'name': {'required': True}, + 'uri': {'required': True}, + 'roles': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'roles': {'key': 'roles', 'type': 'object'}, + 'parameters': {'key': 'parameters', 'type': 'str'}, + } + + def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None: + super(ScriptAction, self).__init__(**kwargs) + self.name = name + self.uri = uri + self.roles = roles + self.parameters = parameters diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py new file mode 100644 index 000000000000..3d9475dd4382 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretBase(Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString, AzureKeyVaultSecretReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} + } + + def __init__(self, **kwargs): + super(SecretBase, self).__init__(**kwargs) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py new file mode 100644 index 000000000000..29403e61b245 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secret_base_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
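# --- Illustrative usage (not part of the generated patch): a ScriptAction as
# used with HDInsight on-demand clusters; the URI and role value below are
# placeholders.
from azure.mgmt.datafactory.models import ScriptAction

setup_action = ScriptAction(
    name='install-deps',
    uri='https://example.blob.core.windows.net/scripts/install.sh',
    roles='workernode',  # typed as object in the generated model
)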
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SecretBase(Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString, AzureKeyVaultSecretReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} + } + + def __init__(self, **kwargs) -> None: + super(SecretBase, self).__init__(**kwargs) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py new file mode 100644 index 000000000000..bec430fdf8a4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .secret_base import SecretBase + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SecureString, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.type = 'SecureString' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py new file mode 100644 index 000000000000..d7ebd5e13e78 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/secure_string_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .secret_base_py3 import SecretBase + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be + masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, *, value: str, **kwargs) -> None: + super(SecureString, self).__init__(**kwargs) + self.value = value + self.type = 'SecureString' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py new file mode 100644 index 000000000000..fc56f8e8a799 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference import DependencyReference + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
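# --- Illustrative usage (not part of the generated patch): SecureString is
# the simplest SecretBase subtype; per the docstring, the service masks the
# value with asterisks on Get/List calls, so it cannot be read back once set.
from azure.mgmt.datafactory.models import SecureString

secret = SecureString(value='my-connection-secret')  # placeholder value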
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) + self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py new file mode 100644 index 000000000000..1dd1e575c2e8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_dependency_tumbling_window_trigger_reference_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference_py3 import DependencyReference + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling + window when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, offset: str, size: str=None, **kwargs) -> None: + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.offset = offset + self.size = size + self.type = 'SelfDependencyTumblingWindowTriggerReference' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py new file mode 100644 index 000000000000..20744f02306d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .integration_runtime import IntegrationRuntime + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Integration runtime description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param linked_info: + :type linked_info: + ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.linked_info = kwargs.get('linked_info', None) + self.type = 'SelfHosted' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py new file mode 100644 index 000000000000..1491a80dc19a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py @@ -0,0 +1,139 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
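# --- Illustrative usage (not part of the generated patch): offset and size
# must satisfy the timespan pattern and 8-15 character length enforced in
# _validation above; '01:00:00' is a valid one-hour timespan.
from azure.mgmt.datafactory.models import (
    SelfDependencyTumblingWindowTriggerReference)

self_dep = SelfDependencyTumblingWindowTriggerReference(
    offset='01:00:00',  # shift applied to the window start when evaluating
    size='01:00:00',    # optional; defaults to the trigger's own frequency
)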
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+ :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py new file mode 100644 index 000000000000..59b703737a5d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node_py3.py @@ -0,0 +1,139 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SelfHostedIntegrationRuntimeNode(Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration + runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values + include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading', + 'Initializing', 'InitializeFailed' + :vartype status: str or + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was + registered in ISO8601 format. + :vartype register_time: datetime + :ivar last_connect_time: The most recent time at which the integration + runtime was connected in ISO8601 format. + :vartype last_connect_time: datetime + :ivar expiry_time: The time at which the integration runtime will expire + in ISO8601 format. + :vartype expiry_time: datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: datetime + :ivar last_update_result: The result of the last integration runtime node + update. Possible values include: 'None', 'Succeed', 'Fail' + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime + node update start. + :vartype last_start_update_time: datetime + :ivar last_end_update_time: The last time for the integration runtime node + update end. + :vartype last_end_update_time: datetime + :ivar is_active_dispatcher: Indicates whether this node is the active + dispatcher for integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration + runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration + runtime. 
+    :vartype max_concurrent_jobs: int
+    """
+
+    _validation = {
+        'node_name': {'readonly': True},
+        'machine_name': {'readonly': True},
+        'host_service_uri': {'readonly': True},
+        'status': {'readonly': True},
+        'capabilities': {'readonly': True},
+        'version_status': {'readonly': True},
+        'version': {'readonly': True},
+        'register_time': {'readonly': True},
+        'last_connect_time': {'readonly': True},
+        'expiry_time': {'readonly': True},
+        'last_start_time': {'readonly': True},
+        'last_stop_time': {'readonly': True},
+        'last_update_result': {'readonly': True},
+        'last_start_update_time': {'readonly': True},
+        'last_end_update_time': {'readonly': True},
+        'is_active_dispatcher': {'readonly': True},
+        'concurrent_jobs_limit': {'readonly': True},
+        'max_concurrent_jobs': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'node_name': {'key': 'nodeName', 'type': 'str'},
+        'machine_name': {'key': 'machineName', 'type': 'str'},
+        'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'capabilities': {'key': 'capabilities', 'type': '{str}'},
+        'version_status': {'key': 'versionStatus', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'register_time': {'key': 'registerTime', 'type': 'iso-8601'},
+        'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'},
+        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
+        'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'},
+        'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'},
+        'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'},
+        'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'},
+        'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'},
+        'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'},
+        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+        'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
+    }
+
+    def __init__(self, *, additional_properties=None, **kwargs) -> None:
+        super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.node_name = None
+        self.machine_name = None
+        self.host_service_uri = None
+        self.status = None
+        self.capabilities = None
+        self.version_status = None
+        self.version = None
+        self.register_time = None
+        self.last_connect_time = None
+        self.expiry_time = None
+        self.last_start_time = None
+        self.last_stop_time = None
+        self.last_update_result = None
+        self.last_start_update_time = None
+        self.last_end_update_time = None
+        self.is_active_dispatcher = None
+        self.concurrent_jobs_limit = None
+        self.max_concurrent_jobs = None
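For orientation, a minimal sketch of reading these server-populated node properties (illustrative only: the authenticated client and resource names are assumptions, and `get_status` is the integration runtime operation this SDK exposes for self-hosted runtimes):

    # Assumes `client` is an authenticated DataFactoryManagementClient and
    # the placeholder resource names exist; every node field is read-only.
    status_response = client.integration_runtimes.get_status(
        'example-rg', 'example-factory', 'example-selfhosted-ir')
    for node in status_response.properties.nodes or []:
        print(node.node_name, node.status, node.version,
              node.concurrent_jobs_limit)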
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
new file mode 100644
index 000000000000..a25d04373849
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_py3 import IntegrationRuntime
+
+
+class SelfHostedIntegrationRuntime(IntegrationRuntime):
+    """Self-hosted integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Integration runtime description.
+    :type description: str
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param linked_info:
+    :type linked_info:
+     ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'},
+    }
+
+    def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None:
+        super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
+        self.linked_info = linked_info
+        self.type = 'SelfHosted'
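A short usage sketch for the model above (illustrative; only `description` and `linked_info` are caller-settable, and the constructor pins `type` to 'SelfHosted'):

    from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime

    # Define a self-hosted runtime; node registration happens out-of-band
    # through the installed integration runtime, not through this model.
    runtime = SelfHostedIntegrationRuntime(
        description='Runtime hosted on an on-premises VM')
    assert runtime.type == 'SelfHosted'
    # Typically passed to integration_runtimes.create_or_update (newer SDK
    # versions wrap it in a resource envelope first).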
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py
new file mode 100644
index 000000000000..5dd9995987d9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status.py
@@ -0,0 +1,146 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_status import IntegrationRuntimeStatus
+
+
+class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+    """Self-hosted integration runtime status.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of integration runtime. Possible values include:
+     'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar create_time: The time at which the integration runtime was created,
+     in ISO8601 format.
+    :vartype create_time: datetime
+    :ivar task_queue_id: The task queue id of the integration runtime.
+    :vartype task_queue_id: str
+    :ivar internal_channel_encryption: It is used to set the encryption mode
+     for the node-to-node communication channel (when more than 2 self-hosted
+     integration runtime nodes exist). Possible values include: 'NotSet',
+     'SslEncrypted', 'NotEncrypted'
+    :vartype internal_channel_encryption: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode
+    :ivar version: Version of the integration runtime.
+    :vartype version: str
+    :param nodes: The list of nodes for this integration runtime.
+    :type nodes:
+     list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode]
+    :ivar scheduled_update_date: The date at which the integration runtime
+     will be scheduled to update, in ISO8601 format.
+    :vartype scheduled_update_date: datetime
+    :ivar update_delay_offset: The time in the date scheduled by the service
+     to update the integration runtime, e.g., PT03H is 3 hours.
+    :vartype update_delay_offset: str
+    :ivar local_time_zone_offset: The local time zone offset in hours.
+    :vartype local_time_zone_offset: str
+    :ivar capabilities: Object with additional information about integration
+     runtime capabilities.
+    :vartype capabilities: dict[str, str]
+    :ivar service_urls: The URLs for the services used in integration runtime
+     backend service.
+    :vartype service_urls: list[str]
+    :ivar auto_update: Whether self-hosted integration runtime auto-update has
+     been turned on. Possible values include: 'On', 'Off'
+    :vartype auto_update: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+    :ivar version_status: Status of the integration runtime version.
+    :vartype version_status: str
+    :param links: The list of linked integration runtimes that are created to
+     share with this integration runtime.
+    :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime]
+    :ivar pushed_version: The version that the integration runtime is going to
+     update to.
+    :vartype pushed_version: str
+    :ivar latest_version: The latest version on download center.
+    :vartype latest_version: str
+    :ivar auto_update_eta: The estimated time when the self-hosted integration
+     runtime will be updated.
+    :vartype auto_update_eta: datetime
+    """
+
+    _validation = {
+        'data_factory_name': {'readonly': True},
+        'state': {'readonly': True},
+        'type': {'required': True},
+        'create_time': {'readonly': True},
+        'task_queue_id': {'readonly': True},
+        'internal_channel_encryption': {'readonly': True},
+        'version': {'readonly': True},
+        'scheduled_update_date': {'readonly': True},
+        'update_delay_offset': {'readonly': True},
+        'local_time_zone_offset': {'readonly': True},
+        'capabilities': {'readonly': True},
+        'service_urls': {'readonly': True},
+        'auto_update': {'readonly': True},
+        'version_status': {'readonly': True},
+        'pushed_version': {'readonly': True},
+        'latest_version': {'readonly': True},
+        'auto_update_eta': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'state': {'key': 'state', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+        'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'},
+        'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'},
+        'version': {'key': 'typeProperties.version', 'type': 'str'},
+        'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'},
+        'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'},
+        'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'},
+        'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'},
+        'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'},
+        'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'},
+        'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'},
+        'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'},
+        'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'},
+        'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'},
+        'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'},
+        'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs)
+        self.create_time = None
+        self.task_queue_id = None
+        self.internal_channel_encryption = None
+        self.version = None
+        self.nodes = kwargs.get('nodes', None)
+        self.scheduled_update_date = None
+        self.update_delay_offset = None
+        self.local_time_zone_offset = None
+        self.capabilities = None
+        self.service_urls = None
+        self.auto_update = None
+        self.version_status = None
+        self.links = kwargs.get('links', None)
+        self.pushed_version = None
+        self.latest_version = None
+        self.auto_update_eta = None
+        self.type = 'SelfHosted'
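A sketch of polling this status model (illustrative; resource names are placeholders and the `.properties` access assumes the get_status response shape of this SDK):

    # All properties printed below are read-only and populated by the
    # service; only `nodes` and `links` are caller-settable on the model.
    status = client.integration_runtimes.get_status(
        'example-rg', 'example-factory', 'example-selfhosted-ir').properties
    print(status.state, status.version, status.auto_update,
          status.scheduled_update_date, status.auto_update_eta)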
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
new file mode 100644
index 000000000000..acad7661fc15
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_status_py3.py
@@ -0,0 +1,146 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .integration_runtime_status_py3 import IntegrationRuntimeStatus
+
+
+class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+    """Self-hosted integration runtime status.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :ivar data_factory_name: The data factory name which the integration
+     runtime belongs to.
+    :vartype data_factory_name: str
+    :ivar state: The state of integration runtime. Possible values include:
+     'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
+     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
+    :vartype state: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :ivar create_time: The time at which the integration runtime was created,
+     in ISO8601 format.
+    :vartype create_time: datetime
+    :ivar task_queue_id: The task queue id of the integration runtime.
+    :vartype task_queue_id: str
+    :ivar internal_channel_encryption: It is used to set the encryption mode
+     for the node-to-node communication channel (when more than 2 self-hosted
+     integration runtime nodes exist). Possible values include: 'NotSet',
+     'SslEncrypted', 'NotEncrypted'
+    :vartype internal_channel_encryption: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode
+    :ivar version: Version of the integration runtime.
+    :vartype version: str
+    :param nodes: The list of nodes for this integration runtime.
+    :type nodes:
+     list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode]
+    :ivar scheduled_update_date: The date at which the integration runtime
+     will be scheduled to update, in ISO8601 format.
+    :vartype scheduled_update_date: datetime
+    :ivar update_delay_offset: The time in the date scheduled by the service
+     to update the integration runtime, e.g., PT03H is 3 hours.
+    :vartype update_delay_offset: str
+    :ivar local_time_zone_offset: The local time zone offset in hours.
+    :vartype local_time_zone_offset: str
+    :ivar capabilities: Object with additional information about integration
+     runtime capabilities.
+    :vartype capabilities: dict[str, str]
+    :ivar service_urls: The URLs for the services used in integration runtime
+     backend service.
+    :vartype service_urls: list[str]
+    :ivar auto_update: Whether self-hosted integration runtime auto-update has
+     been turned on. Possible values include: 'On', 'Off'
+    :vartype auto_update: str or
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+    :ivar version_status: Status of the integration runtime version.
+    :vartype version_status: str
+    :param links: The list of linked integration runtimes that are created to
+     share with this integration runtime.
+    :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime]
+    :ivar pushed_version: The version that the integration runtime is going to
+     update to.
+    :vartype pushed_version: str
+    :ivar latest_version: The latest version on download center.
+    :vartype latest_version: str
+    :ivar auto_update_eta: The estimated time when the self-hosted integration
+     runtime will be updated.
+    :vartype auto_update_eta: datetime
+    """
+
+    _validation = {
+        'data_factory_name': {'readonly': True},
+        'state': {'readonly': True},
+        'type': {'required': True},
+        'create_time': {'readonly': True},
+        'task_queue_id': {'readonly': True},
+        'internal_channel_encryption': {'readonly': True},
+        'version': {'readonly': True},
+        'scheduled_update_date': {'readonly': True},
+        'update_delay_offset': {'readonly': True},
+        'local_time_zone_offset': {'readonly': True},
+        'capabilities': {'readonly': True},
+        'service_urls': {'readonly': True},
+        'auto_update': {'readonly': True},
+        'version_status': {'readonly': True},
+        'pushed_version': {'readonly': True},
+        'latest_version': {'readonly': True},
+        'auto_update_eta': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'state': {'key': 'state', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+        'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'},
+        'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'},
+        'version': {'key': 'typeProperties.version', 'type': 'str'},
+        'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'},
+        'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'},
+        'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'},
+        'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'},
+        'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'},
+        'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'},
+        'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'},
+        'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'},
+        'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'},
+        'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'},
+        'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'},
+        'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'},
+    }
+
+    def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None:
+        super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.create_time = None
+        self.task_queue_id = None
+        self.internal_channel_encryption = None
+        self.version = None
+        self.nodes = nodes
+        self.scheduled_update_date = None
+        self.update_delay_offset = None
+        self.local_time_zone_offset = None
+        self.capabilities = None
+        self.service_urls = None
+        self.auto_update = None
+        self.version_status = None
+        self.links = links
+        self.pushed_version = None
+        self.latest_version = None
+        self.auto_update_eta = None
+        self.type = 'SelfHosted'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py
new file mode 100644
index 000000000000..4d42f575e769
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service.py
@@ -0,0 +1,106 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class ServiceNowLinkedService(LinkedService):
+    """ServiceNow server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
+     <instance>.service-now.com)
+    :type endpoint: object
+    :param authentication_type: Required. The authentication type to use.
+     Possible values include: 'Basic', 'OAuth2'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
+    :param username: The user name used to connect to the ServiceNow server
+     for Basic and OAuth2 authentication.
+    :type username: object
+    :param password: The password corresponding to the user name for Basic and
+     OAuth2 authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param client_id: The client id for OAuth2 authentication.
+    :type client_id: object
+    :param client_secret: The client secret for OAuth2 authentication.
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'endpoint': {'required': True},
+        'authentication_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ServiceNowLinkedService, self).__init__(**kwargs)
+        self.endpoint = kwargs.get('endpoint', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.client_id = kwargs.get('client_id', None)
+        self.client_secret = kwargs.get('client_secret', None)
+        self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+        self.use_host_verification = kwargs.get('use_host_verification', None)
+        self.use_peer_verification = kwargs.get('use_peer_verification', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'ServiceNow'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
new file mode 100644
index 000000000000..b9d166f241d6
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_linked_service_py3.py
@@ -0,0 +1,106 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class ServiceNowLinkedService(LinkedService):
+    """ServiceNow server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param endpoint: Required. The endpoint of the ServiceNow server. (i.e.
+     <instance>.service-now.com)
+    :type endpoint: object
+    :param authentication_type: Required. The authentication type to use.
+     Possible values include: 'Basic', 'OAuth2'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType
+    :param username: The user name used to connect to the ServiceNow server
+     for Basic and OAuth2 authentication.
+    :type username: object
+    :param password: The password corresponding to the user name for Basic and
+     OAuth2 authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param client_id: The client id for OAuth2 authentication.
+    :type client_id: object
+    :param client_secret: The client secret for OAuth2 authentication.
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'endpoint': {'required': True},
+        'authentication_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
+        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+        super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.endpoint = endpoint
+        self.authentication_type = authentication_type
+        self.username = username
+        self.password = password
+        self.client_id = client_id
+        self.client_secret = client_secret
+        self.use_encrypted_endpoints = use_encrypted_endpoints
+        self.use_host_verification = use_host_verification
+        self.use_peer_verification = use_peer_verification
+        self.encrypted_credential = encrypted_credential
+        self.type = 'ServiceNow'
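A minimal construction sketch for the linked service defined above (illustrative; the endpoint, user, and secret are placeholders, and SecureString is this SDK's inline SecretBase implementation):

    from azure.mgmt.datafactory.models import (
        SecureString, ServiceNowLinkedService)

    service_now_ls = ServiceNowLinkedService(
        endpoint='example.service-now.com',         # <instance>.service-now.com
        authentication_type='Basic',
        username='integration.user',
        password=SecureString(value='<password>'),  # placeholder secret
    )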
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py
new file mode 100644
index 000000000000..a9821ba0fd10
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class ServiceNowObjectDataset(Dataset):
+    """ServiceNow server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ServiceNowObjectDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'ServiceNowObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
new file mode 100644
index 000000000000..fcd2fd537a31
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class ServiceNowObjectDataset(Dataset):
+    """ServiceNow server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
+        super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+        self.table_name = table_name
+        self.type = 'ServiceNowObject'
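A sketch of binding the dataset above to a linked service (illustrative; the reference name and ServiceNow table are placeholders):

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, ServiceNowObjectDataset)

    incidents = ServiceNowObjectDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='ServiceNowLinkedService'),
        table_name='incident',  # the ServiceNow table to expose
    )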
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py
new file mode 100644
index 000000000000..16b10bb8de5e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class ServiceNowSource(CopySource):
+    """A copy activity ServiceNow server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ServiceNowSource, self).__init__(**kwargs)
+        self.query = kwargs.get('query', None)
+        self.type = 'ServiceNowSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
new file mode 100644
index 000000000000..20d1a64d04d3
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/service_now_source_py3.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source_py3 import CopySource
+
+
+class ServiceNowSource(CopySource):
+    """A copy activity ServiceNow server source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'ServiceNowSource'
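A sketch of the source in context (illustrative; the dataset reference names, the blob sink, and the query are placeholders for whatever the pipeline actually targets):

    from azure.mgmt.datafactory.models import (
        BlobSink, CopyActivity, DatasetReference, ServiceNowSource)

    copy_incidents = CopyActivity(
        name='CopyIncidentsToBlob',
        inputs=[DatasetReference(reference_name='ServiceNowIncidents')],
        outputs=[DatasetReference(reference_name='IncidentsBlob')],
        source=ServiceNowSource(query='company=ACME'),  # placeholder query
        sink=BlobSink(),
    )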
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py
new file mode 100644
index 000000000000..e8dd1690862d
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity import ControlActivity
+
+
+class SetVariableActivity(ControlActivity):
+    """Set value for a Variable.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param variable_name: Name of the variable whose value needs to be set.
+    :type variable_name: str
+    :param value: Value to be set. Could be a static value or an Expression.
+    :type value: object
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'},
+        'value': {'key': 'typeProperties.value', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SetVariableActivity, self).__init__(**kwargs)
+        self.variable_name = kwargs.get('variable_name', None)
+        self.value = kwargs.get('value', None)
+        self.type = 'SetVariable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
new file mode 100644
index 000000000000..e045abee3dfb
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/set_variable_activity_py3.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class SetVariableActivity(ControlActivity):
+    """Set value for a Variable.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param variable_name: Name of the variable whose value needs to be set.
+    :type variable_name: str
+    :param value: Value to be set. Could be a static value or an Expression.
+    :type value: object
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'},
+        'value': {'key': 'typeProperties.value', 'type': 'object'},
+    }
+
+    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None:
+        super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.variable_name = variable_name
+        self.value = value
+        self.type = 'SetVariable'
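A sketch for the activity above (illustrative; the variable is assumed to be declared on the enclosing pipeline):

    from azure.mgmt.datafactory.models import SetVariableActivity

    set_run_label = SetVariableActivity(
        name='SetRunLabel',
        variable_name='runLabel',
        value='nightly-load',  # static here; an Expression object also works
    )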
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py
new file mode 100644
index 000000000000..5b8fd4e42ba2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location import DatasetLocation
+
+
+class SftpLocation(DatasetLocation):
+    """The location of SFTP dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SftpLocation, self).__init__(**kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
new file mode 100644
index 000000000000..c5e2feafa971
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_location_py3.py
@@ -0,0 +1,45 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_location_py3 import DatasetLocation
+
+
+class SftpLocation(DatasetLocation):
+    """The location of SFTP dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of dataset storage location.
+    :type type: str
+    :param folder_path: Specify the folder path of the dataset. Type: string
+     (or Expression with resultType string).
+    :type folder_path: object
+    :param file_name: Specify the file name of the dataset. Type: string (or
+     Expression with resultType string).
+    :type file_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_path': {'key': 'folderPath', 'type': 'object'},
+        'file_name': {'key': 'fileName', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
+        super(SftpLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs)
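A sketch for the location models above (illustrative; the paths are placeholders, the py3 constructor requires the `type` discriminator explicitly, and the 'SftpLocation' string follows the class-name convention used elsewhere in this patch):

    from azure.mgmt.datafactory.models import SftpLocation

    drop_folder = SftpLocation(
        type='SftpLocation',        # assumed discriminator value
        folder_path='/outbound/daily',
        file_name='extract.csv',
    )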
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py
new file mode 100644
index 000000000000..e0cd7ea8fda1
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting import ConnectorReadSetting
+
+
+class SftpReadSetting(ConnectorReadSetting):
+    """Sftp read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: Sftp wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of file's modified datetime. Type:
+     string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SftpReadSetting, self).__init__(**kwargs)
+        self.recursive = kwargs.get('recursive', None)
+        self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
+        self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+        self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
+        self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py
new file mode 100644
index 000000000000..39beb756905a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_read_setting_py3.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .connector_read_setting_py3 import ConnectorReadSetting
+
+
+class SftpReadSetting(ConnectorReadSetting):
+    """Sftp read settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param type: Required. The read setting type.
+    :type type: str
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param recursive: If true, files under the folder path will be read
+     recursively. Default is true. Type: boolean (or Expression with resultType
+     boolean).
+    :type recursive: object
+    :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or
+     Expression with resultType string).
+    :type wildcard_folder_path: object
+    :param wildcard_file_name: Sftp wildcardFileName. Type: string (or
+     Expression with resultType string).
+    :type wildcard_file_name: object
+    :param modified_datetime_start: The start of file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_start: object
+    :param modified_datetime_end: The end of the file's modified datetime.
+     Type: string (or Expression with resultType string).
+    :type modified_datetime_end: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'recursive': {'key': 'recursive', 'type': 'object'},
+        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
+        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
+        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
+    }
+
+    def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
+        super(SftpReadSetting, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.recursive = recursive
+        self.wildcard_folder_path = wildcard_folder_path
+        self.wildcard_file_name = wildcard_file_name
+        self.modified_datetime_start = modified_datetime_start
+        self.modified_datetime_end = modified_datetime_end
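A usage sketch for the read settings above (illustrative; values are placeholders): wildcard matching plus a modified-time window is the typical way to pick up only newly arrived files.

    from azure.mgmt.datafactory.models import SftpReadSetting

    # Read *.csv files under inbound/*, limited to files modified on 2019-05-01.
    read_settings = SftpReadSetting(
        type='SftpReadSetting',
        recursive=True,
        wildcard_folder_path='inbound/*',
        wildcard_file_name='*.csv',
        modified_datetime_start='2019-05-01T00:00:00Z',
        modified_datetime_end='2019-05-02T00:00:00Z',
    )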
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py
new file mode 100644
index 000000000000..aa4c535fc514
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service.py
@@ -0,0 +1,119 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SftpServerLinkedService(LinkedService):
+    """A linked service for an SSH File Transfer Protocol (SFTP) server.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The SFTP server host name. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param port: The TCP port number that the SFTP server uses to listen for
+     client connections. Default value is 22. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type port: object
+    :param authentication_type: The authentication type to be used to connect
+     to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SftpAuthenticationType
+    :param user_name: The username used to log on to the SFTP server. Type:
+     string (or Expression with resultType string).
+    :type user_name: object
+    :param password: Password to log on to the SFTP server for Basic
+     authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param private_key_path: The SSH private key file path for SshPublicKey
+     authentication. Only valid for on-premises copy. For on-premises copy
+     with SshPublicKey authentication, either PrivateKeyPath or
+     PrivateKeyContent should be specified. The SSH private key should be in
+     OpenSSH format. Type: string (or Expression with resultType string).
+    :type private_key_path: object
+    :param private_key_content: Base64 encoded SSH private key content for
+     SshPublicKey authentication. For on-premises copy with SshPublicKey
+     authentication, either PrivateKeyPath or PrivateKeyContent should be
+     specified. The SSH private key should be in OpenSSH format.
+    :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase
+    :param pass_phrase: The password to decrypt the SSH private key if the
+     SSH private key is encrypted.
+    :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase
+    :param skip_host_key_validation: If true, skip the SSH host key
+     validation. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type skip_host_key_validation: object
+    :param host_key_fingerprint: The host key fingerprint of the SFTP server.
+     When SkipHostKeyValidation is false, HostKeyFingerprint should be
+     specified. Type: string (or Expression with resultType string).
+    :type host_key_fingerprint: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'},
+        'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'},
+        'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'},
+        'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'},
+        'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SftpServerLinkedService, self).__init__(**kwargs)
+        self.host = kwargs.get('host', None)
+        self.port = kwargs.get('port', None)
+        self.authentication_type = kwargs.get('authentication_type', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.private_key_path = kwargs.get('private_key_path', None)
+        self.private_key_content = kwargs.get('private_key_content', None)
+        self.pass_phrase = kwargs.get('pass_phrase', None)
+        self.skip_host_key_validation = kwargs.get('skip_host_key_validation', None)
+        self.host_key_fingerprint = kwargs.get('host_key_fingerprint', None)
+        self.type = 'Sftp'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
new file mode 100644
index 000000000000..7decd7781348
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sftp_server_linked_service_py3.py
@@ -0,0 +1,119 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SftpServerLinkedService(LinkedService):
+    """A linked service for an SSH File Transfer Protocol (SFTP) server.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The SFTP server host name. Type: string (or
+     Expression with resultType string).
+    :type host: object
+    :param port: The TCP port number that the SFTP server uses to listen for
+     client connections. Default value is 22. Type: integer (or Expression
+     with resultType integer), minimum: 0.
+    :type port: object
+    :param authentication_type: The authentication type to be used to connect
+     to the SFTP server. Possible values include: 'Basic', 'SshPublicKey'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SftpAuthenticationType
+    :param user_name: The username used to log on to the SFTP server. Type:
+     string (or Expression with resultType string).
+    :type user_name: object
+    :param password: Password to log on to the SFTP server for Basic
+     authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    :param private_key_path: The SSH private key file path for SshPublicKey
+     authentication. Only valid for on-premises copy. For on-premises copy
+     with SshPublicKey authentication, either PrivateKeyPath or
+     PrivateKeyContent should be specified. The SSH private key should be in
+     OpenSSH format. Type: string (or Expression with resultType string).
+    :type private_key_path: object
+    :param private_key_content: Base64 encoded SSH private key content for
+     SshPublicKey authentication. For on-premises copy with SshPublicKey
+     authentication, either PrivateKeyPath or PrivateKeyContent should be
+     specified. The SSH private key should be in OpenSSH format.
+    :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase
+    :param pass_phrase: The password to decrypt the SSH private key if the
+     SSH private key is encrypted.
+    :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase
+    :param skip_host_key_validation: If true, skip the SSH host key
+     validation. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type skip_host_key_validation: object
+    :param host_key_fingerprint: The host key fingerprint of the SFTP server.
+     When SkipHostKeyValidation is false, HostKeyFingerprint should be
+     specified. Type: string (or Expression with resultType string).
+    :type host_key_fingerprint: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'},
+        'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'},
+        'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'},
+        'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'},
+        'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None:
+        super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.port = port
+        self.authentication_type = authentication_type
+        self.user_name = user_name
+        self.password = password
+        self.encrypted_credential = encrypted_credential
+        self.private_key_path = private_key_path
+        self.private_key_content = private_key_content
+        self.pass_phrase = pass_phrase
+        self.skip_host_key_validation = skip_host_key_validation
+        self.host_key_fingerprint = host_key_fingerprint
+        self.type = 'Sftp'
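A usage sketch for the linked service above (illustrative; host, key, and fingerprint values are placeholders, and SecureString is the in-box SecretBase implementation used here for inline secrets):

    from azure.mgmt.datafactory.models import SftpServerLinkedService, SecureString

    # SshPublicKey authentication with host key validation enabled: when
    # skip_host_key_validation is false, host_key_fingerprint must be set.
    sftp_ls = SftpServerLinkedService(
        host='sftp.example.com',
        port=22,
        authentication_type='SshPublicKey',
        user_name='loader',
        private_key_content=SecureString(value='<base64-encoded OpenSSH key>'),
        pass_phrase=SecureString(value='<key passphrase>'),
        skip_host_key_validation=False,
        host_key_fingerprint='<server fingerprint>',
    )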
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py
new file mode 100644
index 000000000000..ee5311dceb7a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class ShopifyLinkedService(LinkedService):
+    """Shopify Service linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The endpoint of the Shopify server (i.e.
+     mystore.myshopify.com).
+    :type host: object
+    :param access_token: The API access token that can be used to access
+     Shopify’s data. The token won't expire if it is in offline mode.
+    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ShopifyLinkedService, self).__init__(**kwargs)
+        self.host = kwargs.get('host', None)
+        self.access_token = kwargs.get('access_token', None)
+        self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+        self.use_host_verification = kwargs.get('use_host_verification', None)
+        self.use_peer_verification = kwargs.get('use_peer_verification', None)
+        self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.type = 'Shopify'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
new file mode 100644
index 000000000000..ea6189277552
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_linked_service_py3.py
@@ -0,0 +1,86 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class ShopifyLinkedService(LinkedService):
+    """Shopify Service linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. The endpoint of the Shopify server (i.e.
+     mystore.myshopify.com).
+    :type host: object
+    :param access_token: The API access token that can be used to access
+     Shopify’s data. The token won't expire if it is in offline mode.
+    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param use_encrypted_endpoints: Specifies whether the data source
+     endpoints are encrypted using HTTPS. The default value is true.
+    :type use_encrypted_endpoints: object
+    :param use_host_verification: Specifies whether to require the host name
+     in the server's certificate to match the host name of the server when
+     connecting over SSL. The default value is true.
+    :type use_host_verification: object
+    :param use_peer_verification: Specifies whether to verify the identity of
+     the server when connecting over SSL. The default value is true.
+    :type use_peer_verification: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
+        super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.access_token = access_token
+        self.use_encrypted_endpoints = use_encrypted_endpoints
+        self.use_host_verification = use_host_verification
+        self.use_peer_verification = use_peer_verification
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Shopify'
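A usage sketch for the two Shopify linked service variants above (illustrative; the host and token values are placeholders):

    from azure.mgmt.datafactory.models import ShopifyLinkedService, SecureString

    # An offline-mode API access token does not expire, so no refresh logic
    # is needed on the factory side.
    shopify_ls = ShopifyLinkedService(
        host='mystore.myshopify.com',
        access_token=SecureString(value='<api-access-token>'),
        use_encrypted_endpoints=True,
    )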
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
new file mode 100644
index 000000000000..ab3e475b9c97
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class ShopifyObjectDataset(Dataset):
+    """Shopify Service dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ShopifyObjectDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'ShopifyObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
new file mode 100644
index 000000000000..98b9c43c21e8
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class ShopifyObjectDataset(Dataset):
+    """Shopify Service dataset.
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ShopifyObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py new file mode 100644 index 000000000000..d4596976d459 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ShopifySource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ShopifySource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py new file mode 100644 index 000000000000..6b56edd62904 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/shopify_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ShopifySource(CopySource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'ShopifySource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py
new file mode 100644
index 000000000000..4f9ab49a7bba
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service.py
@@ -0,0 +1,131 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class SparkLinkedService(LinkedService):
+    """Spark Server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. IP address or host name of the Spark server.
+    :type host: object
+    :param port: Required. The TCP port that the Spark server uses to listen
+     for client connections.
+    :type port: object
+    :param server_type: The type of Spark server. Possible values include:
+     'SharkServer', 'SharkServer2', 'SparkThriftServer'
+    :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType
+    :param thrift_transport_protocol: The transport protocol to use in the
+     Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP '
+    :type thrift_transport_protocol: str or
+     ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol
+    :param authentication_type: Required. The authentication method used to
+     access the Spark server. Possible values include: 'Anonymous',
+     'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SparkAuthenticationType
+    :param username: The user name that you use to access Spark Server.
+    :type username: object
+    :param password: The password corresponding to the user name that you
+     provided in the Username field.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param http_path: The partial URL corresponding to the Spark server.
+    :type http_path: object
+    :param enable_ssl: Specifies whether the connections to the server are
+     encrypted using SSL. The default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing
+     trusted CA certificates for verifying the server when connecting over
+     SSL. This property can only be set when using SSL on self-hosted IR. The
+     default value is the cacerts.pem file installed with the IR.
+    :type trusted_cert_path: object
+    :param use_system_trust_store: Specifies whether to use a CA certificate
+     from the system trust store or from a specified PEM file. The default
+     value is false.
+    :type use_system_trust_store: object
+    :param allow_host_name_cn_mismatch: Specifies whether to require a
+     CA-issued SSL certificate name to match the host name of the server when
+     connecting over SSL. The default value is false.
+    :type allow_host_name_cn_mismatch: object
+    :param allow_self_signed_server_cert: Specifies whether to allow
+     self-signed certificates from the server. The default value is false.
+    :type allow_self_signed_server_cert: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'port': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, + 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, + 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, + 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, + 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, + 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, + 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.port = kwargs.get('port', None) + self.server_type = kwargs.get('server_type', None) + self.thrift_transport_protocol = kwargs.get('thrift_transport_protocol', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.http_path = kwargs.get('http_path', None) + self.enable_ssl = kwargs.get('enable_ssl', None) + self.trusted_cert_path = kwargs.get('trusted_cert_path', None) + self.use_system_trust_store = kwargs.get('use_system_trust_store', None) + self.allow_host_name_cn_mismatch = kwargs.get('allow_host_name_cn_mismatch', None) + self.allow_self_signed_server_cert = kwargs.get('allow_self_signed_server_cert', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Spark' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py new file mode 100644 index 000000000000..f6433b6ab187 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_linked_service_py3.py @@ -0,0 +1,131 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class SparkLinkedService(LinkedService):
+    """Spark Server linked service.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     linked service.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param host: Required. IP address or host name of the Spark server.
+    :type host: object
+    :param port: Required. The TCP port that the Spark server uses to listen
+     for client connections.
+    :type port: object
+    :param server_type: The type of Spark server. Possible values include:
+     'SharkServer', 'SharkServer2', 'SparkThriftServer'
+    :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType
+    :param thrift_transport_protocol: The transport protocol to use in the
+     Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP '
+    :type thrift_transport_protocol: str or
+     ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol
+    :param authentication_type: Required. The authentication method used to
+     access the Spark server. Possible values include: 'Anonymous',
+     'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService'
+    :type authentication_type: str or
+     ~azure.mgmt.datafactory.models.SparkAuthenticationType
+    :param username: The user name that you use to access Spark Server.
+    :type username: object
+    :param password: The password corresponding to the user name that you
+     provided in the Username field.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    :param http_path: The partial URL corresponding to the Spark server.
+    :type http_path: object
+    :param enable_ssl: Specifies whether the connections to the server are
+     encrypted using SSL. The default value is false.
+    :type enable_ssl: object
+    :param trusted_cert_path: The full path of the .pem file containing
+     trusted CA certificates for verifying the server when connecting over
+     SSL. This property can only be set when using SSL on self-hosted IR. The
+     default value is the cacerts.pem file installed with the IR.
+    :type trusted_cert_path: object
+    :param use_system_trust_store: Specifies whether to use a CA certificate
+     from the system trust store or from a specified PEM file. The default
+     value is false.
+    :type use_system_trust_store: object
+    :param allow_host_name_cn_mismatch: Specifies whether to require a
+     CA-issued SSL certificate name to match the host name of the server when
+     connecting over SSL. The default value is false.
+    :type allow_host_name_cn_mismatch: object
+    :param allow_self_signed_server_cert: Specifies whether to allow
+     self-signed certificates from the server. The default value is false.
+    :type allow_self_signed_server_cert: object
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'host': {'required': True},
+        'port': {'required': True},
+        'authentication_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'host': {'key': 'typeProperties.host', 'type': 'object'},
+        'port': {'key': 'typeProperties.port', 'type': 'object'},
+        'server_type': {'key': 'typeProperties.serverType', 'type': 'str'},
+        'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'},
+        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+        'username': {'key': 'typeProperties.username', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+        'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'},
+        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
+        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
+        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
+        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
+        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None:
+        super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.host = host
+        self.port = port
+        self.server_type = server_type
+        self.thrift_transport_protocol = thrift_transport_protocol
+        self.authentication_type = authentication_type
+        self.username = username
+        self.password = password
+        self.http_path = http_path
+        self.enable_ssl = enable_ssl
+        self.trusted_cert_path = trusted_cert_path
+        self.use_system_trust_store = use_system_trust_store
+        self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch
+        self.allow_self_signed_server_cert = allow_self_signed_server_cert
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Spark'
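A usage sketch for the Spark linked service above (illustrative; all values are placeholders). Note that the generated enum value for the HTTP transport, 'HTTP ', carries a trailing space, so passing the SparkThriftTransportProtocol member is safer than a hand-typed string:

    from azure.mgmt.datafactory.models import (
        SparkLinkedService,
        SparkThriftTransportProtocol,
        SecureString,
    )

    spark_ls = SparkLinkedService(
        host='spark.example.com',
        port=10001,
        server_type='SparkThriftServer',
        thrift_transport_protocol=SparkThriftTransportProtocol.http,
        authentication_type='UsernameAndPassword',
        username='analyst',
        password=SecureString(value='<password>'),
        enable_ssl=True,
        http_path='cliservice',
    )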
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py
new file mode 100644
index 000000000000..8d1493ea9c7f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class SparkObjectDataset(Dataset):
+    """Spark Server dataset.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType:
+     DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param folder: The folder that this Dataset is in. If not specified,
+     Dataset will appear at the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param table_name: The table name. Type: string (or Expression with
+     resultType string).
+    :type table_name: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'structure': {'key': 'structure', 'type': 'object'},
+        'schema': {'key': 'schema', 'type': 'object'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+        'type': {'key': 'type', 'type': 'str'},
+        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SparkObjectDataset, self).__init__(**kwargs)
+        self.table_name = kwargs.get('table_name', None)
+        self.type = 'SparkObject'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
new file mode 100644
index 000000000000..3ab167dd3540
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_object_dataset_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SparkObjectDataset(Dataset): + """Spark Server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SparkObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py new file mode 100644 index 000000000000..6d670c1c6b2a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SparkSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SparkSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py new file mode 100644 index 000000000000..8da01b0cd823 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/spark_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SparkSource(CopySource): + """A copy activity Spark Server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
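+
+# Illustrative usage sketch (commented out; not generator output): a
+# SparkSource used as a copy activity source. The query text is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import SparkSource
+#
+#   source = SparkSource(query='SELECT * FROM my_spark_table')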
+ :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'SparkSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py new file mode 100644 index 000000000000..6b4785b91ab4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. + :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + } + + def __init__(self, **kwargs): + super(SqlDWSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.allow_poly_base = kwargs.get('allow_poly_base', None) + self.poly_base_settings = kwargs.get('poly_base_settings', None) + self.type = 'SqlDWSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py new file mode 100644 index 000000000000..efe63dcf788a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlDWSink(CopySink): + """A copy activity SQL Data Warehouse sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). 
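+
+# Illustrative usage sketch (commented out; not generator output): a SqlDWSink
+# with PolyBase enabled. Per the docstring above, a PolybaseSettings object may
+# also be supplied via poly_base_settings when allowPolyBase is true; the
+# pre-copy script below is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import SqlDWSink
+#
+#   sink = SqlDWSink(
+#       pre_copy_script='TRUNCATE TABLE stage.Target',
+#       allow_poly_base=True)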
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param allow_poly_base: Indicates to use PolyBase to copy data into SQL + Data Warehouse when applicable. Type: boolean (or Expression with + resultType boolean). + :type allow_poly_base: object + :param poly_base_settings: Specifies PolyBase-related settings when + allowPolyBase is true. + :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, + 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.allow_poly_base = allow_poly_base + self.poly_base_settings = poly_base_settings + self.type = 'SqlDWSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py new file mode 100644 index 000000000000..1a020672f7c2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlDWSource(CopySource): + """A copy activity SQL Data Warehouse source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or + Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Data Warehouse source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + Type: object (or Expression with resultType object), itemType: + StoredProcedureParameter. + :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlDWSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlDWSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py new file mode 100644 index 000000000000..ae8fe605024f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_dw_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlDWSource(CopySource): + """A copy activity SQL Data Warehouse source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or + Expression with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Data Warehouse source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + Type: object (or Expression with resultType object), itemType: + StoredProcedureParameter. + :type stored_procedure_parameters: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlDWSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py new file mode 100644 index 000000000000..45d342212ea4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. 
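+
+# Illustrative usage sketch (commented out; not generator output): a
+# SqlDWSource reading via an inline query. As documented above,
+# sql_reader_query cannot be combined with sql_reader_stored_procedure_name;
+# the query text is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import SqlDWSource
+#
+#   source = SqlDWSource(sql_reader_query='SELECT TOP 10 * FROM dbo.Source')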
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SqlServer' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py new file mode 100644 index 000000000000..3eb8c5063dc1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_linked_service_py3.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
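+
+# Illustrative usage sketch (commented out; not generator output): a SQL
+# Server linked service with on-premises Windows authentication. SecureString
+# (a SecretBase subtype) is assumed from the wider models package; the
+# connection string and credentials are placeholders.
+#
+#   from azure.mgmt.datafactory.models import (
+#       SecureString, SqlServerLinkedService)
+#
+#   linked_service = SqlServerLinkedService(
+#       connection_string='Server=myserver;Database=mydb;',
+#       user_name='MYDOMAIN\\svc_adf',
+#       password=SecureString(value='<placeholder>'))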
+# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SqlServerLinkedService(LinkedService): + """SQL Server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param user_name: The on-premises Windows authentication user name. Type: + string (or Expression with resultType string). + :type user_name: object + :param password: The on-premises Windows authentication password. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'SqlServer' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py new file mode 100644 index 000000000000..45b1f1273903 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlServerSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py new file mode 100644 index 000000000000..dbe1bf44e418 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlServerSink(CopySink): + """A copy activity SQL server sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
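+
+# Illustrative usage sketch (commented out; not generator output): a
+# SqlServerSink that writes through a stored procedure. StoredProcedureParameter
+# is assumed from the wider models package, matching the attribute map above;
+# all names and values are placeholders.
+#
+#   from azure.mgmt.datafactory.models import (
+#       SqlServerSink, StoredProcedureParameter)
+#
+#   sink = SqlServerSink(
+#       sql_writer_stored_procedure_name='dbo.usp_UpsertOrders',
+#       stored_procedure_parameters={
+#           'BatchId': StoredProcedureParameter(value='1', type='Int')})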
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlServerSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py new file mode 100644 index 000000000000..f9aa011047ea --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source.py @@ 
-0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlServerSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py new file mode 100644 index 000000000000..27d12985e595 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlServerSource(CopySource): + """A copy activity SQL server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). 
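+
+# Illustrative usage sketch (commented out; not generator output): a
+# SqlServerSource reading via an inline query; per the docstring above, use
+# either sql_reader_query or sql_reader_stored_procedure_name, not both. The
+# query text is a placeholder.
+#
+#   from azure.mgmt.datafactory.models import SqlServerSource
+#
+#   source = SqlServerSource(sql_reader_query='SELECT * FROM dbo.Orders')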
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlServerSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py new file mode 100644 index 000000000000..6f31002f32d1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, **kwargs): + super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) + self.stored_procedure_name = kwargs.get('stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlServerStoredProcedure' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py new file mode 100644 index 000000000000..477f0c6c775c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_stored_procedure_activity_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class SqlServerStoredProcedureActivity(ExecutionActivity): + """SQL stored procedure activity type. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. 
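+
+# Illustrative usage sketch (commented out; not generator output): invoking a
+# stored procedure from a pipeline activity. LinkedServiceReference is assumed
+# from the wider models package; the activity, procedure, and linked-service
+# names are placeholders.
+#
+#   from azure.mgmt.datafactory.models import (
+#       LinkedServiceReference, SqlServerStoredProcedureActivity)
+#
+#   activity = SqlServerStoredProcedureActivity(
+#       name='RefreshAggregates',
+#       stored_procedure_name='dbo.usp_RefreshAggregates',
+#       linked_service_name=LinkedServiceReference(
+#           reference_name='MySqlServerLinkedService'))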
+ :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param stored_procedure_name: Required. Stored procedure name. Type: + string (or Expression with resultType string). + :type stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'stored_procedure_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.stored_procedure_name = stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlServerStoredProcedure' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py new file mode 100644 index 000000000000..d50540de4704 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlServerTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SqlServerTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py new file mode 100644 index 000000000000..bc8d4bec92e0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_server_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SqlServerTableDataset(Dataset): + """The on-premises SQL Server dataset. + + All required parameters must be populated in order to send to Azure. 
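+
+# Illustrative usage sketch (commented out; not generator output): a table
+# dataset over an on-premises SQL Server. LinkedServiceReference is assumed
+# from the wider models package; the table and linked-service names are
+# placeholders.
+#
+#   from azure.mgmt.datafactory.models import (
+#       LinkedServiceReference, SqlServerTableDataset)
+#
+#   dataset = SqlServerTableDataset(
+#       linked_service_name=LinkedServiceReference(
+#           reference_name='MySqlServerLinkedService'),
+#       table_name='dbo.Orders')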
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name of the SQL Server dataset. Type: string + (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SqlServerTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py new file mode 100644 index 000000000000..7ec0313aab4b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
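+#
+# Usage sketch for the SqlSink defined below (editor's illustration, not
+# generated code; the procedure, table type and script names are
+# placeholders):
+#
+#     sink = SqlSink(
+#         sql_writer_stored_procedure_name='spUpsertRows',
+#         sql_writer_table_type='RowTableType',
+#         pre_copy_script='TRUNCATE TABLE dbo.Staging')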
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). 
+ :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlSink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlSink' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py new file mode 100644 index 000000000000..1f6bb9685082 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlSink(CopySink): + """A copy activity SQL sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+    :type sink_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the sink data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
+     Type: string (or Expression with resultType string).
+    :type sql_writer_stored_procedure_name: object
+    :param sql_writer_table_type: SQL writer table type. Type: string (or
+     Expression with resultType string).
+    :type sql_writer_table_type: object
+    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
+     with resultType string).
+    :type pre_copy_script: object
+    :param stored_procedure_parameters: SQL stored procedure parameters.
+    :type stored_procedure_parameters: dict[str,
+     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
+    :param stored_procedure_table_type_parameter_name: The stored procedure
+     parameter name of the table type. Type: string (or Expression with
+     resultType string).
+    :type stored_procedure_table_type_parameter_name: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
+        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
+        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
+        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
+        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None:
+        super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
+        self.sql_writer_table_type = sql_writer_table_type
+        self.pre_copy_script = pre_copy_script
+        self.stored_procedure_parameters = stored_procedure_parameters
+        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
+        self.type = 'SqlSink'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py
new file mode 100644
index 000000000000..bb31474b1f7c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source.py
@@ -0,0 +1,69 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class SqlSource(CopySource):
+    """A copy activity SQL source.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count
+     for the source data store. Type: integer (or Expression with resultType
+     integer).
+    :type max_concurrent_connections: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param sql_reader_query: SQL reader query. Type: string (or Expression
+     with resultType string).
+    :type sql_reader_query: object
+    :param sql_reader_stored_procedure_name: Name of the stored procedure for
+     a SQL Database source. This cannot be used at the same time as
+     SqlReaderQuery. Type: string (or Expression with resultType string).
+    :type sql_reader_stored_procedure_name: object
+    :param stored_procedure_parameters: Value and type setting for stored
+     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
+ :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, **kwargs): + super(SqlSource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.type = 'SqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py new file mode 100644 index 000000000000..dcad458fd4a6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_source_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlSource(CopySource): + """A copy activity SQL source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a SQL Database source. This cannot be used at the same time as + SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
+ :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.type = 'SqlSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py new file mode 100644 index 000000000000..4edfc8b211f7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SquareLinkedService(LinkedService): + """Square Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Square instance. (i.e. + mystore.mysquare.com) + :type host: object + :param client_id: Required. The client ID associated with your Square + application. + :type client_id: object + :param client_secret: The client secret associated with your Square + application. 
+ :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param redirect_uri: Required. The redirect URL assigned in the Square + application dashboard. (i.e. http://localhost:2500) + :type redirect_uri: object + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + self.redirect_uri = kwargs.get('redirect_uri', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Square' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py new file mode 100644 index 000000000000..40719f600a18 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_linked_service_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
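+#
+# Usage sketch for the SquareLinkedService defined below (editor's
+# illustration, not generated code; host, IDs and secret are placeholders,
+# and SecureString comes from this models package):
+#
+#     square_ls = SquareLinkedService(
+#         host='mystore.mysquare.com',
+#         client_id='<client-id>',
+#         client_secret=SecureString(value='<client-secret>'),
+#         redirect_uri='http://localhost:2500')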
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SquareLinkedService(LinkedService): + """Square Service linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. The URL of the Square instance. (i.e. + mystore.mysquare.com) + :type host: object + :param client_id: Required. The client ID associated with your Square + application. + :type client_id: object + :param client_secret: The client secret associated with your Square + application. + :type client_secret: ~azure.mgmt.datafactory.models.SecretBase + :param redirect_uri: Required. The redirect URL assigned in the Square + application dashboard. (i.e. http://localhost:2500) + :type redirect_uri: object + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + 'client_id': {'required': True}, + 'redirect_uri': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, + 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, + 'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, client_id, redirect_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Square' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py new file mode 100644 index 000000000000..3903382d2e3a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. 
+ :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'SquareObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py new file mode 100644 index 000000000000..6d624dc6feef --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SquareObjectDataset(Dataset): + """Square Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'SquareObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py new file mode 100644 index 000000000000..f083df43f13a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SquareSource(CopySource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SquareSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'SquareSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py new file mode 100644 index 000000000000..ec8a741d564c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/square_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SquareSource(CopySource): + """A copy activity Square Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
+        super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+        self.query = query
+        self.type = 'SquareSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
new file mode 100644
index 000000000000..5dff9764e2a2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .ssis_object_metadata import SsisObjectMetadata
+
+
+class SsisEnvironment(SsisObjectMetadata):
+    """Ssis environment.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: Metadata id.
+    :type id: long
+    :param name: Metadata name.
+    :type name: str
+    :param description: Metadata description.
+    :type description: str
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param folder_id: Folder id which contains the environment.
+    :type folder_id: long
+    :param variables: Variables in the environment.
+    :type variables: list[~azure.mgmt.datafactory.models.SsisVariable]
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'long'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_id': {'key': 'folderId', 'type': 'long'},
+        'variables': {'key': 'variables', 'type': '[SsisVariable]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SsisEnvironment, self).__init__(**kwargs)
+        self.folder_id = kwargs.get('folder_id', None)
+        self.variables = kwargs.get('variables', None)
+        self.type = 'Environment'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
new file mode 100644
index 000000000000..43697ba62146
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_py3.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .ssis_object_metadata_py3 import SsisObjectMetadata
+
+
+class SsisEnvironment(SsisObjectMetadata):
+    """Ssis environment.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param id: Metadata id.
+    :type id: long
+    :param name: Metadata name.
+    :type name: str
+    :param description: Metadata description.
+    :type description: str
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param folder_id: Folder id which contains the environment.
+    :type folder_id: long
+    :param variables: Variables in the environment.
+    :type variables: list[~azure.mgmt.datafactory.models.SsisVariable]
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'long'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+        'folder_id': {'key': 'folderId', 'type': 'long'},
+        'variables': {'key': 'variables', 'type': '[SsisVariable]'},
+    }
+
+    def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None:
+        super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs)
+        self.folder_id = folder_id
+        self.variables = variables
+        self.type = 'Environment'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py
new file mode 100644
index 000000000000..e7d31d369392
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SsisEnvironmentReference(Model):
+    """Ssis environment reference.
+
+    :param id: Environment reference id.
+    :type id: long
+    :param environment_folder_name: Environment folder name.
+    :type environment_folder_name: str
+    :param environment_name: Environment name.
+    :type environment_name: str
+    :param reference_type: Reference type.
+    :type reference_type: str
+    """
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'long'},
+        'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'},
+        'environment_name': {'key': 'environmentName', 'type': 'str'},
+        'reference_type': {'key': 'referenceType', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SsisEnvironmentReference, self).__init__(**kwargs)
+        self.id = kwargs.get('id', None)
+        self.environment_folder_name = kwargs.get('environment_folder_name', None)
+        self.environment_name = kwargs.get('environment_name', None)
+        self.reference_type = kwargs.get('reference_type', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
new file mode 100644
index 000000000000..14cbfca99d4f
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_environment_reference_py3.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SsisEnvironmentReference(Model):
+    """Ssis environment reference.
+
+    :param id: Environment reference id.
+    :type id: long
+    :param environment_folder_name: Environment folder name.
+    :type environment_folder_name: str
+    :param environment_name: Environment name.
+    :type environment_name: str
+    :param reference_type: Reference type.
+    :type reference_type: str
+    """
+
+    _attribute_map = {
+        'id': {'key': 'id', 'type': 'long'},
+        'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'},
+        'environment_name': {'key': 'environmentName', 'type': 'str'},
+        'reference_type': {'key': 'referenceType', 'type': 'str'},
+    }
+
+    def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None:
+        super(SsisEnvironmentReference, self).__init__(**kwargs)
+        self.id = id
+        self.environment_folder_name = environment_folder_name
+        self.environment_name = environment_name
+        self.reference_type = reference_type
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
new file mode 100644
index 000000000000..c090694416a9
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
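+#
+# Usage sketch for the SSISExecutionCredential defined below (editor's
+# illustration, not generated code; values are placeholders and
+# SecureString comes from this models package):
+#
+#     credential = SSISExecutionCredential(
+#         domain='CORP',
+#         user_name='adf-runner',
+#         password=SecureString(value='<password>'))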
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISExecutionCredential(Model):
+    """SSIS package execution credential.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param domain: Required. Domain for Windows authentication.
+    :type domain: object
+    :param user_name: Required. UserName for Windows authentication.
+    :type user_name: object
+    :param password: Required. Password for Windows authentication.
+    :type password: ~azure.mgmt.datafactory.models.SecureString
+    """
+
+    _validation = {
+        'domain': {'required': True},
+        'user_name': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'domain': {'key': 'domain', 'type': 'object'},
+        'user_name': {'key': 'userName', 'type': 'object'},
+        'password': {'key': 'password', 'type': 'SecureString'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SSISExecutionCredential, self).__init__(**kwargs)
+        self.domain = kwargs.get('domain', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
new file mode 100644
index 000000000000..051eaffa2bf2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_credential_py3.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class SSISExecutionCredential(Model):
+    """SSIS package execution credential.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param domain: Required. Domain for Windows authentication.
+    :type domain: object
+    :param user_name: Required. UserName for Windows authentication.
+    :type user_name: object
+    :param password: Required. Password for Windows authentication.
+ :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISExecutionCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py new file mode 100644 index 000000000000..36f295c5a4aa --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionParameter(Model): + """SSIS execution parameter. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package execution parameter value. Type: + string (or Expression with resultType string). + :type value: object + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SSISExecutionParameter, self).__init__(**kwargs) + self.value = kwargs.get('value', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py new file mode 100644 index 000000000000..cd10dd457a42 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_execution_parameter_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISExecutionParameter(Model): + """SSIS execution parameter. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package execution parameter value. Type: + string (or Expression with resultType string). 
+ :type value: object + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, value, **kwargs) -> None: + super(SSISExecutionParameter, self).__init__(**kwargs) + self.value = value diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py new file mode 100644 index 000000000000..350b0d92852b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisFolder, self).__init__(**kwargs) + self.type = 'Folder' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py new file mode 100644 index 000000000000..d6483fda2c08 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_folder_py3.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type = 'Folder' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py new file mode 100644 index 000000000000..811075137f41 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadata(Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + + def __init__(self, **kwargs): + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py new file mode 100644 index 000000000000..a029c9f7ebc4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py new file mode 100644 index 000000000000..79931e1ceaf7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_list_response_py3.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataListResponse(Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining + results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py new file mode 100644 index 000000000000..45f7e15af4fa --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadata(Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: + super(SsisObjectMetadata, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py new file mode 100644 index 000000000000..9b782613ee08 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. + :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.name = kwargs.get('name', None) + self.properties = kwargs.get('properties', None) + self.error = kwargs.get('error', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py new file mode 100644 index 000000000000..a4b82b8f6bcd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_object_metadata_status_response_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisObjectMetadataStatusResponse(Model): + """The status of the operation. + + :param status: The status of the operation. + :type status: str + :param name: The operation name. + :type name: str + :param properties: The operation properties. + :type properties: str + :param error: The operation error message. + :type error: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: + super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) + self.status = status + self.name = name + self.properties = properties + self.error = error diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py new file mode 100644 index 000000000000..b04fc1138797 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. 
+ :type project_id: long + :param parameters: Parameters in package. + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisPackage, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Package' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py new file mode 100644 index 000000000000..81a17eb8fe53 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPackageLocation(Model): + """SSIS package location. + + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. Type: string (or + Expression with resultType string). + :type package_path: object + """ + + _validation = { + 'package_path': {'required': True}, + } + + _attribute_map = { + 'package_path': {'key': 'packagePath', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = kwargs.get('package_path', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py new file mode 100644 index 000000000000..af139da47d88 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPackageLocation(Model): + """SSIS package location. + + All required parameters must be populated in order to send to Azure. + + :param package_path: Required. The SSIS package path. 
Type: string (or + Expression with resultType string). + :type package_path: object + """ + + _validation = { + 'package_path': {'required': True}, + } + + _attribute_map = { + 'package_path': {'key': 'packagePath', 'type': 'object'}, + } + + def __init__(self, *, package_path, **kwargs) -> None: + super(SSISPackageLocation, self).__init__(**kwargs) + self.package_path = package_path diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py new file mode 100644 index 000000000000..e1e932e97ae6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package. + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + self.type = 'Package' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py new file mode 100644 index 000000000000..c456af0bab48 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py new file mode 100644 index 000000000000..6a4ff73768f0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_parameter_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisParameter(Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py new file mode 100644 index 000000000000..c29a36fb628e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. 
+ :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. + :type version: long + :param environment_refs: Environment references in project. + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project. + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, **kwargs): + super(SsisProject, self).__init__(**kwargs) + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + self.type = 'Project' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py new file mode 100644 index 000000000000..11b95a644e2f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_project_py3.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .ssis_object_metadata_py3 import SsisObjectMetadata + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param type: Required. Constant filled by server. + :type type: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. 
+ :type version: long + :param environment_refs: Environment references in project. + :type environment_refs: + list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project. + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters + self.type = 'Project' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py new file mode 100644 index 000000000000..30b78594e6ab --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPropertyOverride(Model): + """SSIS property override. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). + :type value: object + :param is_sensitive: Whether SSIS package property override value is + sensitive data. Value will be encrypted in SSISDB if it is true. + :type is_sensitive: bool + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.is_sensitive = kwargs.get('is_sensitive', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py new file mode 100644 index 000000000000..b425a19adc7e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_property_override_py3.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISPropertyOverride(Model): + """SSIS property override. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. SSIS package property override value. Type: string + (or Expression with resultType string). + :type value: object + :param is_sensitive: Whether SSIS package property override value is + sensitive data. Value will be encrypted in SSISDB if it is true. + :type is_sensitive: bool + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, + } + + def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None: + super(SSISPropertyOverride, self).__init__(**kwargs) + self.value = value + self.is_sensitive = is_sensitive diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py new file mode 100644 index 000000000000..73fda3b27967 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py new file mode 100644 index 000000000000..e709842ff465 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_variable_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SsisVariable(Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. + :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py new file mode 100644 index 000000000000..05ca8dff2c52 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StagingSettings(Model): + """Staging settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Staging linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing the interim data. Type: + string (or Expression with resultType string). + :type path: object + :param enable_compression: Specifies whether to use compression when + copying data via an interim staging. Default value is false. Type: boolean + (or Expression with resultType boolean). + :type enable_compression: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(StagingSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.linked_service_name = kwargs.get('linked_service_name', None) + self.path = kwargs.get('path', None) + self.enable_compression = kwargs.get('enable_compression', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py new file mode 100644 index 000000000000..13b4353963a3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/staging_settings_py3.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StagingSettings(Model): + """Staging settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param linked_service_name: Required. Staging linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param path: The path to storage for storing the interim data. Type: + string (or Expression with resultType string). + :type path: object + :param enable_compression: Specifies whether to use compression when + copying data via an interim staging. Default value is false. Type: boolean + (or Expression with resultType boolean). 
+ :type enable_compression: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: + super(StagingSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.linked_service_name = linked_service_name + self.path = path + self.enable_compression = enable_compression diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py new file mode 100644 index 000000000000..ff16595aa8c7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoredProcedureParameter(Model): + """SQL stored procedure parameter. + + :param value: Stored procedure parameter value. Type: string (or + Expression with resultType string). + :type value: object + :param type: Stored procedure parameter type. Possible values include: + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' + :type type: str or + ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(StoredProcedureParameter, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.type = kwargs.get('type', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py new file mode 100644 index 000000000000..2842ef9ae35c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/stored_procedure_parameter_py3.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class StoredProcedureParameter(Model): + """SQL stored procedure parameter. + + :param value: Stored procedure parameter value. Type: string (or + Expression with resultType string). 
+ :type value: object + :param type: Stored procedure parameter type. Possible values include: + 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' + :type type: str or + ~azure.mgmt.datafactory.models.StoredProcedureParameterType + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, *, value=None, type=None, **kwargs) -> None: + super(StoredProcedureParameter, self).__init__(**kwargs) + self.value = value + self.type = type diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py new file mode 100644 index 000000000000..c80b531db7d1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py new file mode 100644 index 000000000000..3b2d9ec62366 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sub_resource_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. 
+ :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py new file mode 100644 index 000000000000..83de0e6f61f2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SybaseLinkedService(LinkedService): + """Linked service for Sybase data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. Type: string (or + Expression with resultType string). + :type database: object + :param schema: Schema name for connection. Type: string (or Expression + with resultType string). + :type schema: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SybaseLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.database = kwargs.get('database', None) + self.schema = kwargs.get('schema', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Sybase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py new file mode 100644 index 000000000000..5b6cc0ce6ded --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sybase_linked_service_py3.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SybaseLinkedService(LinkedService): + """Linked service for Sybase data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection. + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param database: Required. Database name for connection. 
Type: string (or + Expression with resultType string). + :type database: object + :param schema: Schema name for connection. Type: string (or Expression + with resultType string). + :type schema: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.SybaseAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.database = database + self.schema = schema + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Sybase' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py new file mode 100644 index 000000000000..78b89638b359 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). + :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataLinkedService, self).__init__(**kwargs) + self.server = kwargs.get('server', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Teradata' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py new file mode 100644 index 000000000000..e80b776454c0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class TeradataLinkedService(LinkedService): + """Linked service for Teradata data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param server: Required. Server name for connection. Type: string (or + Expression with resultType string). + :type server: object + :param authentication_type: AuthenticationType to be used for connection. + Possible values include: 'Basic', 'Windows' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.TeradataAuthenticationType + :param username: Username for authentication. Type: string (or Expression + with resultType string). 
+ :type username: object + :param password: Password for authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'server': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'server': {'key': 'typeProperties.server', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.server = server + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Teradata' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py new file mode 100644 index 000000000000..48f32bf10133 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_storage_format import DatasetStorageFormat + + +class TextFormat(DatasetStorageFormat): + """The data stored in text format. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param serializer: Serializer. Type: string (or Expression with resultType + string). + :type serializer: object + :param deserializer: Deserializer. Type: string (or Expression with + resultType string). + :type deserializer: object + :param type: Required. Constant filled by server. + :type type: str + :param column_delimiter: The column delimiter. Type: string (or Expression + with resultType string). 
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with
+ resultType string).
+ :type row_delimiter: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ missing, the default value is "utf-8", unless the BOM denotes another
+ Unicode encoding. Refer to the "Name" column of the table in the following
+ link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param treat_empty_as_null: Treat empty column values in the text file as
+ null. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type treat_empty_as_null: object
+ :param skip_line_count: The number of lines/rows to be skipped when
+ parsing text files. The default value is 0. Type: integer (or Expression
+ with resultType integer).
+ :type skip_line_count: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output as
+ the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'serializer': {'key': 'serializer', 'type': 'object'},
+ 'deserializer': {'key': 'deserializer', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'},
+ 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'},
+ 'escape_char': {'key': 'escapeChar', 'type': 'object'},
+ 'quote_char': {'key': 'quoteChar', 'type': 'object'},
+ 'null_value': {'key': 'nullValue', 'type': 'object'},
+ 'encoding_name': {'key': 'encodingName', 'type': 'object'},
+ 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
+ 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
+ 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(TextFormat, self).__init__(**kwargs)
+ self.column_delimiter = kwargs.get('column_delimiter', None)
+ self.row_delimiter = kwargs.get('row_delimiter', None)
+ self.escape_char = kwargs.get('escape_char', None)
+ self.quote_char = kwargs.get('quote_char', None)
+ self.null_value = kwargs.get('null_value', None)
+ self.encoding_name = kwargs.get('encoding_name', None)
+ self.treat_empty_as_null = kwargs.get('treat_empty_as_null', None)
+ self.skip_line_count = kwargs.get('skip_line_count', None)
+ self.first_row_as_header = kwargs.get('first_row_as_header', None)
+ self.type = 'TextFormat'
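
# A minimal sketch of the TextFormat model defined above (illustrative;
# delimiter and file-convention values are placeholders). Every property can
# alternatively be given as an ADF Expression object rather than a literal,
# which is why the serialized types are declared as `object`.
from azure.mgmt.datafactory.models import TextFormat

text_format = TextFormat(
    column_delimiter=',',
    row_delimiter='\n',
    quote_char='"',
    null_value='\\N',          # string that stands for NULL in the files
    encoding_name='utf-8',     # the assumed default when omitted (BOM aside)
    treat_empty_as_null=True,
    skip_line_count=0,
    first_row_as_header=True,
)
assert text_format.type == 'TextFormat'
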
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
new file mode 100644
index 000000000000..0d876f62b112
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/text_format_py3.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_storage_format_py3 import DatasetStorageFormat
+
+
+class TextFormat(DatasetStorageFormat):
+ """The data stored in text format.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :param serializer: Serializer. Type: string (or Expression with resultType
+ string).
+ :type serializer: object
+ :param deserializer: Deserializer. Type: string (or Expression with
+ resultType string).
+ :type deserializer: object
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param column_delimiter: The column delimiter. Type: string (or Expression
+ with resultType string).
+ :type column_delimiter: object
+ :param row_delimiter: The row delimiter. Type: string (or Expression with
+ resultType string).
+ :type row_delimiter: object
+ :param escape_char: The escape character. Type: string (or Expression with
+ resultType string).
+ :type escape_char: object
+ :param quote_char: The quote character. Type: string (or Expression with
+ resultType string).
+ :type quote_char: object
+ :param null_value: The null value string. Type: string (or Expression with
+ resultType string).
+ :type null_value: object
+ :param encoding_name: The code page name of the preferred encoding. If
+ missing, the default value is "utf-8", unless the BOM denotes another
+ Unicode encoding. Refer to the "Name" column of the table in the following
+ link to set supported values:
+ https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string
+ (or Expression with resultType string).
+ :type encoding_name: object
+ :param treat_empty_as_null: Treat empty column values in the text file as
+ null. The default value is true. Type: boolean (or Expression with
+ resultType boolean).
+ :type treat_empty_as_null: object
+ :param skip_line_count: The number of lines/rows to be skipped when
+ parsing text files. The default value is 0. Type: integer (or Expression
+ with resultType integer).
+ :type skip_line_count: object
+ :param first_row_as_header: When used as input, treat the first row of
+ data as headers. When used as output, write the headers into the output as
+ the first row of data. The default value is false. Type: boolean (or
+ Expression with resultType boolean).
+ :type first_row_as_header: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'serializer': {'key': 'serializer', 'type': 'object'}, + 'deserializer': {'key': 'deserializer', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, + 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, + 'escape_char': {'key': 'escapeChar', 'type': 'object'}, + 'quote_char': {'key': 'quoteChar', 'type': 'object'}, + 'null_value': {'key': 'nullValue', 'type': 'object'}, + 'encoding_name': {'key': 'encodingName', 'type': 'object'}, + 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, + 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None: + super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) + self.column_delimiter = column_delimiter + self.row_delimiter = row_delimiter + self.escape_char = escape_char + self.quote_char = quote_char + self.null_value = null_value + self.encoding_name = encoding_name + self.treat_empty_as_null = treat_empty_as_null + self.skip_line_count = skip_line_count + self.first_row_as_header = first_row_as_header + self.type = 'TextFormat' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py new file mode 100644 index 000000000000..728ffc32bcb5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Trigger(Model): + """Azure data factory nested object which contains information about creating + pipeline run. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. 
Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + } + + def __init__(self, **kwargs): + super(Trigger, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.description = kwargs.get('description', None) + self.runtime_state = None + self.annotations = kwargs.get('annotations', None) + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py new file mode 100644 index 000000000000..089aa9a3e5fc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference import DependencyReference + + +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. 
+ :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__(self, **kwargs): + super(TriggerDependencyReference, self).__init__(**kwargs) + self.reference_trigger = kwargs.get('reference_trigger', None) + self.type = 'TriggerDependencyReference' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py new file mode 100644 index 000000000000..716a0d926f8b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_dependency_reference_py3.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dependency_reference_py3 import DependencyReference + + +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + } + + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } + + def __init__(self, *, reference_trigger, **kwargs) -> None: + super(TriggerDependencyReference, self).__init__(**kwargs) + self.reference_trigger = reference_trigger + self.type = 'TriggerDependencyReference' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py new file mode 100644 index 000000000000..70c9f2904347 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
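
# A minimal sketch of how the TriggerDependencyReference hierarchy above is
# meant to be used (illustrative; the trigger name is a placeholder). The
# concrete subclass and the TriggerReference model it needs are both added
# later in this patch. The `type` constant each __init__ assigns is the
# discriminator that msrest matches against _subtype_map to pick the right
# subclass when (de)serializing a DependencyReference payload.
from azure.mgmt.datafactory.models import (
    TriggerReference, TumblingWindowTriggerDependencyReference)

dependency = TumblingWindowTriggerDependencyReference(
    reference_trigger=TriggerReference(reference_name='UpstreamHourlyTrigger'),
    offset='01:00:00',  # timespan applied to the referenced window's start
    size='01:00:00',    # evaluate against a one-hour window
)
assert dependency.type == 'TumblingWindowTriggerDependencyReference'
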
+# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerPipelineReference(Model): + """Pipeline that needs to be triggered with the given parameters. + + :param pipeline_reference: Pipeline reference. + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = kwargs.get('pipeline_reference', None) + self.parameters = kwargs.get('parameters', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py new file mode 100644 index 000000000000..e32af8006326 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_pipeline_reference_py3.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerPipelineReference(Model): + """Pipeline that needs to be triggered with the given parameters. + + :param pipeline_reference: Pipeline reference. + :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference + :param parameters: Pipeline parameters. + :type parameters: dict[str, object] + """ + + _attribute_map = { + 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: + super(TriggerPipelineReference, self).__init__(**kwargs) + self.pipeline_reference = pipeline_reference + self.parameters = parameters diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py new file mode 100644 index 000000000000..862973544ab4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_py3.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class Trigger(Model): + """Azure data factory nested object which contains information about creating + pipeline run. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, + MultiplePipelineTrigger + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + } + + def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.description = description + self.runtime_state = None + self.annotations = annotations + self.type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py new file mode 100644 index 000000000000..a4f952dac85f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, **kwargs): + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py new file mode 100644 index 000000000000..805e407e80a7 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_reference_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerReference(Model): + """Trigger reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: + "TriggerReference" . + :vartype type: str + :param reference_name: Required. Reference trigger name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "TriggerReference" + + def __init__(self, *, reference_name: str, **kwargs) -> None: + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = reference_name diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py new file mode 100644 index 000000000000..539ac4775350 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .sub_resource import SubResource + + +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. 
+ :vartype etag: str
+ :param properties: Required. Properties of the trigger.
+ :type properties: ~azure.mgmt.datafactory.models.Trigger
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'Trigger'},
+ }
+
+ def __init__(self, **kwargs):
+ super(TriggerResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py
new file mode 100644
index 000000000000..1a7a003f4a6e
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_paged.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.paging import Paged
+
+
+class TriggerResourcePaged(Paged):
+ """
+ A paging container for iterating over a list of :class:`TriggerResource
+ <azure.mgmt.datafactory.models.TriggerResource>` object
+ """
+
+ _attribute_map = {
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'current_page': {'key': 'value', 'type': '[TriggerResource]'}
+ }
+
+ def __init__(self, *args, **kwargs):
+
+ super(TriggerResourcePaged, self).__init__(*args, **kwargs)
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py
new file mode 100644
index 000000000000..ae6a04ac3128
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_resource_py3.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .sub_resource_py3 import SubResource
+
+
+class TriggerResource(SubResource):
+ """Trigger resource type.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ :param properties: Required. Properties of the trigger.
+ :type properties: ~azure.mgmt.datafactory.models.Trigger + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Trigger'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(TriggerResource, self).__init__(**kwargs) + self.properties = properties diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py new file mode 100644 index 000000000000..9fad7bbfd9fa --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. + :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. 
+ :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py new file mode 100644 index 000000000000..5a9fe50f6894 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_run_py3.py @@ -0,0 +1,78 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRun(Model): + """Trigger runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar trigger_run_id: Trigger run id. + :vartype trigger_run_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar trigger_type: Trigger type. + :vartype trigger_type: str + :ivar trigger_run_timestamp: Trigger run start time. + :vartype trigger_run_timestamp: datetime + :ivar status: Trigger run status. Possible values include: 'Succeeded', + 'Failed', 'Inprogress' + :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus + :ivar message: Trigger error message. + :vartype message: str + :ivar properties: List of property name and value related to trigger run. + Name, value pair depends on type of trigger. + :vartype properties: dict[str, str] + :ivar triggered_pipelines: List of pipeline name and run Id triggered by + the trigger run. 
+ :vartype triggered_pipelines: dict[str, str] + """ + + _validation = { + 'trigger_run_id': {'readonly': True}, + 'trigger_name': {'readonly': True}, + 'trigger_type': {'readonly': True}, + 'trigger_run_timestamp': {'readonly': True}, + 'status': {'readonly': True}, + 'message': {'readonly': True}, + 'properties': {'readonly': True}, + 'triggered_pipelines': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(TriggerRun, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.trigger_run_id = None + self.trigger_name = None + self.trigger_type = None + self.trigger_run_timestamp = None + self.status = None + self.message = None + self.properties = None + self.triggered_pipelines = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py new file mode 100644 index 000000000000..7684fe7eb7dc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. 
+ :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py new file mode 100644 index 000000000000..391a2441b3d1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/trigger_runs_query_response_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TriggerRunsQueryResponse(Model): + """A list of trigger runs. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of trigger runs. + :type value: list[~azure.mgmt.datafactory.models.TriggerRun] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerRun]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerRunsQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py new file mode 100644 index 000000000000..939624ae5042 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger import Trigger + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the + trigger during which events are fired for windows that are ready. Only UTC + time is currently supported. + :type start_time: datetime + :param end_time: The end time for the time period for the trigger during + which events are fired for windows that are ready. Only UTC time is + currently supported. + :type end_time: datetime + :param delay: Specifies how long the trigger waits past due time before + triggering new run. It doesn't alter window start and end time. The + default is 0. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows + (ready for execution) for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline + runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling + window triggers are supported. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, + } + + def __init__(self, **kwargs): + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.pipeline = kwargs.get('pipeline', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs.get('max_concurrency', None) + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) + self.type = 'TumblingWindowTrigger' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py new file mode 100644 index 000000000000..89dcefbc8c09 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_dependency_reference import TriggerDependencyReference + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window + when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. 
If + undefined the frequency of the tumbling window will be used. + :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) + self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py new file mode 100644 index 000000000000..648f25e59937 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_dependency_reference_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_dependency_reference_py3 import TriggerDependencyReference + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window + when evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If + undefined the frequency of the tumbling window will be used. 
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) + self.offset = offset + self.size = size + self.type = 'TumblingWindowTriggerDependencyReference' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py new file mode 100644 index 000000000000..6856629c8b91 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/tumbling_window_trigger_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .trigger_py3 import Trigger + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows + from a start time without gaps and also supports backfill scenarios (when + start time is in the past). + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. Pipeline for which runs are created when an + event is fired for trigger window that is ready. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param frequency: Required. The frequency of the time windows. Possible + values include: 'Minute', 'Hour' + :type frequency: str or + ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum + interval allowed is 15 Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the + trigger during which events are fired for windows that are ready. 
Only UTC
+    time is currently supported.
+    :type start_time: datetime
+    :param end_time: The end time for the time period for the trigger during
+    which events are fired for windows that are ready. Only UTC time is
+    currently supported.
+    :type end_time: datetime
+    :param delay: Specifies how long the trigger waits past due time before
+    triggering new run. It doesn't alter window start and end time. The
+    default is 0. Type: string (or Expression with resultType string),
+    pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type delay: object
+    :param max_concurrency: Required. The max number of parallel time windows
+    (ready for execution) for which a new run is triggered.
+    :type max_concurrency: int
+    :param retry_policy: Retry policy that will be applied for failed pipeline
+    runs.
+    :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy
+    :param depends_on: Triggers that this trigger depends on. Only tumbling
+    window triggers are supported.
+    :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference]
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+        'pipeline': {'required': True},
+        'frequency': {'required': True},
+        'interval': {'required': True},
+        'start_time': {'required': True},
+        'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'},
+        'frequency': {'key': 'typeProperties.frequency', 'type': 'str'},
+        'interval': {'key': 'typeProperties.interval', 'type': 'int'},
+        'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'},
+        'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'},
+        'delay': {'key': 'typeProperties.delay', 'type': 'object'},
+        'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'},
+        'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'},
+        'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'},
+    }
+
+    def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None:
+        super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
+        self.pipeline = pipeline
+        self.frequency = frequency
+        self.interval = interval
+        self.start_time = start_time
+        self.end_time = end_time
+        self.delay = delay
+        self.max_concurrency = max_concurrency
+        self.retry_policy = retry_policy
+        self.depends_on = depends_on
+        self.type = 'TumblingWindowTrigger'
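For orientation, a minimal usage sketch of the model above (editorial note, not part of the patch): it builds the trigger with the keyword-only signature defined in this file. The pipeline name, window settings, and retry values are illustrative assumptions; PipelineReference, TriggerPipelineReference, and RetryPolicy are the companion models from the same azure.mgmt.datafactory.models package.

    from datetime import datetime

    from azure.mgmt.datafactory.models import (
        PipelineReference, RetryPolicy, TriggerPipelineReference,
        TumblingWindowTrigger)

    # Run 'examplePipeline' once per 15-minute window from 2019-05-01 (UTC),
    # with at most 10 windows in flight, retrying failed runs 3 times, 30s apart.
    trigger = TumblingWindowTrigger(
        pipeline=TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='examplePipeline')),
        frequency='Minute',
        interval=15,
        start_time=datetime(2019, 5, 1),
        max_concurrency=10,
        retry_policy=RetryPolicy(count=3, interval_in_seconds=30))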
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py
new file mode 100644
index 000000000000..eede36501d6c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity import ControlActivity
+
+
+class UntilActivity(ControlActivity):
+    """This activity executes inner activities until the specified boolean
+    expression evaluates to true or the timeout is reached, whichever is
+    earlier.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+    deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param expression: Required. An expression that would evaluate to Boolean.
+    The loop will continue until this expression evaluates to true.
+    :type expression: ~azure.mgmt.datafactory.models.Expression
+    :param timeout: Specifies the timeout for the activity to run. If there is
+    no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
+    week as default. Type: string (or Expression with resultType string),
+    pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param activities: Required. List of activities to execute.
+    :type activities: list[~azure.mgmt.datafactory.models.Activity]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'expression': {'required': True},
+        'activities': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'expression': {'key': 'typeProperties.expression', 'type': 'Expression'},
+        'timeout': {'key': 'typeProperties.timeout', 'type': 'object'},
+        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
+    }
+
+    def __init__(self, **kwargs):
+        super(UntilActivity, self).__init__(**kwargs)
+        self.expression = kwargs.get('expression', None)
+        self.timeout = kwargs.get('timeout', None)
+        self.activities = kwargs.get('activities', None)
+        self.type = 'Until'
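A short sketch of the kwargs-based class above (editorial note, not part of the patch). The expression and the inner WaitActivity are illustrative assumptions; Expression and WaitActivity are companion models from the same package, and the 'done' variable is hypothetical.

    from azure.mgmt.datafactory.models import (
        Expression, UntilActivity, WaitActivity)

    # Re-check a (hypothetical) 'done' variable every 30 seconds; the loop stops
    # when the expression evaluates to true or after the one-hour timeout.
    until = UntilActivity(
        name='UntilDone',
        expression=Expression(value="@equals(variables('done'), true)"),
        activities=[WaitActivity(name='Pause', wait_time_in_seconds=30)],
        timeout='01:00:00')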
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
new file mode 100644
index 000000000000..40c03ce18591
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/until_activity_py3.py
@@ -0,0 +1,72 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class UntilActivity(ControlActivity):
+    """This activity executes inner activities until the specified boolean
+    expression evaluates to true or the timeout is reached, whichever is
+    earlier.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+    deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param expression: Required. An expression that would evaluate to Boolean.
+    The loop will continue until this expression evaluates to true.
+    :type expression: ~azure.mgmt.datafactory.models.Expression
+    :param timeout: Specifies the timeout for the activity to run. If there is
+    no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
+    week as default. Type: string (or Expression with resultType string),
+    pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param activities: Required. List of activities to execute.
+ :type activities: list[~azure.mgmt.datafactory.models.Activity] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'expression': {'required': True}, + 'activities': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, + 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, + } + + def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: + super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.expression = expression + self.timeout = timeout + self.activities = activities + self.type = 'Until' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py new file mode 100644 index 000000000000..c6460310225a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class UpdateIntegrationRuntimeNodeRequest(Model): + """Update integration runtime node request. + + :param concurrent_jobs_limit: The number of concurrent jobs permitted to + run on the integration runtime node. Values between 1 and + maxConcurrentJobs(inclusive) are allowed. + :type concurrent_jobs_limit: int + """ + + _validation = { + 'concurrent_jobs_limit': {'minimum': 1}, + } + + _attribute_map = { + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) + self.concurrent_jobs_limit = kwargs.get('concurrent_jobs_limit', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py new file mode 100644 index 000000000000..de1605885139 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_node_request_py3.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UpdateIntegrationRuntimeNodeRequest(Model):
+    """Update integration runtime node request.
+
+    :param concurrent_jobs_limit: The number of concurrent jobs permitted to
+    run on the integration runtime node. Values between 1 and
+    maxConcurrentJobs(inclusive) are allowed.
+    :type concurrent_jobs_limit: int
+    """
+
+    _validation = {
+        'concurrent_jobs_limit': {'minimum': 1},
+    }
+
+    _attribute_map = {
+        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+    }
+
+    def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None:
+        super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs)
+        self.concurrent_jobs_limit = concurrent_jobs_limit
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
new file mode 100644
index 000000000000..bd5e332b50f5
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UpdateIntegrationRuntimeRequest(Model):
+    """Update integration runtime request.
+
+    :param auto_update: Enables or disables the auto-update feature of the
+    self-hosted integration runtime. See
+    https://go.microsoft.com/fwlink/?linkid=854189. Possible values include:
+    'On', 'Off'
+    :type auto_update: str or
+    ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+    :param update_delay_offset: The time offset (in hours) in the day, e.g.,
+    PT03H is 3 hours. The integration runtime auto-update will happen at that
+    time.
+    :type update_delay_offset: str
+    """
+
+    _attribute_map = {
+        'auto_update': {'key': 'autoUpdate', 'type': 'str'},
+        'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs)
+        self.auto_update = kwargs.get('auto_update', None)
+        self.update_delay_offset = kwargs.get('update_delay_offset', None)
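A one-line sketch of the request above (editorial note, not part of the patch); the constructor arguments mirror the two properties the model declares, and the offset value reuses the docstring's PT03H example.

    from azure.mgmt.datafactory.models import UpdateIntegrationRuntimeRequest

    # Turn auto-update on and schedule it three hours into the day (PT03H).
    request = UpdateIntegrationRuntimeRequest(
        auto_update='On',
        update_delay_offset='PT03H')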
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
new file mode 100644
index 000000000000..731cb942b472
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/update_integration_runtime_request_py3.py
@@ -0,0 +1,38 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UpdateIntegrationRuntimeRequest(Model):
+    """Update integration runtime request.
+
+    :param auto_update: Enables or disables the auto-update feature of the
+    self-hosted integration runtime. See
+    https://go.microsoft.com/fwlink/?linkid=854189. Possible values include:
+    'On', 'Off'
+    :type auto_update: str or
+    ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+    :param update_delay_offset: The time offset (in hours) in the day, e.g.,
+    PT03H is 3 hours. The integration runtime auto-update will happen at that
+    time.
+    :type update_delay_offset: str
+    """
+
+    _attribute_map = {
+        'auto_update': {'key': 'autoUpdate', 'type': 'str'},
+        'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'},
+    }
+
+    def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None:
+        super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs)
+        self.auto_update = auto_update
+        self.update_delay_offset = update_delay_offset
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
new file mode 100644
index 000000000000..b51e313b6f0c
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UserAccessPolicy(Model):
+    """Get Data Plane read only token request definition.
+
+    :param permissions: The string with permissions for Data Plane access.
+    Currently only 'r' is supported, which grants read-only access.
+    :type permissions: str
+    :param access_resource_path: The resource path to get access relative to
+    factory. Currently only empty string is supported, which corresponds to
+    the factory resource.
+    :type access_resource_path: str
+    :param profile_name: The name of the profile. Currently only the default
+    is supported. The default value is DefaultProfile.
+    :type profile_name: str
+    :param start_time: Start time for the token. If not specified, the
+    current time will be used.
+    :type start_time: str
+    :param expire_time: Expiration time for the token. Maximum duration for
+    the token is eight hours and by default the token will expire in eight
+    hours.
+    :type expire_time: str
+    """
+
+    _attribute_map = {
+        'permissions': {'key': 'permissions', 'type': 'str'},
+        'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'},
+        'profile_name': {'key': 'profileName', 'type': 'str'},
+        'start_time': {'key': 'startTime', 'type': 'str'},
+        'expire_time': {'key': 'expireTime', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(UserAccessPolicy, self).__init__(**kwargs)
+        self.permissions = kwargs.get('permissions', None)
+        self.access_resource_path = kwargs.get('access_resource_path', None)
+        self.profile_name = kwargs.get('profile_name', None)
+        self.start_time = kwargs.get('start_time', None)
+        self.expire_time = kwargs.get('expire_time', None)
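A minimal sketch of the policy above (editorial note, not part of the patch); every argument is a property the model declares, and the timestamps are illustrative values spanning the maximum eight-hour token lifetime.

    from azure.mgmt.datafactory.models import UserAccessPolicy

    # Read-only token for the whole factory (empty resource path), valid for
    # the maximum eight hours.
    policy = UserAccessPolicy(
        permissions='r',
        access_resource_path='',
        profile_name='DefaultProfile',
        start_time='2019-05-29T08:00:00Z',
        expire_time='2019-05-29T16:00:00Z')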
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py
new file mode 100644
index 000000000000..26e2a7639a09
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_access_policy_py3.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UserAccessPolicy(Model):
+    """Get Data Plane read only token request definition.
+
+    :param permissions: The string with permissions for Data Plane access.
+    Currently only 'r' is supported, which grants read-only access.
+    :type permissions: str
+    :param access_resource_path: The resource path to get access relative to
+    factory. Currently only empty string is supported, which corresponds to
+    the factory resource.
+    :type access_resource_path: str
+    :param profile_name: The name of the profile. Currently only the default
+    is supported. The default value is DefaultProfile.
+    :type profile_name: str
+    :param start_time: Start time for the token. If not specified, the
+    current time will be used.
+    :type start_time: str
+    :param expire_time: Expiration time for the token. Maximum duration for
+    the token is eight hours and by default the token will expire in eight
+    hours.
+    :type expire_time: str
+    """
+
+    _attribute_map = {
+        'permissions': {'key': 'permissions', 'type': 'str'},
+        'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'},
+        'profile_name': {'key': 'profileName', 'type': 'str'},
+        'start_time': {'key': 'startTime', 'type': 'str'},
+        'expire_time': {'key': 'expireTime', 'type': 'str'},
+    }
+
+    def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None:
+        super(UserAccessPolicy, self).__init__(**kwargs)
+        self.permissions = permissions
+        self.access_resource_path = access_resource_path
+        self.profile_name = profile_name
+        self.start_time = start_time
+        self.expire_time = expire_time
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py
new file mode 100644
index 000000000000..30692d2960ec
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/user_property.py
@@ -0,0 +1,40 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class UserProperty(Model):
+    """User property.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param name: Required. User property name.
+    :type name: str
+    :param value: Required. User property value. Type: string (or Expression
+    with resultType string).
+ :type value: object + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, name: str, value, **kwargs) -> None: + super(UserProperty, self).__init__(**kwargs) + self.name = name + self.value = value diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py new file mode 100644 index 000000000000..0d92d32c12b0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class ValidationActivity(ControlActivity): + """This activity verifies that an external resource exists. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param timeout: Specifies the timeout for the activity to run. If there is + no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 + week as default. Type: string (or Expression with resultType string), + pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: object + :param sleep: A delay in seconds between validation attempts. If no value + is specified, 10 seconds will be used as the default. Type: integer (or + Expression with resultType integer). + :type sleep: object + :param minimum_size: Can be used if dataset points to a file. The file + must be greater than or equal in size to the value specified. Type: + integer (or Expression with resultType integer). + :type minimum_size: object + :param child_items: Can be used if dataset points to a folder. If set to + true, the folder must have at least one file. If set to false, the folder + must be empty. Type: boolean (or Expression with resultType boolean). + :type child_items: object + :param dataset: Required. Validation activity dataset reference. 
+    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'dataset': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'timeout': {'key': 'typeProperties.timeout', 'type': 'object'},
+        'sleep': {'key': 'typeProperties.sleep', 'type': 'object'},
+        'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'},
+        'child_items': {'key': 'typeProperties.childItems', 'type': 'object'},
+        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ValidationActivity, self).__init__(**kwargs)
+        self.timeout = kwargs.get('timeout', None)
+        self.sleep = kwargs.get('sleep', None)
+        self.minimum_size = kwargs.get('minimum_size', None)
+        self.child_items = kwargs.get('child_items', None)
+        self.dataset = kwargs.get('dataset', None)
+        self.type = 'Validation'
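A brief sketch of the activity above (editorial note, not part of the patch); the dataset name is an illustrative assumption, and DatasetReference is the companion model from the same package.

    from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

    # Poll every 60 seconds, for at most one hour, until the folder behind
    # 'exampleInputDataset' contains at least one file.
    validate = ValidationActivity(
        name='WaitForInput',
        dataset=DatasetReference(reference_name='exampleInputDataset'),
        timeout='01:00:00',
        sleep=60,
        child_items=True)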
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
new file mode 100644
index 000000000000..f4680400b447
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/validation_activity_py3.py
@@ -0,0 +1,81 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity_py3 import ControlActivity
+
+
+class ValidationActivity(ControlActivity):
+    """This activity verifies that an external resource exists.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+    deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param name: Required. Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param user_properties: Activity user properties.
+    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param timeout: Specifies the timeout for the activity to run. If there is
+    no value specified, it takes the value of TimeSpan.FromDays(7) which is 1
+    week as default. Type: string (or Expression with resultType string),
+    pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type timeout: object
+    :param sleep: A delay in seconds between validation attempts. If no value
+    is specified, 10 seconds will be used as the default. Type: integer (or
+    Expression with resultType integer).
+    :type sleep: object
+    :param minimum_size: Can be used if dataset points to a file. The file
+    must be greater than or equal in size to the value specified. Type:
+    integer (or Expression with resultType integer).
+    :type minimum_size: object
+    :param child_items: Can be used if dataset points to a folder. If set to
+    true, the folder must have at least one file. If set to false, the folder
+    must be empty. Type: boolean (or Expression with resultType boolean).
+    :type child_items: object
+    :param dataset: Required. Validation activity dataset reference.
+    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'dataset': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'timeout': {'key': 'typeProperties.timeout', 'type': 'object'},
+        'sleep': {'key': 'typeProperties.sleep', 'type': 'object'},
+        'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'},
+        'child_items': {'key': 'typeProperties.childItems', 'type': 'object'},
+        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
+    }
+
+    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None:
+        super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
+        self.timeout = timeout
+        self.sleep = sleep
+        self.minimum_size = minimum_size
+        self.child_items = child_items
+        self.dataset = dataset
+        self.type = 'Validation'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py
new file mode 100644
index 000000000000..6d7fd808fa44
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class VariableSpecification(Model):
+    """Definition of a single variable for a Pipeline.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Variable type. Possible values include: 'String',
+    'Bool', 'Array'
+    :type type: str or ~azure.mgmt.datafactory.models.VariableType
+    :param default_value: Default value of variable.
+ :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VariableSpecification, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.default_value = kwargs.get('default_value', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py new file mode 100644 index 000000000000..d60b3b4b1591 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/variable_specification_py3.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class VariableSpecification(Model): + """Definition of a single variable for a Pipeline. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Variable type. Possible values include: 'String', + 'Bool', 'Array' + :type type: str or ~azure.mgmt.datafactory.models.VariableType + :param default_value: Default value of variable. + :type default_value: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'object'}, + } + + def __init__(self, *, type, default_value=None, **kwargs) -> None: + super(VariableSpecification, self).__init__(**kwargs) + self.type = type + self.default_value = default_value diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py new file mode 100644 index 000000000000..6b5e8d0103f5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.pwd = kwargs.get('pwd', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Vertica' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py new file mode 100644 index 000000000000..3aee3a5ae0f6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service_py3.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class VerticaLinkedService(LinkedService): + """Vertica linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param pwd: The Azure key vault secret reference of password in connection + string. + :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: + super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.pwd = pwd + self.encrypted_credential = encrypted_credential + self.type = 'Vertica' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py new file mode 100644 index 000000000000..d0b642f15d38 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'VerticaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py new file mode 100644 index 000000000000..a1c4d755f2b4 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class VerticaSource(CopySource): + """A copy activity Vertica source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'VerticaSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py new file mode 100644 index 000000000000..e84465f8ba07 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(VerticaTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'VerticaTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py new file mode 100644 index 000000000000..87d69bb9a443 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class VerticaTableDataset(Dataset): + """Vertica dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'VerticaTable' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py new file mode 100644 index 000000000000..91f3decc7473 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity import ControlActivity + + +class WaitActivity(ControlActivity): + """This activity suspends pipeline execution for the specified interval. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param wait_time_in_seconds: Required. Duration in seconds. 
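As a usage note for the VerticaTableDataset model whose Python 3 constructor appears above, a minimal sketch (the linked service reference name "VerticaLS" is hypothetical):

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    VerticaTableDataset,
)

# ``linked_service_name`` is the only required argument; ``table_name`` is
# flattened onto typeProperties.tableName during serialization.
dataset = VerticaTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="VerticaLS"),
    table_name="public.customers",
)
```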
+ :type wait_time_in_seconds: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'wait_time_in_seconds': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(WaitActivity, self).__init__(**kwargs) + self.wait_time_in_seconds = kwargs.get('wait_time_in_seconds', None) + self.type = 'Wait' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py new file mode 100644 index 000000000000..ff85c9d16733 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/wait_activity_py3.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WaitActivity(ControlActivity): + """This activity suspends pipeline execution for the specified interval. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param wait_time_in_seconds: Required. Duration in seconds. 
+ :type wait_time_in_seconds: int + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'wait_time_in_seconds': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + } + + def __init__(self, *, name: str, wait_time_in_seconds: int, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: + super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.wait_time_in_seconds = wait_time_in_seconds + self.type = 'Wait' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py new file mode 100644 index 000000000000..70264719d52e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity import ExecutionActivity + + +class WebActivity(ExecutionActivity): + """Web activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). + :type url: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). 
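The WaitActivity constructor finished above takes only a name and a duration; a minimal sketch:

```python
from azure.mgmt.datafactory.models import WaitActivity

# Suspends the pipeline for 30 seconds before downstream activities run.
wait = WaitActivity(name="WaitBeforeRetry", wait_time_in_seconds=30)
```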
+ :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + :param datasets: List of datasets passed to web endpoint. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :param linked_services: List of linked services passed to web endpoint. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, + 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, + } + + def __init__(self, **kwargs): + super(WebActivity, self).__init__(**kwargs) + self.method = kwargs.get('method', None) + self.url = kwargs.get('url', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.datasets = kwargs.get('datasets', None) + self.linked_services = kwargs.get('linked_services', None) + self.type = 'WebActivity' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py new file mode 100644 index 000000000000..6ebb193ae5e9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebActivityAuthentication(Model): + """Web activity authentication properties. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. 
Web activity authentication + (Basic/ClientCertificate/MSI) + :type type: str + :param pfx: Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecureString + :param username: Web activity authentication user name for basic + authentication. + :type username: str + :param password: Password for the PFX file or basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + :param resource: Resource for which Azure Auth token will be requested + when using MSI Authentication. + :type resource: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecureString'}, + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + 'resource': {'key': 'resource', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.pfx = kwargs.get('pfx', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.resource = kwargs.get('resource', None) diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py new file mode 100644 index 000000000000..4c2b68ba7161 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_authentication_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebActivityAuthentication(Model): + """Web activity authentication properties. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI) + :type type: str + :param pfx: Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecureString + :param username: Web activity authentication user name for basic + authentication. + :type username: str + :param password: Password for the PFX file or basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + :param resource: Resource for which Azure Auth token will be requested + when using MSI Authentication. 
+ :type resource: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecureString'}, + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + 'resource': {'key': 'resource', 'type': 'str'}, + } + + def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: + super(WebActivityAuthentication, self).__init__(**kwargs) + self.type = type + self.pfx = pfx + self.username = username + self.password = password + self.resource = resource diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py new file mode 100644 index 000000000000..9a64114a00c6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_activity_py3.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .execution_activity_py3 import ExecutionActivity + + +class WebActivity(ExecutionActivity): + """Web activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param method: Required. Rest API method for target endpoint. Possible + values include: 'GET', 'POST', 'PUT', 'DELETE' + :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod + :param url: Required. Web activity target endpoint and path. Type: string + (or Expression with resultType string). + :type url: object + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. 
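Since WebActivityAuthentication is now fully defined, here is a hedged sketch pairing it with the WebActivity model from the previous file (the resource URI and endpoint are illustrative):

```python
from azure.mgmt.datafactory.models import (
    WebActivity,
    WebActivityAuthentication,
)

# MSI authentication needs only ``type`` and the target ``resource``.
auth = WebActivityAuthentication(
    type="MSI",
    resource="https://management.azure.com/",  # illustrative resource URI
)

# GET requests must not set ``body``, per the docstring above.
status_check = WebActivity(
    name="CallRestEndpoint",
    method="GET",
    url="https://example.com/api/status",
    authentication=auth,
)
```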
+ :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + :param datasets: List of datasets passed to web endpoint. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] + :param linked_services: List of linked services passed to web endpoint. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceReference] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, + 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, + } + + def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, **kwargs) -> None: + super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.method = method + self.url = url + self.headers = headers + self.body = body + self.authentication = authentication + self.datasets = datasets + self.linked_services = linked_services + self.type = 'WebActivity' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py new file mode 100644 index 000000000000..d3bd2f2594ab --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. 
The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebAnonymousAuthentication, self).__init__(**kwargs) + self.authentication_type = 'Anonymous' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py new file mode 100644 index 000000000000..ee7a4e780a1f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_anonymous_authentication_py3.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses anonymous authentication to communicate with + an HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) + self.authentication_type = 'Anonymous' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py new file mode 100644 index 000000000000..90050f7dae28 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). + :type username: object + :param password: Required. The password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(WebBasicAuthentication, self).__init__(**kwargs) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.authentication_type = 'Basic' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py new file mode 100644 index 000000000000..71577ec86565 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_basic_authentication_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebBasicAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses basic authentication to communicate with an + HTTP endpoint. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param username: Required. User name for Basic authentication. Type: + string (or Expression with resultType string). + :type username: object + :param password: Required. The password for Basic authentication. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, *, url, username, password, **kwargs) -> None: + super(WebBasicAuthentication, self).__init__(url=url, **kwargs) + self.username = username + self.password = password + self.authentication_type = 'Basic' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py new file mode 100644 index 000000000000..671808ca85d1 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties import WebLinkedServiceTypeProperties + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. 
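For the WebBasicAuthentication model completed above, a minimal sketch; it assumes SecureString accepts its secret via value=, as the SecureString references elsewhere in these models suggest, and in practice a Key Vault reference would be preferable to an inline secret:

```python
from azure.mgmt.datafactory.models import SecureString, WebBasicAuthentication

# ``password`` accepts any SecretBase; SecureString is the inline variant.
auth_props = WebBasicAuthentication(
    url="https://example.com/api",           # illustrative endpoint
    username="svc-user",                     # illustrative account
    password=SecureString(value="<secret>"),
)
```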
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + 'pfx': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, + 'password': {'key': 'password', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(WebClientCertificateAuthentication, self).__init__(**kwargs) + self.pfx = kwargs.get('pfx', None) + self.password = kwargs.get('password', None) + self.authentication_type = 'ClientCertificate' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py new file mode 100644 index 000000000000..7ac859b677a8 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_client_certificate_authentication_py3.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .web_linked_service_type_properties_py3 import WebLinkedServiceTypeProperties + + +class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): + """A WebLinkedService that uses client certificate based authentication to + communicate with an HTTP endpoint. This scheme follows mutual + authentication; the server must also provide valid credentials to the + client. + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + :param pfx: Required. Base64-encoded contents of a PFX file. + :type pfx: ~azure.mgmt.datafactory.models.SecretBase + :param password: Required. Password for the PFX file. 
+ :type password: ~azure.mgmt.datafactory.models.SecretBase
+ """
+
+ _validation = {
+ 'url': {'required': True},
+ 'authentication_type': {'required': True},
+ 'pfx': {'required': True},
+ 'password': {'required': True},
+ }
+
+ _attribute_map = {
+ 'url': {'key': 'url', 'type': 'object'},
+ 'authentication_type': {'key': 'authenticationType', 'type': 'str'},
+ 'pfx': {'key': 'pfx', 'type': 'SecretBase'},
+ 'password': {'key': 'password', 'type': 'SecretBase'},
+ }
+
+ def __init__(self, *, url, pfx, password, **kwargs) -> None:
+ super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs)
+ self.pfx = pfx
+ self.password = password
+ self.authentication_type = 'ClientCertificate'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py
new file mode 100644
index 000000000000..1c648c42c3e2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity.py
@@ -0,0 +1,92 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .control_activity import ControlActivity
+
+
+class WebHookActivity(ControlActivity):
+ """WebHook activity.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection.
+ :type additional_properties: dict[str, object]
+ :param name: Required. Activity name.
+ :type name: str
+ :param description: Activity description.
+ :type description: str
+ :param depends_on: Activity depends on condition.
+ :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+ :param user_properties: Activity user properties.
+ :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :ivar method: Required. REST API method for target endpoint. Default
+ value: "POST".
+ :vartype method: str
+ :param url: Required. WebHook activity target endpoint and path. Type:
+ string (or Expression with resultType string).
+ :type url: object
+ :param timeout: The timeout within which the webhook should be called
+ back. If there is no value specified, it defaults to 10 minutes. Type:
+ string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type timeout: str
+ :param headers: Represents the headers that will be sent with the request.
+ For example, to set the language and type on a request: "headers" : {
+ "Accept-Language": "en-us", "Content-Type": "application/json" }. Type:
+ string (or Expression with resultType string).
+ :type headers: object
+ :param body: Represents the payload that will be sent to the endpoint.
+ Required for POST/PUT method, not allowed for GET method. Type: string (or
+ Expression with resultType string).
+ :type body: object
+ :param authentication: Authentication method used for calling the
+ endpoint.
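The WebClientCertificateAuthentication variant completed above carries the PFX bytes and their password as secrets; a sketch, assuming a local client.pfx file and the same SecureString(value=...) shape as before:

```python
import base64

from azure.mgmt.datafactory.models import (
    SecureString,
    WebClientCertificateAuthentication,
)

# The service expects the PFX contents Base64-encoded inside a secret.
with open("client.pfx", "rb") as f:  # hypothetical certificate file
    pfx_b64 = base64.b64encode(f.read()).decode("ascii")

auth_props = WebClientCertificateAuthentication(
    url="https://example.com/api",
    pfx=SecureString(value=pfx_b64),
    password=SecureString(value="<pfx-password>"),
)
```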
+ :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, **kwargs): + super(WebHookActivity, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.timeout = kwargs.get('timeout', None) + self.headers = kwargs.get('headers', None) + self.body = kwargs.get('body', None) + self.authentication = kwargs.get('authentication', None) + self.type = 'WebHook' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py new file mode 100644 index 000000000000..40cdc6f732da --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_hook_activity_py3.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .control_activity_py3 import ControlActivity + + +class WebHookActivity(ControlActivity): + """WebHook activity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :ivar method: Required. Rest API method for target endpoint. Default + value: "POST" . + :vartype method: str + :param url: Required. WebHook activity target endpoint and path. Type: + string (or Expression with resultType string). + :type url: object + :param timeout: The timeout within which the webhook should be called + back. If there is no value specified, it defaults to 10 minutes. Type: + string. 
Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type timeout: str + :param headers: Represents the headers that will be sent to the request. + For example, to set the language and type on a request: "headers" : { + "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: + string (or Expression with resultType string). + :type headers: object + :param body: Represents the payload that will be sent to the endpoint. + Required for POST/PUT method, not allowed for GET method Type: string (or + Expression with resultType string). + :type body: object + :param authentication: Authentication method used for calling the + endpoint. + :type authentication: + ~azure.mgmt.datafactory.models.WebActivityAuthentication + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'method': {'required': True, 'constant': True}, + 'url': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'method': {'key': 'typeProperties.method', 'type': 'str'}, + 'url': {'key': 'typeProperties.url', 'type': 'object'}, + 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, + 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, + 'body': {'key': 'typeProperties.body', 'type': 'object'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, + } + + method = "POST" + + def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, **kwargs) -> None: + super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.url = url + self.timeout = timeout + self.headers = headers + self.body = body + self.authentication = authentication + self.type = 'WebHook' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py new file mode 100644 index 000000000000..18fadba3f3ee --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
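With the WebHookActivity constructor above in view, a short sketch (endpoint and timeout are illustrative); note that ``method`` is a class-level constant pinned to "POST", so it is not passed in:

```python
from azure.mgmt.datafactory.models import WebHookActivity

# The callback service gets 10 minutes to respond, matching the
# d.hh:mm:ss timeout pattern described in the docstring.
hook = WebHookActivity(
    name="NotifyCallbackService",
    url="https://example.com/api/callback",
    timeout="00:10:00",
    body={"pipeline": "@pipeline().Pipeline"},
)
```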
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. + :type type_properties: + ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + } + + def __init__(self, **kwargs): + super(WebLinkedService, self).__init__(**kwargs) + self.type_properties = kwargs.get('type_properties', None) + self.type = 'Web' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py new file mode 100644 index 000000000000..3e491b0fac4d --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_py3.py @@ -0,0 +1,59 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class WebLinkedService(LinkedService): + """Web linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param type_properties: Required. Web linked service properties. 
+ :type type_properties: + ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties + """ + + _validation = { + 'type': {'required': True}, + 'type_properties': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, + } + + def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type_properties = type_properties + self.type = 'Web' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py new file mode 100644 index 000000000000..22290e80b19f --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebLinkedServiceTypeProperties(Model): + """Base definition of WebLinkedServiceTypeProperties, this typeProperties is + polymorphic based on authenticationType, so not flattened in SDK models. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. 
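Because WebLinkedService wraps a polymorphic typeProperties object, constructing one means choosing an authentication variant first; a minimal sketch using the anonymous variant defined earlier (the URL is illustrative):

```python
from azure.mgmt.datafactory.models import (
    WebAnonymousAuthentication,
    WebLinkedService,
)

# The authenticationType discriminator inside typeProperties selects the
# concrete subclass on the wire, so no separate flag is needed here.
linked_service = WebLinkedService(
    type_properties=WebAnonymousAuthentication(url="https://example.com/"),
)
```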
+ :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, **kwargs): + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = kwargs.get('url', None) + self.authentication_type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py new file mode 100644 index 000000000000..1c162c2f1004 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_linked_service_type_properties_py3.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class WebLinkedServiceTypeProperties(Model): + """Base definition of WebLinkedServiceTypeProperties, this typeProperties is + polymorphic based on authenticationType, so not flattened in SDK models. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: WebClientCertificateAuthentication, + WebBasicAuthentication, WebAnonymousAuthentication + + All required parameters must be populated in order to send to Azure. + + :param url: Required. The URL of the web service endpoint, e.g. + http://www.microsoft.com . Type: string (or Expression with resultType + string). + :type url: object + :param authentication_type: Required. Constant filled by server. + :type authentication_type: str + """ + + _validation = { + 'url': {'required': True}, + 'authentication_type': {'required': True}, + } + + _attribute_map = { + 'url': {'key': 'url', 'type': 'object'}, + 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, + } + + _subtype_map = { + 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} + } + + def __init__(self, *, url, **kwargs) -> None: + super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) + self.url = url + self.authentication_type = None diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py new file mode 100644 index 000000000000..c5d3a2a8f00a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WebSource, self).__init__(**kwargs) + self.type = 'WebSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py new file mode 100644 index 000000000000..684e1d4233cc --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_source_py3.py @@ -0,0 +1,52 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class WebSource(CopySource): + """A copy activity source for web page table. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
+ super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ self.type = 'WebSource'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py
new file mode 100644
index 000000000000..3980fe3d885a
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class WebTableDataset(Dataset):
+ """The dataset points to an HTML table in the web page.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param index: Required. The zero-based index of the table in the web page.
+ Type: integer (or Expression with resultType integer), minimum: 0.
+ :type index: object
+ :param path: The relative URL to the web page from the linked service URL.
+ Type: string (or Expression with resultType string).
+ :type path: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'index': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'index': {'key': 'typeProperties.index', 'type': 'object'},
+ 'path': {'key': 'typeProperties.path', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(WebTableDataset, self).__init__(**kwargs)
+ self.index = kwargs.get('index', None)
+ self.path = kwargs.get('path', None)
+ self.type = 'WebTable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py
new file mode 100644
index 000000000000..edb2344c35d2
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/web_table_dataset_py3.py
@@ -0,0 +1,78 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset_py3 import Dataset
+
+
+class WebTableDataset(Dataset):
+ """The dataset points to an HTML table in the web page.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param description: Dataset description.
+ :type description: str
+ :param structure: Columns that define the structure of the dataset. Type:
+ array (or Expression with resultType array), itemType: DatasetDataElement.
+ :type structure: object
+ :param schema: Columns that define the physical type schema of the
+ dataset. Type: array (or Expression with resultType array), itemType:
+ DatasetSchemaDataElement.
+ :type schema: object
+ :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name:
+ ~azure.mgmt.datafactory.models.LinkedServiceReference
+ :param parameters: Parameters for dataset.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ Dataset.
+ :type annotations: list[object]
+ :param folder: The folder that this Dataset is in. If not specified,
+ Dataset will appear at the root level.
+ :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param index: Required. The zero-based index of the table in the web page.
+ Type: integer (or Expression with resultType integer), minimum: 0.
+ :type index: object
+ :param path: The relative URL to the web page from the linked service URL.
+ Type: string (or Expression with resultType string).
+ :type path: object
+ """
+
+ _validation = {
+ 'linked_service_name': {'required': True},
+ 'type': {'required': True},
+ 'index': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'structure': {'key': 'structure', 'type': 'object'},
+ 'schema': {'key': 'schema', 'type': 'object'},
+ 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'folder': {'key': 'folder', 'type': 'DatasetFolder'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'index': {'key': 'typeProperties.index', 'type': 'object'},
+ 'path': {'key': 'typeProperties.path', 'type': 'object'},
+ }
+
+ def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None:
+ super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
+ self.index = index
+ self.path = path
+ self.type = 'WebTable'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py
new file mode 100644
index 000000000000..24973f577133
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service.py
@@ -0,0 +1,93 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class XeroLinkedService(LinkedService):
+ """Xero Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Xero server (e.g.
+ api.xero.com).
+ :type host: object
+ :param consumer_key: The consumer key associated with the Xero
+ application.
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings (\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'host': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'host': {'key': 'typeProperties.host', 'type': 'object'},
+ 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'},
+ 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'},
+ 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
+ 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
+ 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
+ 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ }
+
+ def __init__(self, **kwargs):
+ super(XeroLinkedService, self).__init__(**kwargs)
+ self.host = kwargs.get('host', None)
+ self.consumer_key = kwargs.get('consumer_key', None)
+ self.private_key = kwargs.get('private_key', None)
+ self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None)
+ self.use_host_verification = kwargs.get('use_host_verification', None)
+ self.use_peer_verification = kwargs.get('use_peer_verification', None)
+ self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.type = 'Xero'
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
new file mode 100644
index 000000000000..433c65ade739
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_linked_service_py3.py
@@ -0,0 +1,93 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
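Because `consumer_key` and `private_key` are typed as SecretBase above, callers pass a secret wrapper rather than a bare string. A minimal construction sketch, assuming the generated models package is importable (the key values are placeholders; SecureString is the inline SecretBase subclass in this package):

from azure.mgmt.datafactory import models

xero_ls = models.XeroLinkedService(
    host='api.xero.com',
    consumer_key=models.SecureString(value='<consumer key>'),
    # The whole .pem body, Unix newlines included, goes in as one string.
    private_key=models.SecureString(value='<contents of the .pem file>'),
)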
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service_py3 import LinkedService
+
+
+class XeroLinkedService(LinkedService):
+ """Xero Service linked service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized to this collection
+ :type additional_properties: dict[str, object]
+ :param connect_via: The integration runtime reference.
+ :type connect_via:
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+ :param description: Linked service description.
+ :type description: str
+ :param parameters: Parameters for linked service.
+ :type parameters: dict[str,
+ ~azure.mgmt.datafactory.models.ParameterSpecification]
+ :param annotations: List of tags that can be used for describing the
+ linked service.
+ :type annotations: list[object]
+ :param type: Required. Constant filled by server.
+ :type type: str
+ :param host: Required. The endpoint of the Xero server (e.g.
+ api.xero.com).
+ :type host: object
+ :param consumer_key: The consumer key associated with the Xero
+ application.
+ :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param private_key: The private key from the .pem file that was generated
+ for your Xero private application. You must include all the text from the
+ .pem file, including the Unix line endings (\n).
+ :type private_key: ~azure.mgmt.datafactory.models.SecretBase
+ :param use_encrypted_endpoints: Specifies whether the data source
+ endpoints are encrypted using HTTPS. The default value is true.
+ :type use_encrypted_endpoints: object
+ :param use_host_verification: Specifies whether to require the host name
+ in the server's certificate to match the host name of the server when
+ connecting over SSL. The default value is true.
+ :type use_host_verification: object
+ :param use_peer_verification: Specifies whether to verify the identity of
+ the server when connecting over SSL. The default value is true.
+ :type use_peer_verification: object
+ :param encrypted_credential: The encrypted credential used for
+ authentication. Credentials are encrypted using the integration runtime
+ credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, + 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.consumer_key = consumer_key + self.private_key = private_key + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Xero' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py new file mode 100644 index 000000000000..53c5edd44cec --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class XeroObjectDataset(Dataset): + """Xero Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'XeroObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py new file mode 100644 index 000000000000..673d41e1771e --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class XeroObjectDataset(Dataset): + """Xero Service dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'XeroObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py new file mode 100644 index 000000000000..a37852a5b419 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class XeroSource(CopySource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). 
+ :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(XeroSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'XeroSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py new file mode 100644 index 000000000000..bbee6c6fa1f0 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/xero_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class XeroSource(CopySource): + """A copy activity Xero Service source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'XeroSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py new file mode 100644 index 000000000000..fe34dff77ea9 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class ZohoLinkedService(LinkedService): + """Zoho server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param endpoint: Required. The endpoint of the Zoho server. (i.e. + crm.zoho.com/crm/private) + :type endpoint: object + :param access_token: The access token for Zoho authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. 
+ :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoLinkedService, self).__init__(**kwargs) + self.endpoint = kwargs.get('endpoint', None) + self.access_token = kwargs.get('access_token', None) + self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) + self.use_host_verification = kwargs.get('use_host_verification', None) + self.use_peer_verification = kwargs.get('use_peer_verification', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Zoho' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py new file mode 100644 index 000000000000..f82f6221592b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_linked_service_py3.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class ZohoLinkedService(LinkedService): + """Zoho server linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param endpoint: Required. The endpoint of the Zoho server. (i.e. + crm.zoho.com/crm/private) + :type endpoint: object + :param access_token: The access token for Zoho authentication. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param use_encrypted_endpoints: Specifies whether the data source + endpoints are encrypted using HTTPS. The default value is true. + :type use_encrypted_endpoints: object + :param use_host_verification: Specifies whether to require the host name + in the server's certificate to match the host name of the server when + connecting over SSL. The default value is true. + :type use_host_verification: object + :param use_peer_verification: Specifies whether to verify the identity of + the server when connecting over SSL. The default value is true. + :type use_peer_verification: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, + 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, + 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, endpoint, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: + super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.endpoint = endpoint + self.access_token = access_token + self.use_encrypted_endpoints = use_encrypted_endpoints + self.use_host_verification = use_host_verification + self.use_peer_verification = use_peer_verification + self.encrypted_credential = encrypted_credential + self.type = 'Zoho' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py new file mode 100644 index 000000000000..062d508860a6 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
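A note on the `typeProperties.*` keys in the attribute map of the ZohoLinkedService above: msrest expands the dotted keys into a nested object on the wire, so the flat Python model serializes with its connector fields under `typeProperties`. An illustrative sketch, assuming the generated package is importable (the token is a placeholder):

from msrest import Serializer

from azure.mgmt.datafactory import models

client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
serializer = Serializer(client_models)

zoho_ls = models.ZohoLinkedService(
    endpoint='crm.zoho.com/crm/private',
    access_token=models.SecureString(value='<access token>'),
)
body = serializer.body(zoho_ls, 'ZohoLinkedService')
# body is shaped roughly like:
# {'type': 'Zoho', 'typeProperties': {'endpoint': 'crm.zoho.com/crm/private',
#                                     'accessToken': {'type': 'SecureString', 'value': '...'}}}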
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class ZohoObjectDataset(Dataset): + """Zoho server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoObjectDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'ZohoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py new file mode 100644 index 000000000000..ef5a67d4fe35 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
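The object dataset above adds only `table_name` on top of the common Dataset fields; the required `linked_service_name` is a reference by name, not an embedded linked service. A small sketch (the reference and table names are placeholders):

from azure.mgmt.datafactory import models

zoho_ds = models.ZohoObjectDataset(
    linked_service_name=models.LinkedServiceReference(reference_name='ZohoLS'),
    table_name='Accounts',  # Type: string (or Expression with resultType string)
)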
+# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class ZohoObjectDataset(Dataset): + """Zoho server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The table name. Type: string (or Expression with + resultType string). + :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'ZohoObject' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py new file mode 100644 index 000000000000..274c6fc09f19 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class ZohoSource(CopySource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ZohoSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'ZohoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py new file mode 100644 index 000000000000..6d7dc29bdf8a --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/zoho_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class ZohoSource(CopySource): + """A copy activity Zoho server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'ZohoSource' diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py new file mode 100644 index 000000000000..ffc98f67bed2 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
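Each operations class re-exported in the package below becomes an attribute on DataFactoryManagementClient (client.datasets, client.pipelines, client.activity_runs, and so on), all pinned to api-version 2018-06-01. A hedged sketch of client construction (the service-principal values and subscription id are placeholders):

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='<app id>', secret='<app secret>', tenant='<tenant id>')
client = DataFactoryManagementClient(credentials, '<subscription id>')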
+# -------------------------------------------------------------------------- + +from .operations import Operations +from .factories_operations import FactoriesOperations +from .exposure_control_operations import ExposureControlOperations +from .integration_runtimes_operations import IntegrationRuntimesOperations +from .integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations +from .integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from .linked_services_operations import LinkedServicesOperations +from .datasets_operations import DatasetsOperations +from .pipelines_operations import PipelinesOperations +from .pipeline_runs_operations import PipelineRunsOperations +from .activity_runs_operations import ActivityRunsOperations +from .triggers_operations import TriggersOperations +from .rerun_triggers_operations import RerunTriggersOperations +from .trigger_runs_operations import TriggerRunsOperations + +__all__ = [ + 'Operations', + 'FactoriesOperations', + 'ExposureControlOperations', + 'IntegrationRuntimesOperations', + 'IntegrationRuntimeObjectMetadataOperations', + 'IntegrationRuntimeNodesOperations', + 'LinkedServicesOperations', + 'DatasetsOperations', + 'PipelinesOperations', + 'PipelineRunsOperations', + 'ActivityRunsOperations', + 'TriggersOperations', + 'RerunTriggersOperations', + 'TriggerRunsOperations', +] diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py new file mode 100644 index 000000000000..f338a1a9c835 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/activity_runs_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class ActivityRunsOperations(object): + """ActivityRunsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def query_by_pipeline_run( + self, resource_group_name, factory_name, run_id, filter_parameters, custom_headers=None, raw=False, **operation_config): + """Query activity runs based on input filter conditions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param filter_parameters: Parameters to filter the activity runs. 
+ :type filter_parameters: + ~azure.mgmt.datafactory.models.RunFilterParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ActivityRunsQueryResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.ActivityRunsQueryResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.query_by_pipeline_run.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ActivityRunsQueryResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py new file mode 100644 index 000000000000..278815d03479 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/datasets_operations.py @@ -0,0 +1,314 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
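With a client like the one sketched earlier, querying activity runs is the single POST wrapped by the method above; the required filter window is expressed through RunFilterParameters. A hedged usage sketch (resource names and the run id are placeholders):

from datetime import datetime, timedelta

from azure.mgmt.datafactory import models

filters = models.RunFilterParameters(
    last_updated_after=datetime.utcnow() - timedelta(days=1),
    last_updated_before=datetime.utcnow(),
)
response = client.activity_runs.query_by_pipeline_run(
    'my-resource-group', 'my-factory', '<pipeline run id>', filters)
for run in response.value:
    print(run.activity_name, run.status)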
+# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class DatasetsOperations(object): + """DatasetsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists datasets. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of DatasetResource + :rtype: + ~azure.mgmt.datafactory.models.DatasetResourcePaged[~azure.mgmt.datafactory.models.DatasetResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.DatasetResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_factory.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} + + def create_or_update( + self, resource_group_name, factory_name, dataset_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param properties: Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + :param if_match: ETag of the dataset entity. Should only be specified + for update, for which it should match existing entity or can be * for + unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DatasetResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.DatasetResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + dataset = models.DatasetResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(dataset, 'DatasetResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} + + def get( + self, resource_group_name, factory_name, dataset_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a dataset. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param if_none_match: ETag of the dataset entity. Should only be + specified for get. If the ETag matches the existing entity tag, or if + * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DatasetResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.DatasetResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 304]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DatasetResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} + + def delete( + self, resource_group_name, factory_name, dataset_name, custom_headers=None, raw=False, **operation_config): + """Deletes a dataset. 
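+
+ Together with ``create_or_update`` and ``get`` above, a minimal
+ lifecycle sketch (``client`` and ``some_dataset``, an instance of a
+ concrete ``Dataset`` subclass, are assumed)::
+
+     ds = client.datasets.create_or_update(
+         'my-rg', 'my-factory', 'my-dataset', properties=some_dataset)
+     fetched = client.datasets.get('my-rg', 'my-factory', 'my-dataset')
+     client.datasets.delete('my-rg', 'my-factory', 'my-dataset')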
+ + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dataset_name: The dataset name. + :type dataset_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py new file mode 100644 index 000000000000..4a648d96586c --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/exposure_control_operations.py @@ -0,0 +1,179 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. 
import models + + +class ExposureControlOperations(object): + """ExposureControlOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def get_feature_value( + self, location_id, feature_name=None, feature_type=None, custom_headers=None, raw=False, **operation_config): + """Get exposure control feature for specific location. + + :param location_id: The location identifier. + :type location_id: str + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ExposureControlResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + + # Construct URL + url = self.get_feature_value.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'locationId': self._serialize.url("location_id", location_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ExposureControlResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} + + def get_feature_value_by_factory( + self, resource_group_name, factory_name, feature_name=None, 
feature_type=None, custom_headers=None, raw=False, **operation_config): + """Get exposure control feature for specific factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param feature_name: The feature name. + :type feature_name: str + :param feature_type: The feature type. + :type feature_type: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ExposureControlResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.ExposureControlResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) + + # Construct URL + url = self.get_feature_value_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ExposureControlResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py new file mode 100644 index 000000000000..b06c12f3e8c5 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/factories_operations.py @@ -0,0 +1,644 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class FactoriesOperations(object): + """FactoriesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists factories under the specified subscription. + + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of Factory + :rtype: + ~azure.mgmt.datafactory.models.FactoryPaged[~azure.mgmt.datafactory.models.Factory] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} + + def configure_factory_repo( + self, location_id, factory_resource_id=None, 
repo_configuration=None, custom_headers=None, raw=False, **operation_config): + """Updates a factory's repo information. + + :param location_id: The location identifier. + :type location_id: str + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: + ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: Factory or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.Factory or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) + + # Construct URL + url = self.configure_factory_repo.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'locationId': self._serialize.url("location_id", location_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Factory', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} + + def list_by_resource_group( + self, resource_group_name, custom_headers=None, raw=False, **operation_config): + """Lists factories. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
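+ A sketch of consuming the pager (``client.factories`` is the assumed
+ attribute name for this operations group)::
+
+     for factory in client.factories.list_by_resource_group('my-rg'):
+         print(factory.name)
+
+ ``list()`` above pages factories across the whole subscription in the
+ same iterator-like fashion.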
+ :return: An iterator like instance of Factory + :rtype: + ~azure.mgmt.datafactory.models.FactoryPaged[~azure.mgmt.datafactory.models.Factory] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.FactoryPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.FactoryPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} + + def create_or_update( + self, resource_group_name, factory_name, factory, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param factory: Factory resource definition. + :type factory: ~azure.mgmt.datafactory.models.Factory + :param if_match: ETag of the factory entity. Should only be specified + for update, for which it should match existing entity or can be * for + unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
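+ For example (a hedged sketch; ``models.Factory(location=...)`` follows
+ the usual tracked-resource shape and is assumed rather than shown in
+ this file)::
+
+     factory = client.factories.create_or_update(
+         'my-rg', 'my-factory', models.Factory(location='eastus'))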
+ :return: Factory or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.Factory or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(factory, 'Factory') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Factory', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} + + def update( + self, resource_group_name, factory_name, tags=None, identity=None, custom_headers=None, raw=False, **operation_config): + """Updates a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param tags: The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
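+ For example, patching only the tags (a minimal sketch)::
+
+     factory = client.factories.update(
+         'my-rg', 'my-factory', tags={'environment': 'dev'})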
+ :return: Factory or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.Factory or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Factory', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} + + def get( + self, resource_group_name, factory_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param if_none_match: ETag of the factory entity. Should only be + specified for get. If the ETag matches the existing entity tag, or if + * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
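+ A sketch of the conditional form: on a 304 (ETag still matches) the
+ method returns ``None`` instead of raising, per the status handling
+ below (``factory.e_tag`` is an assumed resource attribute)::
+
+     factory = client.factories.get('my-rg', 'my-factory')
+     unchanged = client.factories.get(
+         'my-rg', 'my-factory', if_none_match=factory.e_tag)
+     # unchanged is None while the entity is unmodified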
+ :return: Factory or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.Factory or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 304]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('Factory', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} + + def delete( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Deletes a factory. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
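+ A minimal sketch; both 200 and 204 are treated as success and the
+ method returns ``None``::
+
+     client.factories.delete('my-rg', 'my-factory')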
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} + + def get_git_hub_access_token( + self, resource_group_name, factory_name, git_hub_access_token_request, custom_headers=None, raw=False, **operation_config): + """Get GitHub Access Token. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param git_hub_access_token_request: Get GitHub access token request + definition. + :type git_hub_access_token_request: + ~azure.mgmt.datafactory.models.GitHubAccessTokenRequest + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
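+ A hedged sketch (the ``GitHubAccessTokenRequest`` field names are
+ assumed, since the model is not defined in this file)::
+
+     req = models.GitHubAccessTokenRequest(
+         git_hub_access_code='<oauth-code>',
+         git_hub_client_id='<client-id>',
+         git_hub_access_token_base_url='https://github.com')
+     resp = client.factories.get_git_hub_access_token(
+         'my-rg', 'my-factory', req)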
+ :return: GitHubAccessTokenResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.GitHubAccessTokenResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_git_hub_access_token.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('GitHubAccessTokenResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} + + def get_data_plane_access( + self, resource_group_name, factory_name, policy, custom_headers=None, raw=False, **operation_config): + """Get Data Plane access. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param policy: Data Plane user access policy definition. + :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
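+ A hedged sketch (the ``UserAccessPolicy`` field names are assumed,
+ since the model is not defined in this file)::
+
+     policy = models.UserAccessPolicy(
+         permissions='r',
+         access_resource_path='',
+         expire_time='2019-05-30T00:00:00Z')
+     token = client.factories.get_data_plane_access(
+         'my-rg', 'my-factory', policy)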
+ :return: AccessPolicyResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.AccessPolicyResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_data_plane_access.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(policy, 'UserAccessPolicy') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('AccessPolicyResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py new file mode 100644 index 000000000000..81467b9e3385 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_nodes_operations.py @@ -0,0 +1,316 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class IntegrationRuntimeNodesOperations(object): + """IntegrationRuntimeNodesOperations operations. + + :param client: Client for service requests. 
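+ Callers are expected to reach this group through the management
+ client (the ``integration_runtime_nodes`` attribute name is assumed
+ from the module name), e.g.::
+
+     node = client.integration_runtime_nodes.get(
+         'my-rg', 'my-factory', 'my-ir', 'node-1')
+     client.integration_runtime_nodes.update(
+         'my-rg', 'my-factory', 'my-ir', 'node-1',
+         concurrent_jobs_limit=4)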
+ :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def get( + self, resource_group_name, factory_name, integration_runtime_name, node_name, custom_headers=None, raw=False, **operation_config): + """Gets a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SelfHostedIntegrationRuntimeNode or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = 
{'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} + + def delete( + self, resource_group_name, factory_name, integration_runtime_name, node_name, custom_headers=None, raw=False, **operation_config): + """Deletes a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} + + def update( + self, resource_group_name, factory_name, integration_runtime_name, node_name, concurrent_jobs_limit=None, custom_headers=None, raw=False, **operation_config): + """Updates a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :param concurrent_jobs_limit: The number of concurrent jobs permitted + to run on the integration runtime node. Values between 1 and + maxConcurrentJobs(inclusive) are allowed. + :type concurrent_jobs_limit: int + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: SelfHostedIntegrationRuntimeNode or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} + + def 
get_ip_address( + self, resource_group_name, factory_name, integration_runtime_name, node_name, custom_headers=None, raw=False, **operation_config): + """Gets the IP address of a self-hosted integration runtime node. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param node_name: The integration runtime node name. + :type node_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeNodeIpAddress or ClientRawResponse if + raw=true + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeNodeIpAddress + or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_ip_address.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py new file mode 100644 index 000000000000..230f12d023c3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtime_object_metadata_operations.py @@ -0,0 +1,218 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class IntegrationRuntimeObjectMetadataOperations(object): + """IntegrationRuntimeObjectMetadataOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + + def _refresh_initial( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.refresh.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if 
response.status_code == 200: + deserialized = self._deserialize('SsisObjectMetadataStatusResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def refresh( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Refreshes the object metadata of an SSIS integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + SsisObjectMetadataStatusResponse or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.SsisObjectMetadataStatusResponse]] + :raises: :class:`CloudError` + """ + raw_result = self._refresh_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('SsisObjectMetadataStatusResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} + + def get( + self, resource_group_name, factory_name, integration_runtime_name, metadata_path=None, custom_headers=None, raw=False, **operation_config): + """Gets the object metadata of an SSIS integration runtime by the + specified path. The return is a pageable metadata list. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param metadata_path: Metadata path. + :type metadata_path: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`.
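# refresh() above is a long-running operation returning an LROPoller. A
# minimal sketch of driving it, assuming `client` is the hypothetical
# DataFactoryManagementClient from the earlier example:
#
#     poller = client.integration_runtime_object_metadata.refresh(
#         'myResourceGroup', 'myFactory', 'mySsisIr')
#     status = poller.result()  # blocks until ARMPolling sees a terminal state
#     print(status.status, status.error)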
+ :return: SsisObjectMetadataListResponse or ClientRawResponse if + raw=true + :rtype: ~azure.mgmt.datafactory.models.SsisObjectMetadataListResponse + or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + get_metadata_request = None + if metadata_path is not None: + get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) + + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if get_metadata_request is not None: + body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('SsisObjectMetadataListResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py new file mode 100644 index 000000000000..0a64be3b1441 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/integration_runtimes_operations.py @@ -0,0 +1,1181 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
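# A sketch of the object-metadata get() defined above, with the same
# hypothetical `client` and placeholder names; omitting metadata_path should
# list from the root.
#
#     metadata = client.integration_runtime_object_metadata.get(
#         'myResourceGroup', 'myFactory', 'mySsisIr',
#         metadata_path='myFolder')
#     for item in metadata.value:
#         print(item.name)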
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class IntegrationRuntimesOperations(object): + """IntegrationRuntimesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists integration runtimes. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of IntegrationRuntimeResource + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeResourcePaged[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.IntegrationRuntimeResourcePaged(internal_paging, 
self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.IntegrationRuntimeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} + + def create_or_update( + self, resource_group_name, factory_name, integration_runtime_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param properties: Integration runtime properties. + :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime + :param if_match: ETag of the integration runtime entity. Should only + be specified for update, for which it should match existing entity or + can be * for unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + integration_runtime = models.IntegrationRuntimeResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = 
self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} + + def get( + self, resource_group_name, factory_name, integration_runtime_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param if_none_match: ETag of the integration runtime entity. Should + only be specified for get. If the ETag matches the existing entity + tag, or if * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 304]: + exp = CloudError(response) + exp.request_id = 
response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} + + def update( + self, resource_group_name, factory_name, integration_runtime_name, auto_update=None, update_delay_offset=None, custom_headers=None, raw=False, **operation_config): + """Updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param auto_update: Enables or disables the auto-update feature of the + self-hosted integration runtime. See + https://go.microsoft.com/fwlink/?linkid=854189. Possible values + include: 'On', 'Off' + :type auto_update: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :param update_delay_offset: The time offset (in hours) in the day, + e.g., PT03H is 3 hours. The integration runtime auto-update will + happen at that time. + :type update_delay_offset: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content =
self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} + + def delete( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Deletes an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} + + def get_status( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Gets detailed status information for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeStatusResponse or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_status.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} + + def get_connection_info( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Gets the on-premises integration runtime connection information for + encrypting the on-premises data source credentials. 
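# A hedged lifecycle sketch for the operations in this class, reusing the
# same hypothetical `client` and placeholder names:
#
#     from azure.mgmt.datafactory.models import SelfHostedIntegrationRuntime
#
#     # create_or_update wraps the given properties in an
#     # IntegrationRuntimeResource before sending the PUT request.
#     ir = client.integration_runtimes.create_or_update(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr',
#         properties=SelfHostedIntegrationRuntime(description='demo runtime'))
#
#     # list_by_factory returns a paged iterator over the factory's runtimes.
#     for ir in client.integration_runtimes.list_by_factory(
#             'myResourceGroup', 'myFactory'):
#         print(ir.name)
#
#     # get_status posts to .../getStatus and returns the runtime state.
#     status = client.integration_runtimes.get_status(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr')
#     print(status.properties.state)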
+ + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeConnectionInfo or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeConnectionInfo or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_connection_info.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} + + def regenerate_auth_key( + self, resource_group_name, factory_name, integration_runtime_name, key_name=None, custom_headers=None, raw=False, **operation_config): + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param key_name: The name of the authentication key to regenerate. 
+ Possible values include: 'authKey1', 'authKey2' + :type key_name: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeAuthKeys or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) + + # Construct URL + url = self.regenerate_auth_key.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} + + def list_auth_keys( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the authentication keys for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
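# A sketch pairing regenerate_auth_key() above with list_auth_keys(); all
# names remain hypothetical placeholders.
#
#     keys = client.integration_runtimes.list_auth_keys(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr')
#     print(keys.auth_key1)
#
#     # Rotate the secondary key; key_name accepts 'authKey1' or 'authKey2'.
#     keys = client.integration_runtimes.regenerate_auth_key(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr',
#         key_name='authKey2')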
+ :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: IntegrationRuntimeAuthKeys or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeys or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.list_auth_keys.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeAuthKeys', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} + + + def _start_initial( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.start.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct 
parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def start( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Starts a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + IntegrationRuntimeStatusResponse or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]] + :raises: :class:`CloudError` + """ + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} + + + def _stop_initial( + self, resource_group_name, factory_name, 
integration_runtime_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.stop.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def stop( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Stops a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
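# start() above and the stop() being defined here both return an LROPoller.
# A minimal sketch for a managed (ManagedReserved) runtime, names
# hypothetical:
#
#     poller = client.integration_runtimes.start(
#         'myResourceGroup', 'myFactory', 'mySsisIr')
#     status = poller.result()  # IntegrationRuntimeStatusResponse
#
#     client.integration_runtimes.stop(
#         'myResourceGroup', 'myFactory', 'mySsisIr').wait()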
+ :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} + + def sync_credentials( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Forces the integration runtime to synchronize credentials across + integration runtime nodes, overriding the credentials across all + worker nodes with those available on the dispatcher node. If you + already have the latest credential backup file, you should manually + import it (preferred) on any self-hosted integration runtime node + rather than using this API directly. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`.
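# sync_credentials() is a single POST that returns None on success
# (HTTP 200); a one-call sketch with placeholder names:
#
#     client.integration_runtimes.sync_credentials(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr')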
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.sync_credentials.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} + + def get_monitoring_data( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Get the integration runtime monitoring data, which includes the monitor + data for all the nodes under this integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
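# A sketch of get_monitoring_data(); the per-node field names below are
# assumptions about the IntegrationRuntimeMonitoringData model and may
# differ in practice.
#
#     data = client.integration_runtimes.get_monitoring_data(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr')
#     for node in data.nodes or []:
#         print(node.node_name, node.available_memory_in_mb)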
+ :return: IntegrationRuntimeMonitoringData or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeMonitoringData or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get_monitoring_data.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeMonitoringData', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} + + def upgrade( + self, resource_group_name, factory_name, integration_runtime_name, custom_headers=None, raw=False, **operation_config): + """Upgrades the self-hosted integration runtime to the latest version + if available. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`.
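# upgrade() is fire-and-forget: it returns None once the service accepts
# the request. Names are placeholders.
#
#     client.integration_runtimes.upgrade(
#         'myResourceGroup', 'myFactory', 'mySelfHostedIr')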
+        :return: None or ClientRawResponse if raw=true
+        :rtype: None or ~msrest.pipeline.ClientRawResponse
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        # Construct URL
+        url = self.upgrade.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        if raw:
+            client_raw_response = ClientRawResponse(None, response)
+            return client_raw_response
+    upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'}
+
+    def remove_links(
+            self, resource_group_name, factory_name, integration_runtime_name, linked_factory_name, custom_headers=None, raw=False, **operation_config):
+        """Remove all linked integration runtimes under a specific data
+        factory in a self-hosted integration runtime.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param integration_runtime_name: The integration runtime name.
+        :type integration_runtime_name: str
+        :param linked_factory_name: The data factory name for the linked
+         integration runtime.
+        :type linked_factory_name: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides<msrest:optionsforoperations>`.
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) + + # Construct URL + url = self.remove_links.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + remove_links.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} + + def create_linked_integration_runtime( + self, resource_group_name, factory_name, integration_runtime_name, create_linked_integration_runtime_request, custom_headers=None, raw=False, **operation_config): + """Create a linked integration runtime entry in a shared integration + runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param create_linked_integration_runtime_request: The linked + integration runtime properties. + :type create_linked_integration_runtime_request: + ~azure.mgmt.datafactory.models.CreateLinkedIntegrationRuntimeRequest + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: IntegrationRuntimeStatusResponse or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_linked_integration_runtime.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py new file mode 100644 index 000000000000..e6878336df91 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/linked_services_operations.py @@ -0,0 +1,314 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
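[Editor's note: the integration runtime operations above (sync_credentials, get_monitoring_data, upgrade, remove_links, create_linked_integration_runtime) all share the same POST-and-check call shape. A minimal usage sketch follows; it assumes an already-authenticated DataFactoryManagementClient bound to the name `client`, and every resource name in it is an illustrative placeholder, not a value from this patch.

    # Hedged sketch, not part of the generated module. `client` is assumed
    # to be a configured DataFactoryManagementClient.
    data = client.integration_runtimes.get_monitoring_data(
        'exampleResourceGroup', 'exampleFactory', 'exampleSelfHostedIr')
    for node in data.nodes or []:
        # Each entry holds per-node monitoring data for the runtime.
        print(node.node_name)

    # upgrade() is fire-and-forget: it returns None on HTTP 200, or a
    # ClientRawResponse when called with raw=True.
    client.integration_runtimes.upgrade(
        'exampleResourceGroup', 'exampleFactory', 'exampleSelfHostedIr')
]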
+# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class LinkedServicesOperations(object): + """LinkedServicesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists linked services. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of LinkedServiceResource + :rtype: + ~azure.mgmt.datafactory.models.LinkedServiceResourcePaged[~azure.mgmt.datafactory.models.LinkedServiceResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.LinkedServiceResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + 
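[Editor's note: the internal_paging closure above is the standard msrest paging pattern: the first call builds the full URL, and every subsequent call simply follows the service-supplied next_link, so iterating the returned LinkedServiceResourcePaged fetches pages lazily. A hedged sketch, assuming a configured DataFactoryManagementClient named `client`; the names below are placeholders.

    # Pages are requested on demand as the iterator advances.
    for ls in client.linked_services.list_by_factory(
            'exampleResourceGroup', 'exampleFactory'):
        # Each item is a LinkedServiceResource; `properties` carries the
        # type-specific LinkedService payload.
        print(ls.name, type(ls.properties).__name__)
]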
list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} + + def create_or_update( + self, resource_group_name, factory_name, linked_service_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param properties: Properties of linked service. + :type properties: ~azure.mgmt.datafactory.models.LinkedService + :param if_match: ETag of the linkedService entity. Should only be + specified for update, for which it should match existing entity or can + be * for unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: LinkedServiceResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + linked_service = models.LinkedServiceResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(linked_service, 'LinkedServiceResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', response) + + if raw: 
+ client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} + + def get( + self, resource_group_name, factory_name, linked_service_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param if_none_match: ETag of the linked service entity. Should only + be specified for get. If the ETag matches the existing entity tag, or + if * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: LinkedServiceResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.LinkedServiceResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 304]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('LinkedServiceResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} + + def delete( + self, resource_group_name, factory_name, linked_service_name, custom_headers=None, raw=False, **operation_config): + """Deletes a linked service. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param linked_service_name: The linked service name. + :type linked_service_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py new file mode 100644 index 000000000000..2273e12d5ada --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/operations.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
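[Editor's note: create_or_update, get, and delete above compose into the usual ETag-guarded round trip; note that get() treats HTTP 304 as a success and returns None rather than raising. A hedged sketch, assuming a configured DataFactoryManagementClient named `client`; the linked service payload and all names are illustrative placeholders.

    from azure.mgmt.datafactory.models import (
        AzureStorageLinkedService, SecureString)

    # Hypothetical connection string; supply a real one in practice.
    payload = AzureStorageLinkedService(connection_string=SecureString(
        value='DefaultEndpointsProtocol=https;AccountName=example;AccountKey=example'))
    created = client.linked_services.create_or_update(
        'exampleResourceGroup', 'exampleFactory', 'exampleStorage', payload)

    # Conditional read: if the entity is unchanged the service answers
    # 304 and this client returns None instead of a resource.
    maybe_changed = client.linked_services.get(
        'exampleResourceGroup', 'exampleFactory', 'exampleStorage',
        if_none_match=created.etag)

    client.linked_services.delete(
        'exampleResourceGroup', 'exampleFactory', 'exampleStorage')
]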
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class Operations(object): + """Operations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists the available Azure Data Factory API operations. + + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of Operation + :rtype: + ~azure.mgmt.datafactory.models.OperationPaged[~azure.mgmt.datafactory.models.Operation] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list.metadata['url'] + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.OperationPaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.OperationPaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py new file mode 100644 index 000000000000..de8744612d20 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipeline_runs_operations.py @@ -0,0 +1,233 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class PipelineRunsOperations(object): + """PipelineRunsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def query_by_factory( + self, resource_group_name, factory_name, filter_parameters, custom_headers=None, raw=False, **operation_config): + """Query pipeline runs in the factory based on input filter conditions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. + :type filter_parameters: + ~azure.mgmt.datafactory.models.RunFilterParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PipelineRunsQueryResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.PipelineRunsQueryResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.query_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code 
not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PipelineRunsQueryResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} + + def get( + self, resource_group_name, factory_name, run_id, custom_headers=None, raw=False, **operation_config): + """Get a pipeline run by its run ID. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param run_id: The pipeline run identifier. + :type run_id: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PipelineRun or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.PipelineRun or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'runId': self._serialize.url("run_id", run_id, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PipelineRun', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} + + def cancel( + self, resource_group_name, factory_name, run_id, is_recursive=None, custom_headers=None, raw=False, **operation_config): + """Cancel a pipeline run by its run ID. + + :param resource_group_name: The resource group name. 
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param run_id: The pipeline run identifier.
+        :type run_id: str
+        :param is_recursive: If true, cancel all the child pipelines that are
+         triggered by the current pipeline.
+        :type is_recursive: bool
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides<msrest:optionsforoperations>`.
+        :return: None or ClientRawResponse if raw=true
+        :rtype: None or ~msrest.pipeline.ClientRawResponse
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        # Construct URL
+        url = self.cancel.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'runId': self._serialize.url("run_id", run_id, 'str')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if is_recursive is not None:
+            query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool')
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        if raw:
+            client_raw_response = ClientRawResponse(None, response)
+            return client_raw_response
+    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'}
diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
new file mode 100644
index 000000000000..343396e705ac
--- /dev/null
+++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/pipelines_operations.py
@@ -0,0 +1,404 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
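[Editor's note: query_by_factory, get, and cancel above form the monitoring loop for pipeline runs; RunFilterParameters requires an update-time window. A hedged sketch, assuming a configured DataFactoryManagementClient named `client`; the window and every name below are illustrative.

    from datetime import datetime, timedelta

    from azure.mgmt.datafactory.models import RunFilterParameters

    # Query runs updated in the last 24 hours (window is illustrative).
    now = datetime.utcnow()
    filters = RunFilterParameters(
        last_updated_after=now - timedelta(days=1),
        last_updated_before=now)
    result = client.pipeline_runs.query_by_factory(
        'exampleResourceGroup', 'exampleFactory', filters)
    for run in result.value:
        print(run.run_id, run.status)
        if run.status == 'InProgress':
            # is_recursive=True also cancels child runs triggered by this
            # run, mirroring the isRecursive query parameter.
            client.pipeline_runs.cancel(
                'exampleResourceGroup', 'exampleFactory', run.run_id,
                is_recursive=True)
]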
+# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class PipelinesOperations(object): + """PipelinesOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists pipelines. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of PipelineResource + :rtype: + ~azure.mgmt.datafactory.models.PipelineResourcePaged[~azure.mgmt.datafactory.models.PipelineResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.PipelineResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_factory.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} + + def create_or_update( + self, resource_group_name, factory_name, pipeline_name, pipeline, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param pipeline: Pipeline resource definition. + :type pipeline: ~azure.mgmt.datafactory.models.PipelineResource + :param if_match: ETag of the pipeline entity. Should only be + specified for update, for which it should match existing entity or can + be * for unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PipelineResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.PipelineResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(pipeline, 'PipelineResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} + + def get( + self, resource_group_name, factory_name, pipeline_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a pipeline. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param pipeline_name: The pipeline name. + :type pipeline_name: str + :param if_none_match: ETag of the pipeline entity. Should only be + specified for get. If the ETag matches the existing entity tag, or if + * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PipelineResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.PipelineResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 304]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('PipelineResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} + + def delete( + self, resource_group_name, factory_name, pipeline_name, custom_headers=None, raw=False, **operation_config): + """Deletes a pipeline. 
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param pipeline_name: The pipeline name.
+        :type pipeline_name: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides<msrest:optionsforoperations>`.
+        :return: None or ClientRawResponse if raw=true
+        :rtype: None or ~msrest.pipeline.ClientRawResponse
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        # Construct URL
+        url = self.delete.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct and send request
+        request = self._client.delete(url, query_parameters, header_parameters)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200, 204]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        if raw:
+            client_raw_response = ClientRawResponse(None, response)
+            return client_raw_response
+    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'}
+
+    def create_run(
+            self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config):
+        """Creates a run of a pipeline.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param pipeline_name: The pipeline name.
+        :type pipeline_name: str
+        :param reference_pipeline_run_id: The pipeline run identifier. If the
+         run ID is specified, the parameters of the specified run will be used
+         to create a new run.
+        :type reference_pipeline_run_id: str
+        :param is_recovery: Recovery mode flag. If recovery mode is set to
+         true, the specified referenced pipeline run and the new run will be
+         grouped under the same groupId.
+        :type is_recovery: bool
+        :param start_activity_name: In recovery mode, the rerun will start
+         from this activity. If not specified, all activities will run.
+ :type start_activity_name: str + :param parameters: Parameters of the pipeline run. These parameters + will be used only if the runId is not specified. + :type parameters: dict[str, object] + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: CreateRunResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.CreateRunResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create_run.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + if reference_pipeline_run_id is not None: + query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') + if is_recovery is not None: + query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') + if start_activity_name is not None: + query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, '{object}') + else: + body_content = None + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('CreateRunResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py 
b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py new file mode 100644 index 000000000000..58e0066a60dd --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/rerun_triggers_operations.py @@ -0,0 +1,450 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class RerunTriggersOperations(object): + """RerunTriggersOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def create( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, rerun_tumbling_window_trigger_action_parameters, custom_headers=None, raw=False, **operation_config): + """Creates a rerun trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. + :type rerun_trigger_name: str + :param rerun_tumbling_window_trigger_action_parameters: Rerun tumbling + window trigger action parameters. + :type rerun_tumbling_window_trigger_action_parameters: + ~azure.mgmt.datafactory.models.RerunTumblingWindowTriggerActionParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: TriggerResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}'} + + + def _start_initial( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.start.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", 
rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def start( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Starts a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. + :type rerun_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + rerun_trigger_name=rerun_trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'} + + + def _stop_initial( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.stop.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def stop( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. 
+ :type rerun_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + rerun_trigger_name=rerun_trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'} + + + def _cancel_initial( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.cancel.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = 
ClientRawResponse(None, response) + return client_raw_response + + def cancel( + self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Cancels a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param rerun_trigger_name: The rerun trigger name. + :type rerun_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._cancel_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + rerun_trigger_name=rerun_trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'} + + def list_by_trigger( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + """Lists rerun triggers by an original trigger name. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
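+
+        Example (a minimal sketch; ``client`` and the resource names are
+        illustrative). The returned object is a lazy iterator that follows
+        the service's next link to fetch additional pages as you consume
+        it::
+
+            for rerun_trigger in client.rerun_triggers.list_by_trigger(
+                    'myResourceGroup', 'myFactory', 'myTrigger'):
+                print(rerun_trigger.name)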
+ :return: An iterator like instance of RerunTriggerResource + :rtype: + ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_trigger.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py new file mode 100644 index 000000000000..51e9b0ac37a3 --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/trigger_runs_operations.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. 
import models + + +class TriggerRunsOperations(object): + """TriggerRunsOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def query_by_factory( + self, resource_group_name, factory_name, filter_parameters, custom_headers=None, raw=False, **operation_config): + """Query trigger runs. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param filter_parameters: Parameters to filter the pipeline run. + :type filter_parameters: + ~azure.mgmt.datafactory.models.RunFilterParameters + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: TriggerRunsQueryResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerRunsQueryResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.query_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerRunsQueryResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + query_by_factory.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py new file mode 100644 index 000000000000..f80cfcb2870b --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/triggers_operations.py @@ -0,0 +1,482 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class TriggersOperations(object): + """TriggersOperations operations. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
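+
+        Example (a minimal sketch; ``client`` and the resource names are
+        illustrative, and ``runtime_state`` is assumed to be the read-only
+        state property on the trigger model)::
+
+            for trigger in client.triggers.list_by_factory(
+                    'myResourceGroup', 'myFactory'):
+                print(trigger.name, trigger.properties.runtime_state)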
+ :return: An iterator like instance of TriggerResource + :rtype: + ~azure.mgmt.datafactory.models.TriggerResourcePaged[~azure.mgmt.datafactory.models.TriggerResource] + :raises: :class:`CloudError` + """ + def internal_paging(next_link=None, raw=False): + + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + deserialized = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies) + + if raw: + header_dict = {} + client_raw_response = models.TriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + return client_raw_response + + return deserialized + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} + + def create_or_update( + self, resource_group_name, factory_name, trigger_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: ~azure.mgmt.datafactory.models.Trigger + :param if_match: ETag of the trigger entity. Should only be specified + for update, for which it should match existing entity or can be * for + unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
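+
+        Example (a minimal sketch of an optimistic-concurrency update;
+        ``client`` and the resource names are illustrative). The method
+        wraps ``properties`` in a TriggerResource for you, and the
+        If-Match header is sent only when ``if_match`` is supplied::
+
+            existing = client.triggers.get(
+                'myResourceGroup', 'myFactory', 'myTrigger')
+            existing.properties.description = 'updated by deployment script'
+            updated = client.triggers.create_or_update(
+                'myResourceGroup', 'myFactory', 'myTrigger',
+                existing.properties, if_match=existing.etag)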
+ :return: TriggerResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + trigger = models.TriggerResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(trigger, 'TriggerResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} + + def get( + self, resource_group_name, factory_name, trigger_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param if_none_match: ETag of the trigger entity. Should only be + specified for get. If the ETag matches the existing entity tag, or if + * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
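+
+        Example (a minimal sketch; ``client`` and the resource names are
+        illustrative). A 304 Not Modified response is not raised as an
+        error; the method simply returns None in that case::
+
+            trigger = client.triggers.get(
+                'myResourceGroup', 'myFactory', 'myTrigger')
+            # Pass the ETag back to skip re-downloading an unchanged entity.
+            maybe_newer = client.triggers.get(
+                'myResourceGroup', 'myFactory', 'myTrigger',
+                if_none_match=trigger.etag)
+            if maybe_newer is None:
+                print('trigger unchanged since last fetch')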
+ :return: TriggerResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 304]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} + + def delete( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + """Deletes a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
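+
+        Example (a minimal sketch; ``client`` and the resource names are
+        illustrative). Both 200 and 204 status codes are accepted as
+        success::
+
+            client.triggers.delete(
+                'myResourceGroup', 'myFactory', 'myTrigger')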
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} + + + def _start_initial( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.start.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, 
stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def start( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Starts a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} + + + def _stop_initial( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.stop.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') 
+ + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + def stop( + self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: :class:`CloudError` + """ + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} diff --git a/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py new file mode 100644 index 000000000000..a39916c162ce --- /dev/null +++ b/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0" +
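A minimal end-to-end sketch of the operation groups added above, assuming ``credentials`` was obtained elsewhere (for example a ServicePrincipalCredentials instance); the client attribute names (``pipelines``, ``triggers``) are assumed from the operation class names in this patch, and all resource names and parameter values are illustrative:

    from azure.mgmt.datafactory import DataFactoryManagementClient

    client = DataFactoryManagementClient(credentials, 'my-subscription-id')

    # Kick off a pipeline run; the response carries the new run id.
    run = client.pipelines.create_run(
        'myResourceGroup', 'myFactory', 'myPipeline',
        parameters={'outputPath': 'out/2019-05-29'})
    print(run.run_id)

    # start() is a long-running operation and returns an LROPoller;
    # wait() blocks until completion (result() returns None here).
    poller = client.triggers.start('myResourceGroup', 'myFactory', 'myTrigger')
    poller.wait()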