Kinetica C# API Version 7.2.3.0
AlterDatasink.cs
/*
 * This file was autogenerated by the Kinetica schema processor.
 *
 * DO NOT EDIT DIRECTLY.
 */

using System.Collections.Generic;

namespace kinetica
{
    /// <summary>A set of parameters for Kinetica.alterDatasink.</summary>
    public class AlterDatasinkRequest : KineticaData
    {
        /// <summary>A set of string constants for the parameter
        /// datasink_updates_map.</summary>
        public struct DatasinkUpdatesMap
        {
            /// <summary>Destination for the output data in format
            /// 'destination_type://path[:port]'.</summary>
            public const string DESTINATION = "destination";

            /// <summary>Timeout in seconds for connecting to this sink.</summary>
            public const string CONNECTION_TIMEOUT = "connection_timeout";

            /// <summary>Timeout in seconds for waiting for a response from this
            /// sink.</summary>
            public const string WAIT_TIMEOUT = "wait_timeout";

            /// <summary>Name of the credential object to be used in this data
            /// sink.</summary>
            public const string CREDENTIAL = "credential";

            /// <summary>Name of the Amazon S3 bucket to use as the data sink.</summary>
            public const string S3_BUCKET_NAME = "s3_bucket_name";

            /// <summary>Name of the Amazon S3 region where the given bucket is
            /// located.</summary>
            public const string S3_REGION = "s3_region";

            /// <summary>Whether to verify SSL connections.</summary>
            public const string S3_VERIFY_SSL = "s3_verify_ssl";

            public const string TRUE = "true";
            public const string FALSE = "false";

            /// <summary>Whether to use virtual addressing when referencing the
            /// Amazon S3 sink.</summary>
            public const string S3_USE_VIRTUAL_ADDRESSING = "s3_use_virtual_addressing";

            /// <summary>Amazon IAM Role ARN which has required S3 permissions that
            /// can be assumed for the given S3 IAM user.</summary>
            public const string S3_AWS_ROLE_ARN = "s3_aws_role_arn";

            /// <summary>Customer encryption algorithm used for encrypting data.</summary>
            public const string S3_ENCRYPTION_CUSTOMER_ALGORITHM = "s3_encryption_customer_algorithm";

            /// <summary>Customer encryption key to encrypt or decrypt data.</summary>
            public const string S3_ENCRYPTION_CUSTOMER_KEY = "s3_encryption_customer_key";

            /// <summary>Server-side encryption type.</summary>
            public const string S3_ENCRYPTION_TYPE = "s3_encryption_type";

            public const string S3_KMS_KEY_ID = "s3_kms_key_id";

            /// <summary>Kerberos keytab file location for the given HDFS user.</summary>
            public const string HDFS_KERBEROS_KEYTAB = "hdfs_kerberos_keytab";

            /// <summary>Delegation token for the given HDFS user.</summary>
            public const string HDFS_DELEGATION_TOKEN = "hdfs_delegation_token";

            /// <summary>Use Kerberos authentication for the given HDFS cluster.</summary>
            public const string HDFS_USE_KERBEROS = "hdfs_use_kerberos";

            /// <summary>Name of the Azure storage account to use as the data sink;
            /// this is valid only if tenant_id is specified.</summary>
            public const string AZURE_STORAGE_ACCOUNT_NAME = "azure_storage_account_name";

            /// <summary>Name of the Azure storage container to use as the data
            /// sink.</summary>
            public const string AZURE_CONTAINER_NAME = "azure_container_name";

            /// <summary>Active Directory tenant ID (or directory ID).</summary>
            public const string AZURE_TENANT_ID = "azure_tenant_id";

            /// <summary>Shared access signature token for the Azure storage account
            /// to use as the data sink.</summary>
            public const string AZURE_SAS_TOKEN = "azure_sas_token";

            /// <summary>OAuth token to access the given storage container.</summary>
            public const string AZURE_OAUTH_TOKEN = "azure_oauth_token";

            /// <summary>Name of the Google Cloud Storage bucket to use as the data
            /// sink.</summary>
            public const string GCS_BUCKET_NAME = "gcs_bucket_name";

            /// <summary>Name of the Google Cloud project to use as the data sink.</summary>
            public const string GCS_PROJECT_ID = "gcs_project_id";

            /// <summary>Google Cloud service account keys to use for authenticating
            /// the data sink.</summary>
            public const string GCS_SERVICE_ACCOUNT_KEYS = "gcs_service_account_keys";

            /// <summary>JDBC driver jar file location.</summary>
            public const string JDBC_DRIVER_JAR_PATH = "jdbc_driver_jar_path";

            /// <summary>Name of the JDBC driver class.</summary>
            public const string JDBC_DRIVER_CLASS_NAME = "jdbc_driver_class_name";

            /// <summary>The publicly-accessible full path URL to the Kafka broker,
            /// e.g., 'http://172.123.45....'.</summary>
            public const string KAFKA_URL = "kafka_url";

            /// <summary>Name of the Kafka topic to use for this data sink, if it
            /// references a Kafka broker.</summary>
            public const string KAFKA_TOPIC_NAME = "kafka_topic_name";

            /// <summary>Create an anonymous connection to the storage provider.
            /// DEPRECATED: this is now the default.</summary>
            public const string ANONYMOUS = "anonymous";

            /// <summary>When no credentials are supplied, anonymous access is used
            /// by default.</summary>
            public const string USE_MANAGED_CREDENTIALS = "use_managed_credentials";

            /// <summary>Use HTTPS to connect to the data sink if true, otherwise use
            /// HTTP.</summary>
            public const string USE_HTTPS = "use_https";

            /// <summary>Maximum number of records per notification message.</summary>
            public const string MAX_BATCH_SIZE = "max_batch_size";

            /// <summary>Maximum size in bytes of each notification message.</summary>
            public const string MAX_MESSAGE_SIZE = "max_message_size";

            /// <summary>The desired format of JSON-encoded notification messages.</summary>
            public const string JSON_FORMAT = "json_format";

            /// <summary>A single record is returned per message.</summary>
            public const string FLAT = "flat";

            /// <summary>Records are returned as an array per message.</summary>
            public const string NESTED = "nested";

            /// <summary>Bypass validation of connection to this data sink.</summary>
            public const string SKIP_VALIDATION = "skip_validation";

            /// <summary>Updates the schema name.</summary>
            public const string SCHEMA_NAME = "schema_name";
        } // end struct DatasinkUpdatesMap

        /// <summary>Name of the data sink to be altered.</summary>
        public string name { get; set; }

        /// <summary>Map containing the properties of the data sink to be
        /// updated.</summary>
        public IDictionary<string, string> datasink_updates_map { get; set; } = new Dictionary<string, string>();

        /// <summary>Optional parameters.</summary>
        public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();

        /// <summary>Constructs an AlterDatasinkRequest object with default
        /// parameters.</summary>
        public AlterDatasinkRequest() { }

        /// <summary>Constructs an AlterDatasinkRequest object with the specified
        /// parameters.</summary>
        public AlterDatasinkRequest( string name,
                                     IDictionary<string, string> datasink_updates_map,
                                     IDictionary<string, string> options)
        {
            this.name = name ?? "";
            this.datasink_updates_map = datasink_updates_map ?? new Dictionary<string, string>();
            this.options = options ?? new Dictionary<string, string>();
        } // end constructor
    } // end class AlterDatasinkRequest

    /// <summary>A set of results returned by Kinetica.alterDatasink.</summary>
    public class AlterDatasinkResponse : KineticaData
    {
        /// <summary>Map of values updated.</summary>
        public IDictionary<string, string> updated_properties_map { get; set; } = new Dictionary<string, string>();

        /// <summary>Additional information.</summary>
        public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
    } // end class AlterDatasinkResponse
} // end namespace kinetica
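
A minimal usage sketch follows, assuming a reachable Kinetica instance; the endpoint URL, the sink name "my_datasink", and the Kafka broker address are placeholders, and the alterDatasink call is the generated API method referenced by the class doc comment above.

    using System;
    using System.Collections.Generic;
    using kinetica;

    public static class AlterDatasinkExample
    {
        public static void Main()
        {
            // Placeholder endpoint; substitute a real Kinetica URL.
            Kinetica kdb = new Kinetica("http://localhost:9191");

            // Re-point an existing sink at a different Kafka broker and topic,
            // using the 'destination_type://path[:port]' format for DESTINATION.
            var updates = new Dictionary<string, string>
            {
                { AlterDatasinkRequest.DatasinkUpdatesMap.DESTINATION, "kafka://kafka.example.com:9092" },
                { AlterDatasinkRequest.DatasinkUpdatesMap.KAFKA_TOPIC_NAME, "alerts" },
                { AlterDatasinkRequest.DatasinkUpdatesMap.CONNECTION_TIMEOUT, "30" }
            };

            var request = new AlterDatasinkRequest(
                "my_datasink",                       // hypothetical existing sink
                updates,
                new Dictionary<string, string>());   // no optional parameters

            AlterDatasinkResponse response = kdb.alterDatasink(request);

            // updated_properties_map echoes the property values that were applied.
            foreach (var entry in response.updated_properties_map)
                Console.WriteLine($"{entry.Key} = {entry.Value}");
        }
    }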
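
Every value in the update map is a string, including boolean-valued properties such as use_https and s3_verify_ssl; reading the generated constants suggests the TRUE and FALSE members of DatasinkUpdatesMap supply the expected string values for those. A short sketch reusing the kdb connection and the hypothetical "my_datasink" sink from the previous example:

    // Switch the sink to HTTPS and to nested JSON notification messages.
    var formatUpdates = new Dictionary<string, string>
    {
        { AlterDatasinkRequest.DatasinkUpdatesMap.USE_HTTPS,
          AlterDatasinkRequest.DatasinkUpdatesMap.TRUE },
        { AlterDatasinkRequest.DatasinkUpdatesMap.JSON_FORMAT,
          AlterDatasinkRequest.DatasinkUpdatesMap.NESTED }
    };

    // Passing null for options is safe here: the request constructor
    // substitutes an empty dictionary via its null-coalescing assignments.
    kdb.alterDatasink(new AlterDatasinkRequest("my_datasink", formatUpdates, null));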