Kinetica C# API Version 7.2.3.0
AlterDatasource.cs
/*
 * This file was autogenerated by the Kinetica schema processor.
 *
 * DO NOT EDIT DIRECTLY.
 */

using System.Collections.Generic;

namespace kinetica
{
    /// <summary>A set of parameters for Kinetica.alterDatasource.</summary>
    public class AlterDatasourceRequest : KineticaData
    {
        /// <summary>A set of string constants for the parameter
        /// datasource_updates_map.</summary>
        public struct DatasourceUpdatesMap
        {
            /// <summary>Location of the remote storage in
            /// 'storage_provider_type://[storage_path[:storage_port]]' format.</summary>
            public const string LOCATION = "location";

            /// <summary>Name of the remote system user; may be an empty string.</summary>
            public const string USER_NAME = "user_name";

            /// <summary>Password for the remote system user; may be an empty string.</summary>
            public const string PASSWORD = "password";

            /// <summary>Bypass validation of connection to remote source.</summary>
            public const string SKIP_VALIDATION = "skip_validation";

            public const string TRUE = "true";
            public const string FALSE = "false";

            /// <summary>Timeout in seconds for connecting to this storage provider.</summary>
            public const string CONNECTION_TIMEOUT = "connection_timeout";

            /// <summary>Timeout in seconds for reading from this storage provider.</summary>
            public const string WAIT_TIMEOUT = "wait_timeout";

            /// <summary>Name of the credential object to be used in the data source.</summary>
            public const string CREDENTIAL = "credential";

            /// <summary>Name of the Amazon S3 bucket to use as the data source.</summary>
            public const string S3_BUCKET_NAME = "s3_bucket_name";

            /// <summary>Name of the Amazon S3 region where the given bucket is located.</summary>
            public const string S3_REGION = "s3_region";

            /// <summary>Whether to verify SSL connections.</summary>
            public const string S3_VERIFY_SSL = "s3_verify_ssl";

            /// <summary>Whether to use virtual addressing when referencing the Amazon S3
            /// source.</summary>
            public const string S3_USE_VIRTUAL_ADDRESSING = "s3_use_virtual_addressing";

            /// <summary>Amazon IAM Role ARN which has required S3 permissions that can be
            /// assumed for the given S3 IAM user.</summary>
            public const string S3_AWS_ROLE_ARN = "s3_aws_role_arn";

            /// <summary>Customer encryption algorithm used for encrypting data.</summary>
            public const string S3_ENCRYPTION_CUSTOMER_ALGORITHM = "s3_encryption_customer_algorithm";

            /// <summary>Customer encryption key to encrypt or decrypt data.</summary>
            public const string S3_ENCRYPTION_CUSTOMER_KEY = "s3_encryption_customer_key";

            /// <summary>Kerberos keytab file location for the given HDFS user.</summary>
            public const string HDFS_KERBEROS_KEYTAB = "hdfs_kerberos_keytab";

            /// <summary>Delegation token for the given HDFS user.</summary>
            public const string HDFS_DELEGATION_TOKEN = "hdfs_delegation_token";

            /// <summary>Use Kerberos authentication for the given HDFS cluster.</summary>
            public const string HDFS_USE_KERBEROS = "hdfs_use_kerberos";

            /// <summary>Name of the Azure storage account to use as the data source;
            /// this is valid only if tenant_id is specified.</summary>
            public const string AZURE_STORAGE_ACCOUNT_NAME = "azure_storage_account_name";

            /// <summary>Name of the Azure storage container to use as the data source.</summary>
            public const string AZURE_CONTAINER_NAME = "azure_container_name";

            /// <summary>Active Directory tenant ID (or directory ID).</summary>
            public const string AZURE_TENANT_ID = "azure_tenant_id";

            /// <summary>Shared access signature token for the Azure storage account to use
            /// as the data source.</summary>
            public const string AZURE_SAS_TOKEN = "azure_sas_token";

            /// <summary>OAuth token to access the given storage container.</summary>
            public const string AZURE_OAUTH_TOKEN = "azure_oauth_token";

            /// <summary>Name of the Google Cloud Storage bucket to use as the data source.</summary>
            public const string GCS_BUCKET_NAME = "gcs_bucket_name";

            /// <summary>Name of the Google Cloud project to use as the data source.</summary>
            public const string GCS_PROJECT_ID = "gcs_project_id";

            /// <summary>Google Cloud service account keys to use for authenticating the
            /// data source.</summary>
            public const string GCS_SERVICE_ACCOUNT_KEYS = "gcs_service_account_keys";

            /// <summary>JDBC driver JAR file location.</summary>
            public const string JDBC_DRIVER_JAR_PATH = "jdbc_driver_jar_path";

            /// <summary>Name of the JDBC driver class.</summary>
            public const string JDBC_DRIVER_CLASS_NAME = "jdbc_driver_class_name";

            /// <summary>The publicly-accessible full path URL to the Kafka broker,
            /// e.g., 'http://172.123.45....'</summary>
            public const string KAFKA_URL = "kafka_url";

            /// <summary>Name of the Kafka topic to use as the data source.</summary>
            public const string KAFKA_TOPIC_NAME = "kafka_topic_name";

            /// <summary>Create an anonymous connection to the storage provider.
            /// DEPRECATED: this is now the default.</summary>
            public const string ANONYMOUS = "anonymous";

            /// <summary>When no credentials are supplied, anonymous access is used by
            /// default.</summary>
            public const string USE_MANAGED_CREDENTIALS = "use_managed_credentials";

            /// <summary>Use HTTPS to connect to the data source if true, otherwise use
            /// HTTP.</summary>
            public const string USE_HTTPS = "use_https";

            /// <summary>Updates the schema name.</summary>
            public const string SCHEMA_NAME = "schema_name";

            /// <summary>Location of the Confluent Schema Registry in
            /// '[storage_path[:storage_port]]' format.</summary>
            public const string SCHEMA_REGISTRY_LOCATION = "schema_registry_location";

            /// <summary>Confluent Schema Registry credential object name.</summary>
            public const string SCHEMA_REGISTRY_CREDENTIAL = "schema_registry_credential";

            /// <summary>Confluent Schema Registry port (optional).</summary>
            public const string SCHEMA_REGISTRY_PORT = "schema_registry_port";
        } // end struct DatasourceUpdatesMap

        /// <summary>Name of the data source to be altered.</summary>
        public string name { get; set; }

        /// <summary>Map containing the properties of the data source to be updated.</summary>
        public IDictionary<string, string> datasource_updates_map { get; set; } = new Dictionary<string, string>();

        /// <summary>Optional parameters.</summary>
        public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();

        /// <summary>Constructs an AlterDatasourceRequest object with default
        /// parameters.</summary>
        public AlterDatasourceRequest() { }

        /// <summary>Constructs an AlterDatasourceRequest object with the specified
        /// parameters.</summary>
        public AlterDatasourceRequest( string name,
                                       IDictionary<string, string> datasource_updates_map,
                                       IDictionary<string, string> options)
        {
            this.name = name ?? "";
            this.datasource_updates_map = datasource_updates_map ?? new Dictionary<string, string>();
            this.options = options ?? new Dictionary<string, string>();
        } // end constructor
    } // end class AlterDatasourceRequest

    /// <summary>A set of results returned by Kinetica.alterDatasource.</summary>
    public class AlterDatasourceResponse : KineticaData
    {
        /// <summary>Map of values updated.</summary>
        public IDictionary<string, string> updated_properties_map { get; set; } = new Dictionary<string, string>();

        /// <summary>Additional information.</summary>
        public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
    } // end class AlterDatasourceResponse
} // end namespace kinetica
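For reference, a minimal usage sketch follows. It assumes a reachable Kinetica cluster at a hypothetical URL, plus a hypothetical data source ("my_datasource") and credential object ("my_s3_credential"); the DatasourceUpdatesMap constants supply the map keys, and TRUE supplies the boolean value for skip_validation.

using System;
using System.Collections.Generic;
using kinetica;

public static class AlterDatasourceExample
{
    public static void Main()
    {
        // Hypothetical cluster endpoint; substitute your own URL.
        Kinetica db = new Kinetica("http://localhost:9191");

        // Keys come from the DatasourceUpdatesMap constants; the credential
        // name and the skip_validation flag are illustrative values.
        var updates = new Dictionary<string, string>
        {
            { AlterDatasourceRequest.DatasourceUpdatesMap.CREDENTIAL, "my_s3_credential" },
            { AlterDatasourceRequest.DatasourceUpdatesMap.SKIP_VALIDATION,
              AlterDatasourceRequest.DatasourceUpdatesMap.TRUE }
        };

        var request = new AlterDatasourceRequest(
            "my_datasource",                       // hypothetical data source name
            updates,
            new Dictionary<string, string>());     // no optional parameters

        AlterDatasourceResponse response = db.alterDatasource(request);

        // updated_properties_map echoes the properties that were changed.
        foreach (KeyValuePair<string, string> kv in response.updated_properties_map)
            Console.WriteLine($"{kv.Key} = {kv.Value}");
    }
}

Note that the null-coalescing defaults in the parameterized constructor mean a null name or null map falls back to an empty string or empty dictionary rather than failing.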
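Because datasource_updates_map is initialized to an empty dictionary, a request can also be assembled with the default constructor and property setters. The fragment below retargets a hypothetical Kafka data source and its Confluent Schema Registry (the name, broker URL, topic, and registry location are all illustrative); it would be submitted through alterDatasource exactly as in the sketch above.

// Build the same kind of request incrementally via properties.
var kafkaRequest = new AlterDatasourceRequest();
kafkaRequest.name = "my_kafka_datasource";  // hypothetical data source name
kafkaRequest.datasource_updates_map[
    AlterDatasourceRequest.DatasourceUpdatesMap.KAFKA_URL] =
    "http://broker.example.com:9092";       // illustrative broker URL
kafkaRequest.datasource_updates_map[
    AlterDatasourceRequest.DatasourceUpdatesMap.KAFKA_TOPIC_NAME] =
    "events";                               // illustrative topic name
kafkaRequest.datasource_updates_map[
    AlterDatasourceRequest.DatasourceUpdatesMap.SCHEMA_REGISTRY_LOCATION] =
    "registry.example.com:8081";            // illustrative registry location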