Kinetica C# API Version 7.1.10.0
AlterDatasink.cs
/*
 * This file was autogenerated by the Kinetica schema processor.
 *
 * DO NOT EDIT DIRECTLY.
 */

using System.Collections.Generic;


namespace kinetica
{

    /// <summary>A set of parameters for
    /// Kinetica.alterDatasink(string,IDictionary{string, string},IDictionary{string, string}).
    /// </summary>
    public class AlterDatasinkRequest : KineticaData
    {

        /// <summary>Map containing the properties of the data sink to be
        /// updated.</summary>
        public struct DatasinkUpdatesMap
        {
            /// <summary>Destination for the output data in format
            /// 'destination_type://path[:port]'.</summary>
            public const string DESTINATION = "destination";

            /// <summary>Timeout in seconds for connecting to this sink.</summary>
            public const string CONNECTION_TIMEOUT = "connection_timeout";

            /// <summary>Timeout in seconds for waiting for a response from this sink.</summary>
            public const string WAIT_TIMEOUT = "wait_timeout";

            /// <summary>Name of the credential object to be used in this data sink.</summary>
            public const string CREDENTIAL = "credential";

            /// <summary>Name of the Amazon S3 bucket to use as the data sink.</summary>
            public const string S3_BUCKET_NAME = "s3_bucket_name";

            /// <summary>Name of the Amazon S3 region where the given bucket is located.</summary>
            public const string S3_REGION = "s3_region";

            /// <summary>Amazon IAM Role ARN which has required S3 permissions that can be
            /// assumed for the given S3 IAM user.</summary>
            public const string S3_AWS_ROLE_ARN = "s3_aws_role_arn";

            /// <summary>Kerberos keytab file location for the given HDFS user.</summary>
            public const string HDFS_KERBEROS_KEYTAB = "hdfs_kerberos_keytab";

            /// <summary>Delegation token for the given HDFS user.</summary>
            public const string HDFS_DELEGATION_TOKEN = "hdfs_delegation_token";

            /// <summary>Use Kerberos authentication for the given HDFS cluster.
            /// Supported values: TRUE, FALSE.</summary>
            public const string HDFS_USE_KERBEROS = "hdfs_use_kerberos";
            public const string TRUE = "true";
            public const string FALSE = "false";

            /// <summary>Name of the Azure storage account to use as the data sink; this is
            /// valid only if tenant_id is specified.</summary>
            public const string AZURE_STORAGE_ACCOUNT_NAME = "azure_storage_account_name";

            /// <summary>Name of the Azure storage container to use as the data sink.</summary>
            public const string AZURE_CONTAINER_NAME = "azure_container_name";

            /// <summary>Active Directory tenant ID (or directory ID).</summary>
            public const string AZURE_TENANT_ID = "azure_tenant_id";

            /// <summary>Shared access signature token for Azure storage account to use as
            /// the data sink.</summary>
            public const string AZURE_SAS_TOKEN = "azure_sas_token";

            /// <summary>OAuth token to access given storage container.</summary>
            public const string AZURE_OAUTH_TOKEN = "azure_oauth_token";

            /// <summary>Name of the Google Cloud Storage bucket to use as the data sink.</summary>
            public const string GCS_BUCKET_NAME = "gcs_bucket_name";

            /// <summary>Name of the Google Cloud project to use as the data sink.</summary>
            public const string GCS_PROJECT_ID = "gcs_project_id";

            /// <summary>Google Cloud service account keys to use for authenticating the
            /// data sink.</summary>
            public const string GCS_SERVICE_ACCOUNT_KEYS = "gcs_service_account_keys";

            /// <summary>The publicly-accessible full path URL to the Kafka broker, e.g.,
            /// 'http://172.123.45.67:9300'.</summary>
            public const string KAFKA_URL = "kafka_url";

            /// <summary>Name of the Kafka topic to use for this data sink, if it references
            /// a Kafka broker.</summary>
            public const string KAFKA_TOPIC_NAME = "kafka_topic_name";

            /// <summary>Create an anonymous connection to the storage provider --
            /// DEPRECATED: this is now the default.</summary>
            public const string ANONYMOUS = "anonymous";

            /// <summary>When no credentials are supplied, anonymous access is used by
            /// default.</summary>
            public const string USE_MANAGED_CREDENTIALS = "use_managed_credentials";

            /// <summary>Use https to connect to datasink if true, otherwise use http.
            /// Supported values: TRUE, FALSE.</summary>
            public const string USE_HTTPS = "use_https";

            /// <summary>Maximum number of records per notification message.</summary>
            public const string MAX_BATCH_SIZE = "max_batch_size";

            /// <summary>Maximum size in bytes of each notification message.</summary>
            public const string MAX_MESSAGE_SIZE = "max_message_size";

            /// <summary>The desired format of JSON encoded notifications message.
            /// Supported values: FLAT, NESTED.</summary>
            public const string JSON_FORMAT = "json_format";
            public const string FLAT = "flat";
            public const string NESTED = "nested";

            /// <summary>Bypass validation of connection to this data sink.</summary>
            public const string SKIP_VALIDATION = "skip_validation";

            /// <summary>Updates the schema name.</summary>
            public const string SCHEMA_NAME = "schema_name";
        } // end struct DatasinkUpdatesMap


        /// <summary>Name of the data sink to be altered.</summary>
        public string name { get; set; }

        /// <summary>Map containing the properties of the data sink to be
        /// updated.</summary>
        public IDictionary<string, string> datasink_updates_map { get; set; } = new Dictionary<string, string>();

        /// <summary>Optional parameters.</summary>
        public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();


        /// <summary>Constructs an AlterDatasinkRequest object with default
        /// parameters.</summary>
        public AlterDatasinkRequest() { }

        /// <summary>Constructs an AlterDatasinkRequest object with the
        /// specified parameters.</summary>
        public AlterDatasinkRequest( string name,
                                     IDictionary<string, string> datasink_updates_map,
                                     IDictionary<string, string> options)
        {
            this.name = name ?? "";
            this.datasink_updates_map = datasink_updates_map ?? new Dictionary<string, string>();
            this.options = options ?? new Dictionary<string, string>();
        } // end constructor

    } // end class AlterDatasinkRequest



    /// <summary>A set of results returned by
    /// Kinetica.alterDatasink(string,IDictionary{string, string},IDictionary{string, string}).
    /// </summary>
    public class AlterDatasinkResponse : KineticaData
    {
        /// <summary>Map of values updated.</summary>
        public IDictionary<string, string> updated_properties_map { get; set; } = new Dictionary<string, string>();

        /// <summary>Additional information.</summary>
        public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();

    } // end class AlterDatasinkResponse




} // end namespace kinetica
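
Usage sketch (not part of the generated file): the snippet below assumes a reachable Kinetica endpoint and an existing data sink named "my_datasink" (both hypothetical placeholders), and shows how the DatasinkUpdatesMap constants feed the Kinetica.alterDatasink(string, IDictionary{string, string}, IDictionary{string, string}) call documented above.

// A minimal sketch; the endpoint URL, sink name, topic, and timeout value
// are placeholders, not values taken from the generated file.
using System;
using System.Collections.Generic;
using kinetica;

public static class AlterDatasinkExample
{
    public static void Main()
    {
        // Connect to a (hypothetical) Kinetica instance.
        Kinetica kdb = new Kinetica("http://localhost:9191");

        // Properties of the data sink to update, keyed by the
        // AlterDatasinkRequest.DatasinkUpdatesMap constants.
        var updates = new Dictionary<string, string>
        {
            { AlterDatasinkRequest.DatasinkUpdatesMap.DESTINATION, "kafka://172.123.45.67:9300" },
            { AlterDatasinkRequest.DatasinkUpdatesMap.KAFKA_TOPIC_NAME, "my_topic" },
            { AlterDatasinkRequest.DatasinkUpdatesMap.CONNECTION_TIMEOUT, "30" }
        };

        // Alter the data sink; no optional parameters are passed.
        AlterDatasinkResponse response = kdb.alterDatasink(
            "my_datasink",
            updates,
            new Dictionary<string, string>());

        // updated_properties_map echoes the values that were applied.
        foreach (var kv in response.updated_properties_map)
            Console.WriteLine($"{kv.Key} = {kv.Value}");
    }
}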