using System.Collections.Generic;

// Constant values used in datasink property maps:
public const string TRUE = "true";
public const string FALSE = "false";
public const string FLAT = "flat";

// AlterDatasinkRequest members:
public string name { get; set; }

public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();

public AlterDatasinkRequest( string name,
                             IDictionary<string, string> datasink_updates_map,
                             IDictionary<string, string> options )
{
    this.name = name ?? "";
    this.datasink_updates_map = datasink_updates_map ?? new Dictionary<string, string>();
    this.options = options ?? new Dictionary<string, string>();
}

// From the result class returned by alterDatasink:
public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
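Note how every constructor argument is null-coalesced into a safe default. A minimal construction sketch, assuming the client namespace is kinetica and that the KAFKA_URL constant listed below holds the lowercase property key "kafka_url" (the sink name is a placeholder):

using System;
using System.Collections.Generic;
using kinetica;  // Kinetica C# client namespace (assumed)

// Point an existing data sink at a new Kafka broker; passing null for
// options lets the constructor substitute an empty dictionary.
var updates = new Dictionary<string, string>
{
    // "kafka_url" is the property key the KAFKA_URL constant is assumed to hold
    { "kafka_url", "http://172.123.45.67:9300" }
};
var request = new AlterDatasinkRequest("my_kafka_sink", updates, null);
Console.WriteLine(request.options.Count);  // prints 0: the ?? default kicked in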
A set of parameters for Kinetica.alterDatasink(string, IDictionary{string, string}, IDictionary{string, string}):

AlterDatasinkRequest()
Constructs an AlterDatasinkRequest object with default parameters.

AlterDatasinkRequest(string name, IDictionary< string, string > datasink_updates_map, IDictionary< string, string > options)
Constructs an AlterDatasinkRequest object with the specified parameters (see the usage sketch at the end of this list).

string name
Name of the data sink to be altered.

IDictionary< string, string > datasink_updates_map
Map containing the properties of the data sink to be updated.

IDictionary< string, string > options
Optional parameters.

const string ANONYMOUS
Create an anonymous connection to the storage provider. DEPRECATED: this is now the default.
const string AZURE_CONTAINER_NAME
Name of the Azure storage container to use as the data sink.
const string AZURE_OAUTH_TOKEN
OAuth token to access the given storage container.
const string AZURE_SAS_TOKEN
Shared access signature token for the Azure storage account to use as the data sink.
const string AZURE_STORAGE_ACCOUNT_NAME
Name of the Azure storage account to use as the data sink; this is valid only if tenant_id is specified.
const string AZURE_TENANT_ID
Active Directory tenant ID (or directory ID).
const string CONNECTION_TIMEOUT
Timeout in seconds for connecting to this sink.
const string CREDENTIAL
Name of the credential object to be used in this data sink.
const string DESTINATION
Destination for the output data, in the format 'destination_type://path[:port]'.
const string GCS_BUCKET_NAME
Name of the Google Cloud Storage bucket to use as the data sink.
const string GCS_PROJECT_ID
Name of the Google Cloud project to use as the data sink.
const string GCS_SERVICE_ACCOUNT_KEYS
Google Cloud service account keys to use for authenticating the data sink.
const string HDFS_DELEGATION_TOKEN
Delegation token for the given HDFS user.
const string HDFS_KERBEROS_KEYTAB
Kerberos keytab file location for the given HDFS user.
const string HDFS_USE_KERBEROS
Use Kerberos authentication for the given HDFS cluster. Supported values: TRUE, FALSE.
const string JSON_FORMAT
The desired format of JSON-encoded notification messages.
const string KAFKA_TOPIC_NAME
Name of the Kafka topic to use for this data sink, if it references a Kafka broker.
const string KAFKA_URL
The publicly-accessible full path URL to the Kafka broker, e.g., 'http://172.123.45.67:9300'.
const string MAX_BATCH_SIZE
Maximum number of records per notification message.
const string MAX_MESSAGE_SIZE
Maximum size in bytes of each notification message.
const string S3_AWS_ROLE_ARN
Amazon IAM Role ARN which has required S3 permissions that can be assumed for the given S3 IAM user.
const string S3_BUCKET_NAME
Name of the Amazon S3 bucket to use as the data sink.
const string S3_REGION
Name of the Amazon S3 region where the given bucket is located.
const string SCHEMA_NAME
Updates the schema name.
const string SKIP_VALIDATION
Bypass validation of connection to this data sink.
const string USE_HTTPS
Use HTTPS to connect to the data sink if true, otherwise use HTTP. Supported values: TRUE, FALSE.
const string USE_MANAGED_CREDENTIALS
When no credentials are supplied, we use anonymous access by default.
const string WAIT_TIMEOUT
Timeout in seconds for waiting for a response from this sink.

A set of results returned by Kinetica.alterDatasink(string, IDictionary{string, string}, IDictionary{string, string}):

IDictionary< string, string > updated_properties_map
Map of values updated.

IDictionary< string, string > info
Additional information.

KineticaData - class to help with Avro encoding for Kinetica.
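Putting the request and result classes together, a usage sketch: the alterDatasink overload and the updated_properties_map field are the ones documented above, while the client namespace, the server URL, the sink name, and the lowercase property keys are assumptions.

using System;
using System.Collections.Generic;
using kinetica;  // Kinetica C# client namespace (assumed)

public static class AlterDatasinkExample
{
    public static void Main()
    {
        // Connect to a Kinetica instance (placeholder URL)
        Kinetica kdb = new Kinetica("http://localhost:9191");

        // Properties of the data sink to change, keyed by the constants above
        var updates = new Dictionary<string, string>
        {
            { "connection_timeout", "30" },    // CONNECTION_TIMEOUT
            { "use_https",          "true" }   // USE_HTTPS = TRUE
        };

        var response = kdb.alterDatasink(
            "my_kafka_sink",                    // name of the data sink to alter
            updates,                            // datasink_updates_map
            new Dictionary<string, string>());  // options (none)

        // updated_properties_map echoes what the server actually changed
        foreach (var kv in response.updated_properties_map)
            Console.WriteLine($"{kv.Key} = {kv.Value}");
    }
}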