Kinetica C# API  Version 7.1.10.0
CreateDatasink.cs
/*
 * This file was autogenerated by the Kinetica schema processor.
 *
 * DO NOT EDIT DIRECTLY.
 */

using System.Collections.Generic;

namespace kinetica
{

    /// <summary>A set of parameters for <see
    /// cref="Kinetica.createDatasink(string,string,IDictionary{string, string})" />.</summary>
    public class CreateDatasinkRequest : KineticaData
    {
        /// <summary>Optional parameters.</summary>
        public struct Options
        {
            /// <summary>Timeout in seconds for connecting to this data sink.</summary>
            public const string CONNECTION_TIMEOUT = "connection_timeout";

            /// <summary>Timeout in seconds for waiting for a response from this data sink.</summary>
            public const string WAIT_TIMEOUT = "wait_timeout";

            /// <summary>Name of the credential object to be used in this data sink.</summary>
            public const string CREDENTIAL = "credential";

            /// <summary>Name of the Amazon S3 bucket to use as the data sink.</summary>
            public const string S3_BUCKET_NAME = "s3_bucket_name";

            /// <summary>Name of the Amazon S3 region where the given bucket is located.</summary>
            public const string S3_REGION = "s3_region";

            /// <summary>Set to false for testing purposes or when necessary to bypass TLS
            /// errors. Supported values: TRUE, FALSE.</summary>
            public const string S3_VERIFY_SSL = "s3_verify_ssl";
            public const string TRUE = "true";
            public const string FALSE = "false";

            /// <summary>When true (default), the request URI should be specified in
            /// virtual-hosted-style format where the bucket name is part of the domain
            /// name. Supported values: TRUE, FALSE.</summary>
            public const string S3_USE_VIRTUAL_ADDRESSING = "s3_use_virtual_addressing";

            /// <summary>Amazon IAM Role ARN which has required S3 permissions that can
            /// be assumed for the given S3 IAM user.</summary>
            public const string S3_AWS_ROLE_ARN = "s3_aws_role_arn";

            /// <summary>Customer encryption algorithm used when encrypting data.</summary>
            public const string S3_ENCRYPTION_CUSTOMER_ALGORITHM = "s3_encryption_customer_algorithm";

            /// <summary>Customer encryption key to encrypt or decrypt data.</summary>
            public const string S3_ENCRYPTION_CUSTOMER_KEY = "s3_encryption_customer_key";

            /// <summary>Server side encryption type.</summary>
            public const string S3_ENCRYPTION_TYPE = "s3_encryption_type";

            public const string S3_KMS_KEY_ID = "s3_kms_key_id";

            /// <summary>Kerberos keytab file location for the given HDFS user.</summary>
            public const string HDFS_KERBEROS_KEYTAB = "hdfs_kerberos_keytab";

            /// <summary>Delegation token for the given HDFS user.</summary>
            public const string HDFS_DELEGATION_TOKEN = "hdfs_delegation_token";

            /// <summary>Use Kerberos authentication for the given HDFS cluster.
            /// Supported values: TRUE, FALSE.</summary>
            public const string HDFS_USE_KERBEROS = "hdfs_use_kerberos";

            /// <summary>Name of the Azure storage account to use as the data sink;
            /// this is valid only if tenant_id is specified.</summary>
            public const string AZURE_STORAGE_ACCOUNT_NAME = "azure_storage_account_name";

            /// <summary>Name of the Azure storage container to use as the data sink.</summary>
            public const string AZURE_CONTAINER_NAME = "azure_container_name";

            /// <summary>Active Directory tenant ID (or directory ID).</summary>
            public const string AZURE_TENANT_ID = "azure_tenant_id";

            /// <summary>Shared access signature token for the Azure storage account
            /// to use as the data sink.</summary>
            public const string AZURE_SAS_TOKEN = "azure_sas_token";

            /// <summary>OAuth token to access the given storage container.</summary>
            public const string AZURE_OAUTH_TOKEN = "azure_oauth_token";

            /// <summary>Name of the Google Cloud Storage bucket to use as the data sink.</summary>
            public const string GCS_BUCKET_NAME = "gcs_bucket_name";

            /// <summary>Name of the Google Cloud project to use as the data sink.</summary>
            public const string GCS_PROJECT_ID = "gcs_project_id";

            /// <summary>Google Cloud service account keys to use for authenticating
            /// the data sink.</summary>
            public const string GCS_SERVICE_ACCOUNT_KEYS = "gcs_service_account_keys";

            /// <summary>JDBC driver jar file location.</summary>
            public const string JDBC_DRIVER_JAR_PATH = "jdbc_driver_jar_path";

            /// <summary>Name of the JDBC driver class.</summary>
            public const string JDBC_DRIVER_CLASS_NAME = "jdbc_driver_class_name";

            /// <summary>Name of the Kafka topic to publish to if destination is a
            /// Kafka broker.</summary>
            public const string KAFKA_TOPIC_NAME = "kafka_topic_name";

            /// <summary>Maximum number of records per notification message.</summary>
            public const string MAX_BATCH_SIZE = "max_batch_size";

            /// <summary>Maximum size in bytes of each notification message.</summary>
            public const string MAX_MESSAGE_SIZE = "max_message_size";

            /// <summary>The desired format of JSON encoded notification messages.
            /// Supported values: FLAT, NESTED.</summary>
            public const string JSON_FORMAT = "json_format";
            public const string FLAT = "flat";
            public const string NESTED = "nested";

            /// <summary>When no credentials are supplied, anonymous access is used by
            /// default.</summary>
            public const string USE_MANAGED_CREDENTIALS = "use_managed_credentials";

            /// <summary>Use https to connect to the data sink if true, otherwise use
            /// http. Supported values: TRUE, FALSE.</summary>
            public const string USE_HTTPS = "use_https";

            /// <summary>Bypass validation of connection to this data sink.</summary>
            public const string SKIP_VALIDATION = "skip_validation";
        } // end struct Options

        /// <summary>Name of the data sink to be created.</summary>
        public string name { get; set; }

        /// <summary>Destination for the output data in format
        /// 'storage_provider_type://path[:port]'.</summary>
        public string destination { get; set; }

        /// <summary>Optional parameters.</summary>
        public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();

        /// <summary>Constructs a CreateDatasinkRequest object with default
        /// parameters.</summary>
        public CreateDatasinkRequest() { }

        /// <summary>Constructs a CreateDatasinkRequest object with the specified
        /// parameters.</summary>
        public CreateDatasinkRequest( string name,
                                      string destination,
                                      IDictionary<string, string> options = null)
        {
            this.name = name ?? "";
            this.destination = destination ?? "";
            this.options = options ?? new Dictionary<string, string>();
        } // end constructor

    } // end class CreateDatasinkRequest

    /// <summary>A set of results returned by <see
    /// cref="Kinetica.createDatasink(string,string,IDictionary{string, string})" />.</summary>
    public class CreateDatasinkResponse : KineticaData
    {
        /// <summary>Name of the data sink created.</summary>
        public string name { get; set; }

        /// <summary>Additional information.</summary>
        public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();

    } // end class CreateDatasinkResponse

} // end namespace kinetica
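A minimal usage sketch follows. It assumes a reachable Kinetica server at the placeholder URL and an existing credential object on the cluster; the bucket name, sink name, and destination string are illustrative. The three-argument Kinetica.createDatasink overload shown is the one referenced throughout this file.

using System;
using System.Collections.Generic;
using kinetica;

public static class CreateS3DatasinkExample
{
    public static void Main()
    {
        // Connect to a Kinetica instance (placeholder URL).
        Kinetica kdb = new Kinetica("http://localhost:9191");

        // Option keys come from CreateDatasinkRequest.Options; all values are
        // strings, including numeric ones such as timeouts.
        var options = new Dictionary<string, string>
        {
            { CreateDatasinkRequest.Options.CREDENTIAL, "s3_credential" },      // hypothetical credential object
            { CreateDatasinkRequest.Options.S3_BUCKET_NAME, "example-bucket" }, // hypothetical bucket
            { CreateDatasinkRequest.Options.S3_REGION, "us-east-1" },
            { CreateDatasinkRequest.Options.CONNECTION_TIMEOUT, "30" }
        };

        // The destination uses the 'storage_provider_type://path[:port]' format.
        CreateDatasinkResponse response = kdb.createDatasink(
            "example_s3_sink",
            "s3://example-bucket/output",
            options);

        Console.WriteLine("Created data sink: " + response.name);
    }
}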
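For a streaming target such as Kafka, the notification-related options (kafka_topic_name, max_batch_size, max_message_size, json_format) come into play. The sketch below builds the CreateDatasinkRequest object explicitly; it assumes the client also exposes a request-object overload of createDatasink, and the broker address and topic name are placeholders.

using System;
using System.Collections.Generic;
using kinetica;

public static class CreateKafkaDatasinkExample
{
    public static void Main()
    {
        Kinetica kdb = new Kinetica("http://localhost:9191");  // placeholder URL

        // Constructing the request directly mirrors the class defined above.
        var request = new CreateDatasinkRequest(
            "example_kafka_sink",
            "kafka://kafka-broker:9092",  // illustrative broker address
            new Dictionary<string, string>
            {
                { CreateDatasinkRequest.Options.KAFKA_TOPIC_NAME, "sink_events" },
                { CreateDatasinkRequest.Options.MAX_BATCH_SIZE, "100" },
                { CreateDatasinkRequest.Options.JSON_FORMAT, CreateDatasinkRequest.Options.FLAT }
            });

        CreateDatasinkResponse response = kdb.createDatasink(request);  // assumed request-object overload

        // The info map carries any additional information returned by the server.
        foreach (KeyValuePair<string, string> entry in response.info)
            Console.WriteLine(entry.Key + ": " + entry.Value);
    }
}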