Kinetica C# API  Version 7.1.10.0
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Properties Pages
CreateDatasource.cs
Go to the documentation of this file.
1 /*
2  * This file was autogenerated by the Kinetica schema processor.
3  *
4  * DO NOT EDIT DIRECTLY.
5  */
6 
7 using System.Collections.Generic;
8 
9 
10 
11 namespace kinetica
12 {
13 
23  {
24 
/// <summary>
/// Optional-parameter keys accepted in the request's
/// <c>options</c> dictionary, plus the constant values used for
/// boolean-valued options (<see cref="TRUE" />/<see cref="FALSE" />).
/// </summary>
public struct Options
{
    /// <summary>Bypass validation of connection to remote source.</summary>
    public const string SKIP_VALIDATION = "skip_validation";

    /// <summary>Boolean "true" value for flag-style options.</summary>
    public const string TRUE = "true";

    /// <summary>Boolean "false" value for flag-style options.</summary>
    public const string FALSE = "false";

    /// <summary>Timeout in seconds for connecting to this storage provider.</summary>
    public const string CONNECTION_TIMEOUT = "connection_timeout";

    /// <summary>Timeout in seconds for reading from this storage provider.</summary>
    public const string WAIT_TIMEOUT = "wait_timeout";

    /// <summary>Name of the credential object to be used in data source.</summary>
    public const string CREDENTIAL = "credential";

    /// <summary>Name of the Amazon S3 bucket to use as the data source.</summary>
    public const string S3_BUCKET_NAME = "s3_bucket_name";

    /// <summary>Name of the Amazon S3 region where the given bucket is located.</summary>
    public const string S3_REGION = "s3_region";

    /// <summary>Set to false for testing purposes or when necessary to
    /// bypass TLS errors. Supported values: <see cref="TRUE" />,
    /// <see cref="FALSE" />.</summary>
    public const string S3_VERIFY_SSL = "s3_verify_ssl";

    /// <summary>Whether to use virtual addressing when referencing the
    /// Amazon S3 source. Supported values: <see cref="TRUE" />,
    /// <see cref="FALSE" />.</summary>
    public const string S3_USE_VIRTUAL_ADDRESSING = "s3_use_virtual_addressing";

    /// <summary>Amazon IAM Role ARN which has required S3 permissions that
    /// can be assumed for the given S3 IAM user.</summary>
    public const string S3_AWS_ROLE_ARN = "s3_aws_role_arn";

    /// <summary>Customer encryption algorithm used for encrypting data.</summary>
    public const string S3_ENCRYPTION_CUSTOMER_ALGORITHM = "s3_encryption_customer_algorithm";

    /// <summary>Customer encryption key to encrypt or decrypt data.</summary>
    public const string S3_ENCRYPTION_CUSTOMER_KEY = "s3_encryption_customer_key";

    /// <summary>Kerberos keytab file location for the given HDFS user.</summary>
    public const string HDFS_KERBEROS_KEYTAB = "hdfs_kerberos_keytab";

    /// <summary>Delegation token for the given HDFS user.</summary>
    public const string HDFS_DELEGATION_TOKEN = "hdfs_delegation_token";

    /// <summary>Use Kerberos authentication for the given HDFS cluster.
    /// Supported values: <see cref="TRUE" />, <see cref="FALSE" />.</summary>
    public const string HDFS_USE_KERBEROS = "hdfs_use_kerberos";

    /// <summary>Name of the Azure storage account to use as the data
    /// source; valid only if a tenant ID is specified.</summary>
    public const string AZURE_STORAGE_ACCOUNT_NAME = "azure_storage_account_name";

    /// <summary>Name of the Azure storage container to use as the data source.</summary>
    public const string AZURE_CONTAINER_NAME = "azure_container_name";

    /// <summary>Active Directory tenant ID (or directory ID).</summary>
    public const string AZURE_TENANT_ID = "azure_tenant_id";

    /// <summary>Shared access signature token for the Azure storage
    /// account to use as the data source.</summary>
    public const string AZURE_SAS_TOKEN = "azure_sas_token";

    /// <summary>OAuth token to access the given storage container.</summary>
    public const string AZURE_OAUTH_TOKEN = "azure_oauth_token";

    /// <summary>Name of the Google Cloud Storage bucket to use as the data source.</summary>
    public const string GCS_BUCKET_NAME = "gcs_bucket_name";

    /// <summary>Name of the Google Cloud project to use as the data source.</summary>
    public const string GCS_PROJECT_ID = "gcs_project_id";

    /// <summary>Google Cloud service account keys to use for
    /// authenticating the data source.</summary>
    public const string GCS_SERVICE_ACCOUNT_KEYS = "gcs_service_account_keys";

    /// <summary>To load from Azure/GCS/S3 as a stream continuously.</summary>
    public const string IS_STREAM = "is_stream";

    /// <summary>Name of the Kafka topic to use as the data source.</summary>
    public const string KAFKA_TOPIC_NAME = "kafka_topic_name";

    /// <summary>JDBC driver jar file location.</summary>
    public const string JDBC_DRIVER_JAR_PATH = "jdbc_driver_jar_path";

    /// <summary>Name of the JDBC driver class.</summary>
    public const string JDBC_DRIVER_CLASS_NAME = "jdbc_driver_class_name";

    /// <summary>Use anonymous connection to the storage provider --
    /// DEPRECATED: this is now the default behavior.</summary>
    public const string ANONYMOUS = "anonymous";

    /// <summary>When no credentials are supplied, anonymous access is
    /// used by default. Supported values: <see cref="TRUE" />,
    /// <see cref="FALSE" />.</summary>
    public const string USE_MANAGED_CREDENTIALS = "use_managed_credentials";

    /// <summary>Use HTTPS to connect to the data source if true,
    /// otherwise use HTTP. Supported values: <see cref="TRUE" />,
    /// <see cref="FALSE" />.</summary>
    public const string USE_HTTPS = "use_https";

    /// <summary>Location of Confluent Schema Registry in
    /// '[storage_path[:storage_port]]' format.</summary>
    public const string SCHEMA_REGISTRY_LOCATION = "schema_registry_location";

    /// <summary>Confluent Schema Registry credential object name.</summary>
    public const string SCHEMA_REGISTRY_CREDENTIAL = "schema_registry_credential";

    /// <summary>Confluent Schema Registry port (optional).</summary>
    public const string SCHEMA_REGISTRY_PORT = "schema_registry_port";
} // end struct Options
577 
578 
/// <summary>Name of the data source to be created.</summary>
public string name { get; set; }

/// <summary>Location of the remote storage in
/// 'storage_provider_type://[storage_path[:storage_port]]' format.</summary>
public string location { get; set; }

/// <summary>Name of the remote system user; may be an empty string.</summary>
public string user_name { get; set; }

/// <summary>Password for the remote system user; may be an empty string.</summary>
public string password { get; set; }

/// <summary>Optional parameters; see <see cref="Options" /> for the
/// recognized keys. Defaults to an empty dictionary.</summary>
public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();
909 
910 
914 
/// <summary>Constructs a CreateDatasourceRequest object with the
/// specified parameters.</summary>
/// <param name="name">Name of the data source to be created.</param>
/// <param name="location">Location of the remote storage in
/// 'storage_provider_type://[storage_path[:storage_port]]' format.</param>
/// <param name="user_name">Name of the remote system user; may be an
/// empty string.</param>
/// <param name="password">Password for the remote system user; may be
/// an empty string.</param>
/// <param name="options">Optional parameters; see
/// <see cref="Options" /> for the recognized keys. A null value is
/// replaced by an empty dictionary.</param>
public CreateDatasourceRequest(string name,
                               string location,
                               string user_name,
                               string password,
                               IDictionary<string, string> options = null)
{
    // Null arguments are normalized to empty values so the request
    // never carries null fields.
    this.name = name ?? "";
    this.location = location ?? "";
    this.user_name = user_name ?? "";
    this.password = password ?? "";
    this.options = options ?? new Dictionary<string, string>();
} // end constructor
1251 
1252  } // end class CreateDatasourceRequest
1253 
1254 
1255 
1260  {
1261 
/// <summary>Name of the data source.
/// NOTE(review): presumably echoes the name from the request -- confirm.</summary>
public string name { get; set; }

/// <summary>Additional information.</summary>
public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
1268 
1269  } // end class CreateDatasourceResponse
1270 
1271 
1272 
1273 
1274 } // end namespace kinetica
const string AZURE_CONTAINER_NAME
Name of the Azure storage container to use as the data source
const string AZURE_SAS_TOKEN
Shared access signature token for Azure storage account to use as the data source
const string JDBC_DRIVER_CLASS_NAME
Name of the JDBC driver class
const string ANONYMOUS
Use anonymous connection to storage provider — DEPRECATED: this is now the default. ...
string name
Name of the data source to be created.
string user_name
Name of the remote system user; may be an empty string
IDictionary< string, string > info
Additional information.
const string S3_AWS_ROLE_ARN
Amazon IAM Role ARN which has required S3 permissions that can be assumed for the given S3 IAM user...
string location
Location of the remote storage in 'storage_provider_type://[storage_path[:storage_port]]' format...
A set of results returned by Kinetica.createDatasource(string,string,string,string,IDictionary{string, string}).
const string S3_USE_VIRTUAL_ADDRESSING
Whether to use virtual addressing when referencing the Amazon S3 source Supported values: TRUE: The r...
A set of parameters for Kinetica.createDatasource(string,string,string,string,IDictionary{string, string}).
string password
Password for the remote system user; may be an empty string
const string S3_VERIFY_SSL
Set to false for testing purposes or when necessary to bypass TLS errors (e.g.
const string AZURE_TENANT_ID
Active Directory tenant ID (or directory ID)
CreateDatasourceRequest()
Constructs a CreateDatasourceRequest object with default parameters.
const string CREDENTIAL
Name of the credential object to be used in data source
const string SCHEMA_REGISTRY_CREDENTIAL
Confluent Schema Registry credential object name.
CreateDatasourceRequest(string name, string location, string user_name, string password, IDictionary< string, string > options=null)
Constructs a CreateDatasourceRequest object with the specified parameters.
const string HDFS_KERBEROS_KEYTAB
Kerberos keytab file location for the given HDFS user.
const string AZURE_OAUTH_TOKEN
OAuth token to access given storage container
const string GCS_PROJECT_ID
Name of the Google Cloud project to use as the data source
const string HDFS_DELEGATION_TOKEN
Delegation token for the given HDFS user
const string USE_HTTPS
Use https to connect to datasource if true, otherwise use http Supported values: TRUE FALSE The def...
const string SCHEMA_REGISTRY_LOCATION
Location of Confluent Schema Registry in '[storage_path[:storage_port]]' format.
const string S3_BUCKET_NAME
Name of the Amazon S3 bucket to use as the data source
const string S3_ENCRYPTION_CUSTOMER_ALGORITHM
Customer encryption algorithm used for encrypting data
const string KAFKA_TOPIC_NAME
Name of the Kafka topic to use as the data source
const string WAIT_TIMEOUT
Timeout in seconds for reading from this storage provider
const string SCHEMA_REGISTRY_PORT
Confluent Schema Registry port (optional).
const string GCS_SERVICE_ACCOUNT_KEYS
Google Cloud service account keys to use for authenticating the data source
const string CONNECTION_TIMEOUT
Timeout in seconds for connecting to this storage provider
const string GCS_BUCKET_NAME
Name of the Google Cloud Storage bucket to use as the data source
const string S3_ENCRYPTION_CUSTOMER_KEY
Customer encryption key to encrypt or decrypt data
const string AZURE_STORAGE_ACCOUNT_NAME
Name of the Azure storage account to use as the data source; this is valid only if tenant_id is speci...
const string S3_REGION
Name of the Amazon S3 region where the given bucket is located
const string SKIP_VALIDATION
Bypass validation of connection to remote source.
const string HDFS_USE_KERBEROS
Use kerberos authentication for the given HDFS cluster Supported values: TRUE FALSE The default val...
KineticaData - class to help with Avro Encoding for Kinetica
Definition: KineticaData.cs:14
const string USE_MANAGED_CREDENTIALS
When no credentials are supplied, we use anonymous access by default.
const string IS_STREAM
To load from Azure/GCS/S3 as a stream continuously.
const string JDBC_DRIVER_JAR_PATH
JDBC driver jar file location.
IDictionary< string, string > options
Optional parameters.