using System.Collections.Generic;

// String values used for boolean-valued options
public const string TRUE = "true";
public const string FALSE = "false";

// Name of the data source to be created
public string name { get; set; }

// Optional parameters
public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();

// Constructs a CreateDatasourceRequest object with the specified parameters
public CreateDatasourceRequest( string name,
                                string location,
                                string user_name,
                                string password,
                                IDictionary<string, string> options = null )
{
    this.name = name ?? "";
    this.location = location ?? "";
    this.user_name = user_name ?? "";
    this.password = password ?? "";
    this.options = options ?? new Dictionary<string, string>();
}

// Additional information
public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
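Boolean-valued options are passed as the string constants TRUE ("true") and FALSE ("false") shown above. A minimal sketch of that convention, assuming the option-name constants live on a nested Options class of CreateDatasourceRequest (the nesting is an assumption; the constant names themselves are listed below):

    // Hedged sketch: set a boolean-valued option using the string constants.
    var request = new CreateDatasourceRequest();
    request.name = "my_s3_source";   // hypothetical data source name
    // SKIP_VALIDATION bypasses validation of the connection to the remote source
    request.options[ CreateDatasourceRequest.Options.SKIP_VALIDATION ] =
        CreateDatasourceRequest.Options.TRUE;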
const string AZURE_CONTAINER_NAME
Name of the Azure storage container to use as the data source
const string AZURE_SAS_TOKEN
Shared access signature token for Azure storage account to use as the data source
const string JDBC_DRIVER_CLASS_NAME
Name of the JDBC driver class
const string ANONYMOUS
Use anonymous connection to storage provider. DEPRECATED: this is now the default. ...
string name
Name of the data source to be created.
string user_name
Name of the remote system user; may be an empty string
IDictionary< string, string > info
Additional information.
const string S3_AWS_ROLE_ARN
Amazon IAM Role ARN which has required S3 permissions that can be assumed for the given S3 IAM user...
string location
Location of the remote storage in 'storage_provider_type://[storage_path[:storage_port]]' format...
A set of results returned by Kinetica.createDatasource(string,string,string,string,IDictionary{string, string}).
const string S3_USE_VIRTUAL_ADDRESSING
Whether to use virtual addressing when referencing the Amazon S3 source. Supported values: TRUE: The r...
A set of parameters for Kinetica.createDatasource(string,string,string,string,IDictionary{string, string}).
string password
Password for the remote system user; may be an empty string
const string S3_VERIFY_SSL
Set to false for testing purposes or when necessary to bypass TLS errors (e.g. ...
const string AZURE_TENANT_ID
Active Directory tenant ID (or directory ID)
CreateDatasourceRequest()
Constructs a CreateDatasourceRequest object with default parameters.
const string CREDENTIAL
Name of the credential object to be used in data source
const string SCHEMA_REGISTRY_CREDENTIAL
Confluent Schema Registry credential object name.
CreateDatasourceRequest(string name, string location, string user_name, string password, IDictionary< string, string > options=null)
Constructs a CreateDatasourceRequest object with the specified parameters.
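A hedged sketch of this constructor for an Amazon S3 data source; the bucket, region, credential, and location values are placeholders, and the nested Options class used to reach the option-name constants is an assumption:

    // requires: using System.Collections.Generic;
    var options = new Dictionary<string, string>
    {
        { CreateDatasourceRequest.Options.S3_BUCKET_NAME, "example-bucket" },  // placeholder bucket
        { CreateDatasourceRequest.Options.S3_REGION,      "us-east-1" },       // placeholder region
        { CreateDatasourceRequest.Options.CREDENTIAL,     "s3_credential" }    // placeholder credential object name
    };

    var request = new CreateDatasourceRequest(
        "my_s3_source",          // name of the data source to be created
        "s3://example-bucket",   // location in 'storage_provider_type://[storage_path[:storage_port]]' format
        "",                      // user_name; may be an empty string
        "",                      // password; may be an empty string
        options );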
const string HDFS_KERBEROS_KEYTAB
Kerberos keytab file location for the given HDFS user.
const string AZURE_OAUTH_TOKEN
OAuth token to access given storage container
const string GCS_PROJECT_ID
Name of the Google Cloud project to use as the data source
const string HDFS_DELEGATION_TOKEN
Delegation token for the given HDFS user
const string USE_HTTPS
Use https to connect to the data source if true, otherwise use http. Supported values: TRUE, FALSE. The def...
const string SCHEMA_REGISTRY_LOCATION
Location of Confluent Schema Registry in '[storage_path[:storage_port]]' format.
const string S3_BUCKET_NAME
Name of the Amazon S3 bucket to use as the data source
const string S3_ENCRYPTION_CUSTOMER_ALGORITHM
Customer encryption algorithm used for encrypting data
const string KAFKA_TOPIC_NAME
Name of the Kafka topic to use as the data source
const string WAIT_TIMEOUT
Timeout in seconds for reading from this storage provider
const string SCHEMA_REGISTRY_PORT
Confluent Schema Registry port (optional).
const string GCS_SERVICE_ACCOUNT_KEYS
Google Cloud service account keys to use for authenticating the data source
const string CONNECTION_TIMEOUT
Timeout in seconds for connecting to this storage provider
const string GCS_BUCKET_NAME
Name of the Google Cloud Storage bucket to use as the data source
const string S3_ENCRYPTION_CUSTOMER_KEY
Customer encryption key to encrypt or decrypt data
const string AZURE_STORAGE_ACCOUNT_NAME
Name of the Azure storage account to use as the data source; this is valid only if tenant_id is speci...
const string S3_REGION
Name of the Amazon S3 region where the given bucket is located
const string SKIP_VALIDATION
Bypass validation of connection to remote source.
const string HDFS_USE_KERBEROS
Use Kerberos authentication for the given HDFS cluster. Supported values: TRUE, FALSE. The default val...
KineticaData - class to help with Avro Encoding for Kinetica
const string USE_MANAGED_CREDENTIALS
When no credentials are supplied, we use anonymous access by default.
const string IS_STREAM
To load from Azure/GCS/S3 as a stream continuously.
const string JDBC_DRIVER_JAR_PATH
JDBC driver jar file location.
IDictionary< string, string > options
Optional parameters.
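For context, a hedged sketch of calling Kinetica.createDatasource(string,string,string,string,IDictionary{string, string}) directly and reading the returned info map. The connection URL and the Kinetica constructor usage are assumptions; the parameter list and the response's info field follow the declarations documented above.

    using System;
    using System.Collections.Generic;

    Kinetica kdb = new Kinetica( "http://localhost:9191" );   // assumed connection URL

    var response = kdb.createDatasource(
        "my_s3_source",                      // name of the data source to be created
        "s3://example-bucket",               // location (placeholder)
        "",                                  // user_name; may be an empty string
        "",                                  // password; may be an empty string
        new Dictionary<string, string>() );  // optional parameters

    // Additional information returned by the server
    foreach ( var entry in response.info )
        Console.WriteLine( $"{entry.Key} = {entry.Value}" );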