Kinetica C# API  Version 7.1.10.0
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Properties Pages
InsertRecordsFromPayload.cs
Go to the documentation of this file.
1 /*
2  * This file was autogenerated by the Kinetica schema processor.
3  *
4  * DO NOT EDIT DIRECTLY.
5  */
6 
7 using System.Collections.Generic;
8 
9 
10 
11 namespace kinetica
12 {
13 
25  {
26 
248  public struct CreateTableOptions
249  {
250 
254  public const string TYPE_ID = "type_id";
255 
273  public const string NO_ERROR_IF_EXISTS = "no_error_if_exists";
274  public const string TRUE = "true";
275  public const string FALSE = "false";
276 
305  public const string IS_REPLICATED = "is_replicated";
306 
313  public const string FOREIGN_KEYS = "foreign_keys";
314 
318  public const string FOREIGN_SHARD_KEY = "foreign_shard_key";
319 
360  public const string PARTITION_TYPE = "partition_type";
361 
365  public const string RANGE = "RANGE";
366 
370  public const string INTERVAL = "INTERVAL";
371 
375  public const string LIST = "LIST";
376 
380  public const string HASH = "HASH";
381 
385  public const string SERIES = "SERIES";
386 
391  public const string PARTITION_KEYS = "partition_keys";
392 
406  public const string PARTITION_DEFINITIONS = "partition_definitions";
407 
426  public const string IS_AUTOMATIC_PARTITION = "is_automatic_partition";
427 
431  public const string TTL = "ttl";
432 
435  public const string CHUNK_SIZE = "chunk_size";
436 
459  public const string IS_RESULT_TABLE = "is_result_table";
460 
464  public const string STRATEGY_DEFINITION = "strategy_definition";
465  } // end struct CreateTableOptions
466 
467 
1208  public struct Options
1209  {
1210 
1213  public const string AVRO_HEADER_BYTES = "avro_header_bytes";
1214 
1217  public const string AVRO_NUM_RECORDS = "avro_num_records";
1218 
1222  public const string AVRO_SCHEMA = "avro_schema";
1223 
1238  public const string AVRO_SCHEMALESS = "avro_schemaless";
1239 
1242  public const string TRUE = "true";
1243 
1246  public const string FALSE = "false";
1247 
1252  public const string BAD_RECORD_TABLE_NAME = "bad_record_table_name";
1253 
1257  public const string BAD_RECORD_TABLE_LIMIT = "bad_record_table_limit";
1258 
1264  public const string BAD_RECORD_TABLE_LIMIT_PER_INPUT = "bad_record_table_limit_per_input";
1265 
1268  public const string BATCH_SIZE = "batch_size";
1269 
1285  public const string COLUMN_FORMATS = "column_formats";
1286 
1319  public const string COLUMNS_TO_LOAD = "columns_to_load";
1320 
1325  public const string COLUMNS_TO_SKIP = "columns_to_skip";
1326 
1354  public const string COMPRESSION_TYPE = "compression_type";
1355 
1357  public const string NONE = "none";
1358 
1360  public const string AUTO = "auto";
1361 
1363  public const string GZIP = "gzip";
1364 
1366  public const string BZIP2 = "bzip2";
1367 
1401  public const string DEFAULT_COLUMN_FORMATS = "default_column_formats";
1402 
1429  public const string ERROR_HANDLING = "error_handling";
1430 
1434  public const string PERMISSIVE = "permissive";
1435 
1437  public const string IGNORE_BAD_RECORDS = "ignore_bad_records";
1438 
1442  public const string ABORT = "abort";
1443 
1482  public const string FILE_TYPE = "file_type";
1483 
1485  public const string AVRO = "avro";
1486 
1489  public const string DELIMITED_TEXT = "delimited_text";
1490 
1492  public const string GDB = "gdb";
1493 
1495  public const string JSON = "json";
1496 
1498  public const string PARQUET = "parquet";
1499 
1501  public const string SHAPEFILE = "shapefile";
1502 
1506  public const string GDAL_CONFIGURATION_OPTIONS = "gdal_configuration_options";
1507 
1545  public const string IGNORE_EXISTING_PK = "ignore_existing_pk";
1546 
1575  public const string INGESTION_MODE = "ingestion_mode";
1576 
1579  public const string FULL = "full";
1580 
1584  public const string DRY_RUN = "dry_run";
1585 
1589  public const string TYPE_INFERENCE_ONLY = "type_inference_only";
1590 
1593  public const string LAYER = "layer";
1594 
1652  public const string LOADING_MODE = "loading_mode";
1653 
1656  public const string HEAD = "head";
1657 
1669  public const string DISTRIBUTED_SHARED = "distributed_shared";
1670 
1693  public const string DISTRIBUTED_LOCAL = "distributed_local";
1694 
1696  public const string LOCAL_TIME_OFFSET = "local_time_offset";
1697 
1703  public const string MAX_RECORDS_TO_LOAD = "max_records_to_load";
1704 
1707  public const string NUM_TASKS_PER_RANK = "num_tasks_per_rank";
1708 
1714  public const string POLL_INTERVAL = "poll_interval";
1715 
1719  public const string PRIMARY_KEYS = "primary_keys";
1720  public const string SCHEMA_REGISTRY_SCHEMA_ID = "schema_registry_schema_id";
1721  public const string SCHEMA_REGISTRY_SCHEMA_NAME = "schema_registry_schema_name";
1722  public const string SCHEMA_REGISTRY_SCHEMA_VERSION = "schema_registry_schema_version";
1723 
1727  public const string SHARD_KEYS = "shard_keys";
1728 
1730  public const string SKIP_LINES = "skip_lines";
1731 
1747  public const string SUBSCRIBE = "subscribe";
1748 
1765  public const string TABLE_INSERT_MODE = "table_insert_mode";
1766  public const string SINGLE = "single";
1767  public const string TABLE_PER_FILE = "table_per_file";
1768 
1776  public const string TEXT_COMMENT_STRING = "text_comment_string";
1777 
1784  public const string TEXT_DELIMITER = "text_delimiter";
1785 
1804  public const string TEXT_ESCAPE_CHARACTER = "text_escape_character";
1805 
1823  public const string TEXT_HAS_HEADER = "text_has_header";
1824 
1833  public const string TEXT_HEADER_PROPERTY_DELIMITER = "text_header_property_delimiter";
1834 
1841  public const string TEXT_NULL_STRING = "text_null_string";
1842 
1856  public const string TEXT_QUOTE_CHARACTER = "text_quote_character";
1857 
1863  public const string TEXT_SEARCH_COLUMNS = "text_search_columns";
1864 
1867  public const string TEXT_SEARCH_MIN_COLUMN_LENGTH = "text_search_min_column_length";
1868 
1884  public const string TRUNCATE_STRINGS = "truncate_strings";
1885 
1901  public const string TRUNCATE_TABLE = "truncate_table";
1902 
1922  public const string TYPE_INFERENCE_MODE = "type_inference_mode";
1923 
1926  public const string ACCURACY = "accuracy";
1927 
1931  public const string SPEED = "speed";
1932 
1965  public const string UPDATE_ON_EXISTING_PK = "update_on_existing_pk";
1966  } // end struct Options
1967 
1968 
1979  public string table_name { get; set; }
1980 
1982  public string data_text { get; set; }
1983 
1985  public byte[] data_bytes { get; set; }
1986 
1989  public IDictionary<string, IDictionary<string, string>> modify_columns { get; set; } = new Dictionary<string, IDictionary<string, string>>();
1990 
2210  public IDictionary<string, string> create_table_options { get; set; } = new Dictionary<string, string>();
2211 
2950  public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();
2951 
2952 
2956 
3916  string data_text,
3917  byte[] data_bytes,
3918  IDictionary<string, IDictionary<string, string>> modify_columns = null,
3919  IDictionary<string, string> create_table_options = null,
3920  IDictionary<string, string> options = null)
3921  {
3922  this.table_name = table_name ?? "";
3923  this.data_text = data_text ?? "";
3924  this.data_bytes = data_bytes ?? new byte[] { };
3925  this.modify_columns = modify_columns ?? new Dictionary<string, IDictionary<string, string>>();
3926  this.create_table_options = create_table_options ?? new Dictionary<string, string>();
3927  this.options = options ?? new Dictionary<string, string>();
3928  } // end constructor
3929 
3930  } // end class InsertRecordsFromPayloadRequest
3931 
3932 
3933 
3938  {
3939 
3942  public string table_name { get; set; }
3943 
3947  public string type_id { get; set; }
3948 
3951  public string type_definition { get; set; }
3952 
3955  public string type_label { get; set; }
3956 
3959  public IDictionary<string, IList<string>> type_properties { get; set; } = new Dictionary<string, IList<string>>();
3960 
3963  public long count_inserted { get; set; }
3964 
3967  public long count_skipped { get; set; }
3968 
3971  public long count_updated { get; set; }
3972 
3974  public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
3975 
3976  } // end class InsertRecordsFromPayloadResponse
3977 
3978 
3979 
3980 
3981 } // end namespace kinetica
const string TEXT_ESCAPE_CHARACTER
Specifies the character that is used to escape other characters in the source data.
const string PARTITION_KEYS
Comma-separated list of partition keys, which are the columns or column expressions by which records ...
const string IS_REPLICATED
Affects the distribution scheme for the table's data.
const string DRY_RUN
Does not load data, but walks through the source data and determines the number of valid records...
const string IS_RESULT_TABLE
Indicates whether the table is a memory-only table.
const string TEXT_SEARCH_COLUMNS
Add 'text_search' property to internally inferred string columns.
string type_definition
A JSON string describing the columns of the target table
const string SPEED
Scans data and picks the widest possible column types so that &#39;all&#39; values will fit with minimum data...
const string AVRO_SCHEMA
Optional string representing avro schema, for insert records in avro format, that does not include is...
byte[] data_bytes
Records formatted as binary data
const string FOREIGN_KEYS
Semicolon-separated list of foreign keys, of the format '(source_column_name [, ...]) references target_table_name(primary_key_column_name [, ...]) [as foreign_key_name]'.
const string FILE_TYPE
Specifies the type of the file(s) whose records will be inserted.
const string TRUNCATE_STRINGS
If set to true, truncate string values that are longer than the column's type size.
const string DEFAULT_COLUMN_FORMATS
Specifies the default format to be applied to source data loaded into columns with the corresponding ...
const string TABLE_INSERT_MODE
Optional: whether to insert all records into a single table, or into a separate table per source file.
const string TYPE_ID
ID of a currently registered type.
const string IGNORE_BAD_RECORDS
Malformed records are skipped.
const string PARTITION_DEFINITIONS
Comma-separated list of partition definitions, whose format depends on the choice of partition_type...
const string BAD_RECORD_TABLE_NAME
Optional name of a table to which records that were rejected are written.
const string MAX_RECORDS_TO_LOAD
Limit the number of records to load in this request: If this number is larger than a batch_size...
string type_id
ID of the currently registered table structure type for the target table
const string TEXT_QUOTE_CHARACTER
Specifies the character that should be interpreted as a field value quoting character in the source d...
const string SUBSCRIBE
Continuously poll the data source to check for new data and load it into the table.
const string COMPRESSION_TYPE
Optional: payload compression type. Supported values: NONE: uncompressed; AUTO: default.
const string TEXT_HAS_HEADER
Indicates whether the source data contains a header row.
const string ERROR_HANDLING
Specifies how errors should be handled upon insertion.
const string LAYER
Optional: geo files layer(s) name(s): comma separated.
long count_updated
[Not yet implemented] Number of records updated within the target table.
const string CHUNK_SIZE
Indicates the number of records per chunk to be used for this table.
long count_skipped
Number of records skipped, when not running in abort error handling mode.
IDictionary< string, IList< string > > type_properties
A mapping of each target table column name to an array of column properties associated with that colu...
const string TEXT_COMMENT_STRING
Specifies the character string that should be interpreted as a comment line prefix in the source data...
const string PRIMARY_KEYS
Optional: comma separated list of column names, to set as primary keys, when not specified in the typ...
const string POLL_INTERVAL
When subscribing, the number of seconds between attempts to load external files into the table.
const string BAD_RECORD_TABLE_LIMIT_PER_INPUT
For subscriptions: A positive integer indicating the maximum number of records that can be written to...
const string PERMISSIVE
Records with missing columns are populated with nulls if possible; otherwise, the malformed records a...
const string TEXT_SEARCH_MIN_COLUMN_LENGTH
Set minimum column size.
const string DISTRIBUTED_LOCAL
A single worker process on each node loads all files that are available to it.
const string AVRO_HEADER_BYTES
Optional number of bytes to skip when reading an avro record.
const string HEAD
The head node loads all data.
const string AVRO_SCHEMALESS
When user provides 'avro_schema', avro data is assumed to be schemaless, unless specified.
const string COLUMNS_TO_SKIP
Specifies a comma-delimited list of columns from the source data to skip.
string type_label
The user-defined description associated with the target table's structure
string data_text
Records formatted as delimited text
string table_name
Name of the table into which the data will be inserted, in [schema_name.
const string TYPE_INFERENCE_ONLY
Infer the type of the source data and return, without ingesting any data.
const string TYPE_INFERENCE_MODE
Optimize type inference for: Supported values: ACCURACY: Scans data to get exactly-typed &amp; sized colu...
const string NUM_TASKS_PER_RANK
Optional: number of tasks for reading file per rank.
const string TRUNCATE_TABLE
If set to true, truncates the table specified by table_name prior to loading the file(s).
const string TRUE
Upsert new records when primary keys match existing records
IDictionary< string, string > info
Additional information.
const string IS_AUTOMATIC_PARTITION
If true, a new partition will be created for values which don't fall into an existing partition...
const string UPDATE_ON_EXISTING_PK
Specifies the record collision policy for inserting into a table with a primary key.
const string TEXT_DELIMITER
Specifies the character delimiting field values in the source data and field names in the header (if ...
const string AVRO_NUM_RECORDS
Optional number of avro records, if data includes only records.
const string ABORT
Stops current insertion and aborts entire operation when an error is encountered. ...
const string LOADING_MODE
Scheme for distributing the extraction and loading of data from the source data file(s).
const string DELIMITED_TEXT
Delimited text file format; e.g., CSV, TSV, PSV, etc.
const string FOREIGN_SHARD_KEY
Foreign shard key of the format 'source_column references shard_by_column from target_table(primary_k...
long count_inserted
Number of records inserted into the target table.
const string SHARD_KEYS
Optional: comma separated list of column names, to set as shard keys, when not specified in the typ...
const string AUTO
Default. Auto detect compression type
const string TEXT_NULL_STRING
Specifies the character string that should be interpreted as a null value in the source data...
const string TEXT_HEADER_PROPERTY_DELIMITER
Specifies the delimiter for column properties in the header row (if present).
InsertRecordsFromPayloadRequest()
Constructs an InsertRecordsFromPayloadRequest object with default parameters.
A set of results returned by Kinetica.insertRecordsFromPayload(string,string,byte[],IDictionary{string, IDictionary{string, string}},IDictionary{string, string},IDictionary{string, string}).
const string NO_ERROR_IF_EXISTS
If true, prevents an error from occurring if the table already exists and is of the given type...
const string ACCURACY
Scans data to get exactly-typed &amp; sized columns for all data scanned.
const string BAD_RECORD_TABLE_LIMIT
A positive integer indicating the maximum number of records that can be written to the bad-record-tab...
const string COLUMNS_TO_LOAD
Specifies a comma-delimited list of columns from the source data to load.
const string COLUMN_FORMATS
For each target column specified, applies the column-property-bound format to the source data loaded ...
const string STRATEGY_DEFINITION
The tier strategy for the table and its columns.
InsertRecordsFromPayloadRequest(string table_name, string data_text, byte[] data_bytes, IDictionary< string, IDictionary< string, string >> modify_columns=null, IDictionary< string, string > create_table_options=null, IDictionary< string, string > options=null)
Constructs an InsertRecordsFromPayloadRequest object with the specified parameters.
IDictionary< string, string > create_table_options
Options used when creating the target table.
IDictionary< string, string > options
Optional parameters.
KineticaData - class to help with Avro Encoding for Kinetica
Definition: KineticaData.cs:14
const string BATCH_SIZE
Internal tuning parameter–number of records per batch when inserting data.
const string IGNORE_EXISTING_PK
Specifies the record collision error-suppression policy for inserting into a table with a primary key...
const string INGESTION_MODE
Whether to do a full load, dry run, or perform a type inference on the source data.
const string TTL
Sets the TTL of the table specified in table_name.
const string FULL
Run a type inference on the source data (if needed) and ingest
const string GDAL_CONFIGURATION_OPTIONS
Comma separated list of GDAL configuration options, for the specific request: key=value.
const string SKIP_LINES
Skip a number of lines from the beginning of the file.
A set of parameters for Kinetica.insertRecordsFromPayload(string,string,byte[],IDictionary{string, IDictionary{string, string}},IDictionary{string, string},IDictionary{string, string}).
const string DISTRIBUTED_SHARED
The head node coordinates loading data by worker processes across all nodes from shared files availab...
const string FALSE
Reject new records when primary keys match existing records
const string LOCAL_TIME_OFFSET
For Avro local timestamp columns
IDictionary< string, IDictionary< string, string > > modify_columns
Not implemented yet.