Kinetica   C#   API  Version 7.2.3.0
InsertRecordsFromPayload.cs
Go to the documentation of this file.
1 /*
2  * This file was autogenerated by the Kinetica schema processor.
3  *
4  * DO NOT EDIT DIRECTLY.
5  */
6 
7 using System.Collections.Generic;
8 
9 namespace kinetica
10 {
18  public class InsertRecordsFromPayloadRequest : KineticaData
19  {
26  public struct CreateTableOptions
27  {
    /// <summary>ID of a currently registered type.</summary>
32  public const string TYPE_ID = "type_id";
33 
    /// <summary>If TRUE, prevents an error from occurring if the table already exists and is of the given type.</summary>
50  public const string NO_ERROR_IF_EXISTS = "no_error_if_exists";
51 
    /// <summary>Boolean value strings used by the flag-valued options in this struct.</summary>
52  public const string TRUE = "true";
53  public const string FALSE = "false";
54 
    /// <summary>Affects the distribution scheme for the table's data.</summary>
72  public const string IS_REPLICATED = "is_replicated";
73 
    /// <summary>Semicolon-separated list of foreign keys, of the format '(source_column_name [, ...]) ...'.</summary>
80  public const string FOREIGN_KEYS = "foreign_keys";
81 
    /// <summary>Foreign shard key of the format 'source_column references shard_by_column from target_table(...)'.</summary>
85  public const string FOREIGN_SHARD_KEY = "foreign_shard_key";
86 
    /// <summary>Partitioning scheme for the table; the scheme-name constants below (RANGE through SERIES) are its values.</summary>
128  public const string PARTITION_TYPE = "partition_type";
129 
    /// <summary>Range partitioning scheme (a PARTITION_TYPE value).</summary>
133  public const string RANGE = "RANGE";
134 
    /// <summary>Interval partitioning scheme (a PARTITION_TYPE value).</summary>
138  public const string INTERVAL = "INTERVAL";
139 
    /// <summary>List partitioning scheme (a PARTITION_TYPE value).</summary>
143  public const string LIST = "LIST";
144 
    /// <summary>Hash partitioning scheme (a PARTITION_TYPE value).</summary>
148  public const string HASH = "HASH";
149 
    /// <summary>Series partitioning scheme (a PARTITION_TYPE value).</summary>
153  public const string SERIES = "SERIES";
154 
    /// <summary>Comma-separated list of partition keys: the columns or column expressions by which records are partitioned.</summary>
160  public const string PARTITION_KEYS = "partition_keys";
161 
    /// <summary>Comma-separated list of partition definitions, whose format depends on the choice of PARTITION_TYPE.</summary>
178  public const string PARTITION_DEFINITIONS = "partition_definitions";
179 
    /// <summary>If TRUE, a new partition will be created for values which don't fall into an existing partition.</summary>
196  public const string IS_AUTOMATIC_PARTITION = "is_automatic_partition";
197 
    /// <summary>Sets the TTL of the table specified in table_name.</summary>
201  public const string TTL = "ttl";
202 
    /// <summary>Indicates the number of records per chunk to be used for this table.</summary>
205  public const string CHUNK_SIZE = "chunk_size";
206 
    /// <summary>Indicates the target maximum data size for each column in a chunk to be used for this table.</summary>
209  public const string CHUNK_COLUMN_MAX_MEMORY = "chunk_column_max_memory";
210 
    /// <summary>Indicates the target maximum data size for all columns in a chunk to be used for this table.</summary>
213  public const string CHUNK_MAX_MEMORY = "chunk_max_memory";
214 
    /// <summary>Indicates whether the table is a memory-only table.</summary>
231  public const string IS_RESULT_TABLE = "is_result_table";
232 
    /// <summary>The tier strategy for the table and its columns.</summary>
236  public const string STRATEGY_DEFINITION = "strategy_definition";
237 
    /// <summary>The default compression codec for this table's columns.</summary>
242  public const string COMPRESSION_CODEC = "compression_codec";
243  } // end struct CreateTableOptions
244 
248  public struct Options
249  {
    /// <summary>Optional name of a table to which records that were rejected are written.</summary>
255  public const string BAD_RECORD_TABLE_NAME = "bad_record_table_name";
256 
    /// <summary>A positive integer indicating the maximum number of records that can be written to the bad-record table.</summary>
260  public const string BAD_RECORD_TABLE_LIMIT = "bad_record_table_limit";
261 
    /// <summary>For subscriptions: a positive integer indicating the maximum number of bad records written per input.</summary>
268  public const string BAD_RECORD_TABLE_LIMIT_PER_INPUT = "bad_record_table_limit_per_input";
269 
    /// <summary>Internal tuning parameter: number of records per batch when inserting data.</summary>
272  public const string BATCH_SIZE = "batch_size";
273 
    /// <summary>For each target column specified, applies the column-property-bound format to the source data loaded into it.</summary>
288  public const string COLUMN_FORMATS = "column_formats";
289 
    /// <summary>Specifies a comma-delimited list of columns from the source data to load.</summary>
316  public const string COLUMNS_TO_LOAD = "columns_to_load";
317 
    /// <summary>Specifies a comma-delimited list of columns from the source data to skip.</summary>
323  public const string COLUMNS_TO_SKIP = "columns_to_skip";
324 
    /// <summary>Optional: payload compression type; the NONE/AUTO/GZIP/BZIP2 constants below are its values.</summary>
348  public const string COMPRESSION_TYPE = "compression_type";
349 
    /// <summary>No compression (a COMPRESSION_TYPE value).</summary>
351  public const string NONE = "none";
352 
    /// <summary>Auto-detect compression (a COMPRESSION_TYPE value).</summary>
355  public const string AUTO = "auto";
356 
    /// <summary>gzip compression (a COMPRESSION_TYPE value).</summary>
358  public const string GZIP = "gzip";
359 
    /// <summary>bzip2 compression (a COMPRESSION_TYPE value).</summary>
361  public const string BZIP2 = "bzip2";
362 
    /// <summary>Specifies the default format to be applied to source data loaded into columns with the corresponding column property.</summary>
392  public const string DEFAULT_COLUMN_FORMATS = "default_column_formats";
393 
    /// <summary>Specifies how errors should be handled upon insertion; values are PERMISSIVE, IGNORE_BAD_RECORDS, and ABORT.</summary>
422  public const string ERROR_HANDLING = "error_handling";
423 
    /// <summary>Records with missing columns are populated with nulls if possible; otherwise malformed records are handled leniently (an ERROR_HANDLING value).</summary>
427  public const string PERMISSIVE = "permissive";
428 
    /// <summary>Malformed records are skipped (an ERROR_HANDLING value).</summary>
430  public const string IGNORE_BAD_RECORDS = "ignore_bad_records";
431 
    /// <summary>Stops current insertion and aborts entire operation when an error is encountered (an ERROR_HANDLING value).</summary>
436  public const string ABORT = "abort";
437 
    /// <summary>Specifies the type of the file(s) whose records will be inserted; the format constants below are its values.</summary>
474  public const string FILE_TYPE = "file_type";
475 
    /// <summary>Avro file format (a FILE_TYPE value).</summary>
477  public const string AVRO = "avro";
478 
    /// <summary>Delimited text file format, e.g. CSV, TSV, PSV (a FILE_TYPE value).</summary>
481  public const string DELIMITED_TEXT = "delimited_text";
482 
    /// <summary>GDB file format (a FILE_TYPE value).</summary>
484  public const string GDB = "gdb";
485 
    /// <summary>JSON file format (a FILE_TYPE value).</summary>
487  public const string JSON = "json";
488 
    /// <summary>Apache Parquet file format (a FILE_TYPE value).</summary>
490  public const string PARQUET = "parquet";
491 
    /// <summary>Shapefile format (a FILE_TYPE value).</summary>
493  public const string SHAPEFILE = "shapefile";
494 
    /// <summary>Specifies how to handle nested columns.</summary>
511  public const string FLATTEN_COLUMNS = "flatten_columns";
512 
    /// <summary>Boolean value string used by the flag-valued options in this struct.</summary>
515  public const string TRUE = "true";
516 
    /// <summary>Boolean value string used by the flag-valued options in this struct.</summary>
519  public const string FALSE = "false";
520 
    /// <summary>Comma-separated list of GDAL configuration options, for the specific requests: key=value.</summary>
524  public const string GDAL_CONFIGURATION_OPTIONS = "gdal_configuration_options";
525 
    /// <summary>Specifies the record collision error-suppression policy for inserting into a table with a primary key.</summary>
550  public const string IGNORE_EXISTING_PK = "ignore_existing_pk";
551 
    /// <summary>Whether to do a full load, dry run, or perform a type inference on the source data; values are FULL, DRY_RUN, and TYPE_INFERENCE_ONLY.</summary>
580  public const string INGESTION_MODE = "ingestion_mode";
581 
    /// <summary>Run a type inference on the source data (if needed) and ingest (an INGESTION_MODE value).</summary>
584  public const string FULL = "full";
585 
    /// <summary>Does not load data, but walks through the source data and determines the number of valid records (an INGESTION_MODE value).</summary>
590  public const string DRY_RUN = "dry_run";
591 
    /// <summary>Infer the type of the source data and return, without ingesting any data (an INGESTION_MODE value).</summary>
596  public const string TYPE_INFERENCE_ONLY = "type_inference_only";
597 
    /// <summary>Optional: geo files layer(s) name(s), comma separated.</summary>
601  public const string LAYER = "layer";
602 
    /// <summary>Scheme for distributing the extraction and loading of data from the source data file(s); values are HEAD, DISTRIBUTED_SHARED, and DISTRIBUTED_LOCAL.</summary>
651  public const string LOADING_MODE = "loading_mode";
652 
    /// <summary>The head node loads all data (a LOADING_MODE value).</summary>
656  public const string HEAD = "head";
657 
    /// <summary>The head node coordinates loading by worker processes across all nodes from shared files (a LOADING_MODE value).</summary>
667  public const string DISTRIBUTED_SHARED = "distributed_shared";
668 
    /// <summary>A single worker process on each node loads all files that are available to it (a LOADING_MODE value).</summary>
686  public const string DISTRIBUTED_LOCAL = "distributed_local";
687 
    /// <summary>For Avro local timestamp columns.</summary>
689  public const string LOCAL_TIME_OFFSET = "local_time_offset";
690 
    /// <summary>Limit the number of records to load in this request; if larger than BATCH_SIZE, loading proceeds in batches.</summary>
696  public const string MAX_RECORDS_TO_LOAD = "max_records_to_load";
697 
    /// <summary>Optional: number of tasks for reading file per rank.</summary>
702  public const string NUM_TASKS_PER_RANK = "num_tasks_per_rank";
703 
    /// <summary>If SUBSCRIBE is TRUE, the number of seconds between attempts to load external files into the table.</summary>
710  public const string POLL_INTERVAL = "poll_interval";
711 
    /// <summary>Optional: comma separated list of column names, to set as primary keys, when not specified in the type.</summary>
715  public const string PRIMARY_KEYS = "primary_keys";
716 
    /// <summary>Confluent Schema Registry connection retries.</summary>
719  public const string SCHEMA_REGISTRY_CONNECTION_RETRIES = "schema_registry_connection_retries";
720 
    /// <summary>Confluent Schema Registry connection timeout (in seconds).</summary>
723  public const string SCHEMA_REGISTRY_CONNECTION_TIMEOUT = "schema_registry_connection_timeout";
724 
    /// <summary>Max records to skip due to Schema Registry connection failures, before failing.</summary>
727  public const string SCHEMA_REGISTRY_MAX_CONSECUTIVE_CONNECTION_FAILURES = "schema_registry_max_consecutive_connection_failures";
728 
    /// <summary>Max records to skip due to schema-related errors, before failing.</summary>
731  public const string MAX_CONSECUTIVE_INVALID_SCHEMA_FAILURE = "max_consecutive_invalid_schema_failure";
732 
    /// <summary>Name of the Avro schema in the schema registry to use when reading Avro records.</summary>
735  public const string SCHEMA_REGISTRY_SCHEMA_NAME = "schema_registry_schema_name";
736 
    /// <summary>Optional: comma separated list of column names, to set as the shard keys, when not specified in the type.</summary>
740  public const string SHARD_KEYS = "shard_keys";
741 
    /// <summary>Skip a number of lines from the beginning of the file.</summary>
744  public const string SKIP_LINES = "skip_lines";
745 
    /// <summary>Continuously poll the data source to check for new data and load it into the table.</summary>
759  public const string SUBSCRIBE = "subscribe";
760 
    /// <summary>Whether to insert all records into a SINGLE table or one TABLE_PER_FILE.</summary>
775  public const string TABLE_INSERT_MODE = "table_insert_mode";
776 
    /// <summary>Insert all records into a single table (a TABLE_INSERT_MODE value); TABLE_PER_FILE creates one table per input file.</summary>
777  public const string SINGLE = "single";
778  public const string TABLE_PER_FILE = "table_per_file";
779 
    /// <summary>Specifies the character string that should be interpreted as a comment line prefix in the source data.</summary>
789  public const string TEXT_COMMENT_STRING = "text_comment_string";
790 
    /// <summary>Specifies the character delimiting field values in the source data and field names in the header (if present).</summary>
798  public const string TEXT_DELIMITER = "text_delimiter";
799 
    /// <summary>Specifies the character that is used to escape other characters in the source data.</summary>
814  public const string TEXT_ESCAPE_CHARACTER = "text_escape_character";
815 
    /// <summary>Indicates whether the source data contains a header row.</summary>
829  public const string TEXT_HAS_HEADER = "text_has_header";
830 
    /// <summary>Specifies the delimiter for column properties in the header row (if present).</summary>
841  public const string TEXT_HEADER_PROPERTY_DELIMITER = "text_header_property_delimiter";
842 
    /// <summary>Specifies the character string that should be interpreted as a null value in the source data.</summary>
849  public const string TEXT_NULL_STRING = "text_null_string";
850 
    /// <summary>Specifies the character that should be interpreted as a field value quoting character in the source data.</summary>
863  public const string TEXT_QUOTE_CHARACTER = "text_quote_character";
864 
    /// <summary>Add 'text_search' property to internally inferenced string columns.</summary>
871  public const string TEXT_SEARCH_COLUMNS = "text_search_columns";
872 
    /// <summary>Set minimum column size (for TEXT_SEARCH_COLUMNS).</summary>
876  public const string TEXT_SEARCH_MIN_COLUMN_LENGTH = "text_search_min_column_length";
877 
    /// <summary>If set to TRUE, truncate string values that are longer than the column's type size.</summary>
892  public const string TRUNCATE_STRINGS = "truncate_strings";
893 
    /// <summary>If set to TRUE, truncates the table specified by table_name prior to loading the file(s).</summary>
908  public const string TRUNCATE_TABLE = "truncate_table";
909 
    /// <summary>Limit on the number of records read during type inference. NOTE(review): the index gives its default as '' — confirm against the endpoint docs.</summary>
911  public const string TYPE_INFERENCE_MAX_RECORDS_READ = "type_inference_max_records_read";
912 
    /// <summary>Optimize type inference for either ACCURACY or SPEED.</summary>
931  public const string TYPE_INFERENCE_MODE = "type_inference_mode";
932 
    /// <summary>Scans data to get exactly-typed and sized columns for all data scanned (a TYPE_INFERENCE_MODE value).</summary>
935  public const string ACCURACY = "accuracy";
936 
    /// <summary>Scans data and picks the widest possible column types so that all values will fit (a TYPE_INFERENCE_MODE value).</summary>
940  public const string SPEED = "speed";
941 
    /// <summary>Specifies the record collision policy for inserting into a table with a primary key (TRUE = upsert, FALSE = reject on match).</summary>
961  public const string UPDATE_ON_EXISTING_PK = "update_on_existing_pk";
962  } // end struct Options
963 
    /// <summary>Name of the table into which the data will be inserted.</summary>
974  public string table_name { get; set; }
975 
    /// <summary>Records formatted as delimited text.</summary>
977  public string data_text { get; set; }
978 
    /// <summary>Records formatted as binary data.</summary>
980  public byte[] data_bytes { get; set; }
981 
    /// <summary>Not implemented yet.</summary>
985  public IDictionary<string, IDictionary<string, string>> modify_columns { get; set; } = new Dictionary<string, IDictionary<string, string>>();
986 
    /// <summary>Options used when creating the target table; see <see cref="CreateTableOptions"/> for the valid keys.</summary>
1251  public IDictionary<string, string> create_table_options { get; set; } = new Dictionary<string, string>();
1252 
    /// <summary>Optional parameters; see <see cref="Options"/> for the valid keys.</summary>
1998  public IDictionary<string, string> options { get; set; } = new Dictionary<string, string>();
1999 
2003 
3033  string data_text,
3034  byte[] data_bytes,
3035  IDictionary<string, IDictionary<string, string>> modify_columns = null,
3036  IDictionary<string, string> create_table_options = null,
3037  IDictionary<string, string> options = null)
3038  {
3039  this.table_name = table_name ?? "";
3040  this.data_text = data_text ?? "";
3041  this.data_bytes = data_bytes ?? new byte[] { };
3042  this.modify_columns = modify_columns ?? new Dictionary<string, IDictionary<string, string>>();
3043  this.create_table_options = create_table_options ?? new Dictionary<string, string>();
3044  this.options = options ?? new Dictionary<string, string>();
3045  } // end constructor
3046  } // end class InsertRecordsFromPayloadRequest
3047 
3051  public class InsertRecordsFromPayloadResponse : KineticaData
3052  {
    /// <summary>Name of the target table. NOTE(review): presumably echoes the request's table_name — confirm.</summary>
3056  public string table_name { get; set; }
3057 
    /// <summary>ID of the currently registered table structure type for the target table.</summary>
3061  public string type_id { get; set; }
3062 
    /// <summary>A JSON string describing the columns of the target table.</summary>
3065  public string type_definition { get; set; }
3066 
    /// <summary>The user-defined description associated with the target table's structure.</summary>
3069  public string type_label { get; set; }
3070 
    /// <summary>A mapping of each target table column name to an array of column properties associated with that column.</summary>
3073  public IDictionary<string, IList<string>> type_properties { get; set; } = new Dictionary<string, IList<string>>();
3074 
    /// <summary>Number of records inserted into the target table.</summary>
3077  public long count_inserted { get; set; }
3078 
    /// <summary>Number of records skipped, when not running in ABORT error handling mode.</summary>
3082  public long count_skipped { get; set; }
3083 
    /// <summary>[Not yet implemented] Number of records updated within the target table.</summary>
3086  public long count_updated { get; set; }
3087 
    /// <summary>Additional information.</summary>
3089  public IDictionary<string, string> info { get; set; } = new Dictionary<string, string>();
3090  } // end class InsertRecordsFromPayloadResponse
3091 } // end namespace kinetica
long count_inserted
Number of records inserted into the target table.
const string CHUNK_SIZE
Indicates the number of records per chunk to be used for this table.
byte [] data_bytes
Records formatted as binary data
const string CHUNK_MAX_MEMORY
Indicates the target maximum data size for all columns in a chunk to be used for this table.
const string IS_AUTOMATIC_PARTITION
If TRUE, a new partition will be created for values which don't fall into an existing partition.
IDictionary< string, string > options
Optional parameters.
const string UPDATE_ON_EXISTING_PK
Specifies the record collision policy for inserting into a table with a primary key.
const string TEXT_QUOTE_CHARACTER
Specifies the character that should be interpreted as a field value quoting character in the source d...
long count_skipped
Number of records skipped, when not running in ABORT error handling mode.
const string PARTITION_DEFINITIONS
Comma-separated list of partition definitions, whose format depends on the choice of PARTITION_TYPE.
const string TYPE_INFERENCE_MODE
Optimize type inference for either speed or accuracy.
const string BAD_RECORD_TABLE_NAME
Optional name of a table to which records that were rejected are written.
const string NO_ERROR_IF_EXISTS
If TRUE, prevents an error from occurring if the table already exists and is of the given type.
const string LOADING_MODE
Scheme for distributing the extraction and loading of data from the source data file(s).
const string SUBSCRIBE
Continuously poll the data source to check for new data and load it into the table.
const string TYPE_INFERENCE_MAX_RECORDS_READ
The default value is ''.
const string DISTRIBUTED_SHARED
The head node coordinates loading data by worker processes across all nodes from shared files availab...
const string PARTITION_KEYS
Comma-separated list of partition keys, which are the columns or column expressions by which records ...
const string SCHEMA_REGISTRY_CONNECTION_RETRIES
Confluent Schema registry connection retries
const string COLUMN_FORMATS
For each target column specified, applies the column-property-bound format to the source data loaded ...
const string TRUNCATE_STRINGS
If set to TRUE, truncate string values that are longer than the column's type size.
const string PERMISSIVE
Records with missing columns are populated with nulls if possible; otherwise, the malformed records a...
string data_text
Records formatted as delimited text
const string TABLE_INSERT_MODE
Optional: whether records are loaded into a single table or one table per input file.
const string DEFAULT_COLUMN_FORMATS
Specifies the default format to be applied to source data loaded into columns with the corresponding ...
const string TEXT_COMMENT_STRING
Specifies the character string that should be interpreted as a comment line prefix in the source data...
const string GDAL_CONFIGURATION_OPTIONS
Comma separated list of gdal conf options, for the specific requests: key=value.
const string IS_REPLICATED
Affects the distribution scheme for the table's data.
const string SPEED
Scans data and picks the widest possible column types so that 'all' values will fit with minimum data...
const string TTL
Sets the TTL of the table specified in table_name.
const string MAX_RECORDS_TO_LOAD
Limit the number of records to load in this request: If this number is larger than a batch_size,...
const string FOREIGN_KEYS
Semicolon-separated list of foreign keys, of the format '(source_column_name [, .....
const string TEXT_NULL_STRING
Specifies the character string that should be interpreted as a null value in the source data.
const string FLATTEN_COLUMNS
Specifies how to handle nested columns.
IDictionary< string, string > info
Additional information.
const string STRATEGY_DEFINITION
The tier strategy for the table and its columns.
const string SHARD_KEYS
Optional: comma separated list of column names, to set as the shard keys, when not specified in the typ...
const string TRUE
Upsert new records when primary keys match existing records
const string BAD_RECORD_TABLE_LIMIT_PER_INPUT
For subscriptions: A positive integer indicating the maximum number of records that can be written to...
const string TEXT_HEADER_PROPERTY_DELIMITER
Specifies the delimiter for column properties in the header row (if present).
const string IS_RESULT_TABLE
Indicates whether the table is a memory-only table.
const string PARQUET
Apache Parquet file format
const string BAD_RECORD_TABLE_LIMIT
A positive integer indicating the maximum number of records that can be written to the bad-record-tab...
const string ABORT
Stops current insertion and aborts entire operation when an error is encountered.
const string TRUNCATE_TABLE
If set to TRUE, truncates the table specified by table_name prior to loading the file(s).
const string BATCH_SIZE
Internal tuning parameter: the number of records per batch when inserting data.
const string IGNORE_BAD_RECORDS
Malformed records are skipped.
string type_label
The user-defined description associated with the target table's structure
const string CHUNK_COLUMN_MAX_MEMORY
Indicates the target maximum data size for each column in a chunk to be used for this table.
const string COMPRESSION_CODEC
The default compression codec for this table's columns.
string type_definition
A JSON string describing the columns of the target table
const string DRY_RUN
Does not load data, but walks through the source data and determines the number of valid records,...
const string TEXT_SEARCH_COLUMNS
Add 'text_search' property to internally inferenced string columns.
const string POLL_INTERVAL
If SUBSCRIBE is TRUE, the number of seconds between attempts to load external files into the table.
IDictionary< string, string > create_table_options
Options used when creating the target table.
const string DISTRIBUTED_LOCAL
A single worker process on each node loads all files that are available to it.
const string PRIMARY_KEYS
Optional: comma separated list of column names, to set as primary keys, when not specified in the typ...
InsertRecordsFromPayloadRequest()
Constructs an InsertRecordsFromPayloadRequest object with default parameters.
const string INGESTION_MODE
Whether to do a full load, dry run, or perform a type inference on the source data.
const string HEAD
The head node loads all data.
IDictionary< string, IList< string > > type_properties
A mapping of each target table column name to an array of column properties associated with that colu...
const string DELIMITED_TEXT
Delimited text file format; e.g., CSV, TSV, PSV, etc.
const string TEXT_SEARCH_MIN_COLUMN_LENGTH
Set minimum column size.
const string FALSE
Reject new records when primary keys match existing records
const string COMPRESSION_TYPE
Optional: payload compression type.
InsertRecordsFromPayloadRequest(string table_name, string data_text, byte[] data_bytes, IDictionary< string, IDictionary< string, string >> modify_columns=null, IDictionary< string, string > create_table_options=null, IDictionary< string, string > options=null)
Constructs an InsertRecordsFromPayloadRequest object with the specified parameters.
const string TYPE_INFERENCE_ONLY
Infer the type of the source data and return, without ingesting any data.
const string FULL
Run a type inference on the source data (if needed) and ingest
const string TEXT_HAS_HEADER
Indicates whether the source data contains a header row.
const string FILE_TYPE
Specifies the type of the file(s) whose records will be inserted.
const string ACCURACY
Scans data to get exactly-typed & sized columns for all data scanned.
const string COLUMNS_TO_SKIP
Specifies a comma-delimited list of columns from the source data to skip.
const string SCHEMA_REGISTRY_CONNECTION_TIMEOUT
Confluent Schema registry connection timeout (in Secs)
const string FOREIGN_SHARD_KEY
Foreign shard key of the format 'source_column references shard_by_column from target_table(primary_k...
const string IGNORE_EXISTING_PK
Specifies the record collision error-suppression policy for inserting into a table with a primary key...
const string ERROR_HANDLING
Specifies how errors should be handled upon insertion.
const string TEXT_ESCAPE_CHARACTER
Specifies the character that is used to escape other characters in the source data.
string table_name
Name of the table into which the data will be inserted, in [schema_name.
const string LAYER
Optional: geo files layer(s) name(s): comma separated.
long count_updated
[Not yet implemented] Number of records updated within the target table.
const string TYPE_ID
ID of a currently registered type.
const string SCHEMA_REGISTRY_SCHEMA_NAME
Name of the Avro schema in the schema registry to use when reading Avro records.
const string LOCAL_TIME_OFFSET
For Avro local timestamp columns
const string MAX_CONSECUTIVE_INVALID_SCHEMA_FAILURE
Max records to skip due to schema related errors, before failing
const string TEXT_DELIMITER
Specifies the character delimiting field values in the source data and field names in the header (if ...
const string SKIP_LINES
Skip a number of lines from the beginning of the file.
const string SCHEMA_REGISTRY_MAX_CONSECUTIVE_CONNECTION_FAILURES
Max records to skip due to SR connection failures, before failing
string type_id
ID of the currently registered table structure type for the target table
const string NUM_TASKS_PER_RANK
Optional: number of tasks for reading file per rank.
const string COLUMNS_TO_LOAD
Specifies a comma-delimited list of columns from the source data to load.
IDictionary< string, IDictionary< string, string > > modify_columns
Not implemented yet.