bi_etl.bulk_loaders.redshift_s3_csv_loader module

class bi_etl.bulk_loaders.redshift_s3_csv_loader.RedShiftS3CSVBulk(config: S3_Bulk_Loader_Config, has_header: bool = True, s3_file_delimiter: str = '|', null_value: str = '')[source]

Bases: RedShiftS3Base

__init__(config: S3_Bulk_Loader_Config, has_header: bool = True, s3_file_delimiter: str = '|', null_value: str = '')[source]
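A construction sketch; s3_config is assumed to be an S3_Bulk_Loader_Config instance built elsewhere from your ETL configuration:

    from bi_etl.bulk_loaders.redshift_s3_csv_loader import RedShiftS3CSVBulk

    # s3_config: an S3_Bulk_Loader_Config instance (assumed, built from your ETL config)
    bulk_loader = RedShiftS3CSVBulk(
        config=s3_config,
        has_header=True,        # stage CSV files with a header row
        s3_file_delimiter='|',  # field delimiter used in the staged files
        null_value='',          # string written in place of NULL values
    )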
apply_updates(table_object: Table, update_rows: Sequence[Row])

NOT TESTED!

clean_s3_folder(s3_full_folder)
get_copy_sql(s3_source_path: str, table_to_load: str, file_compression: str = '', analyze_compression: str | None = None, options: str = '')[source]
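A sketch of building (without executing) the Redshift COPY statement for a staged path, using the bulk_loader constructed above; the path, target table, and options shown are illustrative, and GZIP / MAXERROR are standard Redshift COPY options:

    copy_sql = bulk_loader.get_copy_sql(
        s3_source_path='s3://example-bucket/staging/dim_customer/',  # illustrative
        table_to_load='analytics.dim_customer',                      # illustrative
        file_compression='GZIP',
        options='MAXERROR 0',
    )
    print(copy_sql)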
load_from_files(local_files: List[str | Path], table_object: Table, table_to_load: str | None = None, perform_rename: bool = False, file_compression: str = '', options: str = '', analyze_compression: str | None = None) → int
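A sketch of loading pre-written local CSV files; table_object is assumed to be a bi_etl Table bound to the target Redshift table, and the file path is illustrative:

    from pathlib import Path

    rows_loaded = bulk_loader.load_from_files(
        local_files=[Path('/tmp/dim_customer_0001.csv')],  # illustrative
        table_object=table_object,
        perform_rename=False,  # load straight into the target table
    )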
load_from_iterable(iterable: Iterable, table_object: Table, table_to_load: str | None = None, perform_rename: bool = False, progress_frequency: int = 10, analyze_compression: str | None = None, parent_task: ETLTask | None = None) → int[source]
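A sketch of streaming rows through S3 into Redshift in one call; generate_rows is a hypothetical callable yielding bi_etl Row objects:

    rows_loaded = bulk_loader.load_from_iterable(
        iterable=generate_rows(),   # hypothetical generator of Row objects
        table_object=table_object,
        progress_frequency=10,      # progress reporting interval (the default)
    )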
load_from_iterable_partition_fixed(iterable: Iterable, table_object: Table, table_to_load: str | None = None, perform_rename: bool = False, progress_frequency: int = 10, analyze_compression: str | None = None, parent_task: ETLTask | None = None) → int[source]
load_from_iterable_partition_max_rows(iterable: Iterable, table_object: Table, table_to_load: str | None = None, perform_rename: bool = False, progress_frequency: int = 10, analyze_compression: str | None = None, parent_task: ETLTask | None = None) → int[source]
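Judging by their names, the two partitioned variants appear to differ only in how rows are split across staged files: load_from_iterable_partition_fixed spreads rows over a fixed number of file partitions, while load_from_iterable_partition_max_rows caps the rows written per partition; both are presumably driven by the loader configuration, since the signatures expose no partition parameter. A call sketch, reusing the hypothetical generate_rows from above:

    # Same argument shape as load_from_iterable; the partitioning itself is
    # presumably controlled by the S3_Bulk_Loader_Config settings.
    rows_loaded = bulk_loader.load_from_iterable_partition_max_rows(
        iterable=generate_rows(),
        table_object=table_object,
    )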
load_from_iterator(iterator: Iterator, table_object: Table, table_to_load: str | None = None, perform_rename: bool = False, progress_frequency: int = 10, analyze_compression: str | None = None, parent_task: ETLTask | None = None) → int
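The iterator entry point mirrors load_from_iterable for the case where you already hold an iterator rather than an iterable:

    # pending_rows: a hypothetical in-memory sequence of Row objects
    rows_loaded = bulk_loader.load_from_iterator(
        iterator=iter(pending_rows),
        table_object=table_object,
    )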
load_from_s3_path(s3_source_path: str, table_object: Table, table_to_load: str | None = None, s3_source_path_is_absolute: bool = True, file_list: List[str] | None = None, file_compression: str = '', options: str = '', analyze_compression: str | None = None, perform_rename: bool = False) → int
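When files are already staged in S3, the local-file step can be skipped entirely; a sketch with an illustrative path:

    rows_loaded = bulk_loader.load_from_s3_path(
        s3_source_path='s3://example-bucket/staging/dim_customer/',  # illustrative
        table_object=table_object,
        file_compression='GZIP',  # only if the staged files are gzip-compressed
    )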
load_table_from_cache(table_object: Table, table_to_load: str | None = None, perform_rename: bool = False, progress_frequency: int = 10, analyze_compression: str | None = None) → int
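A sketch of flushing rows that table_object has cached in memory; perform_rename=True presumably loads into a temporary table first and then swaps names via rename_table (an assumption based on the method names):

    rows_loaded = bulk_loader.load_table_from_cache(
        table_object=table_object,
        perform_rename=True,  # presumed: load a temp table, then rename into place
    )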
property needs_all_columns
rename_table(temp_table_name: str, table_object: Table)
s3_folder_contents(s3_full_folder)
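A housekeeping sketch for the two folder helpers; the prefix is illustrative, and s3_folder_contents is assumed to return an iterable of the keys found under the folder:

    # Inspect the staging prefix, then clear any leftover files before a fresh load.
    leftover = list(bulk_loader.s3_folder_contents('staging/dim_customer/'))
    if leftover:
        bulk_loader.clean_s3_folder('staging/dim_customer/')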