1. Packages
  2. Airbyte Provider
  3. API Docs
  4. SourceSftpBulk
airbyte 0.7.0-beta2 published on Friday, Mar 7, 2025 by airbytehq

airbyte.SourceSftpBulk

Explore with Pulumi AI

airbyte logo
airbyte 0.7.0-beta2 published on Friday, Mar 7, 2025 by airbytehq

    SourceSftpBulk Resource

    Example Usage

    Coming soon!
    
    Coming soon!
    
    Coming soon!
    
    Coming soon!
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.SourceSftpBulk;
    import com.pulumi.airbyte.SourceSftpBulkArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationCredentialsArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationDeliveryMethodArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatAvroFormatArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatParquetFormatArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs;
    import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var mySourceSftpbulk = new SourceSftpBulk("mySourceSftpbulk", SourceSftpBulkArgs.builder()
                .configuration(SourceSftpBulkConfigurationArgs.builder()
                    .credentials(SourceSftpBulkConfigurationCredentialsArgs.builder()
                        .authenticateViaPassword(SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs.builder()
                            .password("...my_password...")
                            .build())
                        .authenticateViaPrivateKey(SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs.builder()
                            .privateKey("...my_private_key...")
                            .build())
                        .build())
                    .delivery_method(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
                    .folder_path("/logs/2022")
                    .host("www.host.com")
                    .port(22)
                    .start_date("2021-01-01T00:00:00.000000Z")
                    .streams(SourceSftpBulkConfigurationStreamArgs.builder()
                        .daysToSyncIfHistoryIsFull(5)
                        .format(SourceSftpBulkConfigurationStreamFormatArgs.builder()
                            .avroFormat(SourceSftpBulkConfigurationStreamFormatAvroFormatArgs.builder()
                                .doubleAsString(false)
                                .build())
                            .csvFormat(SourceSftpBulkConfigurationStreamFormatCsvFormatArgs.builder()
                                .delimiter("...my_delimiter...")
                                .doubleQuote(true)
                                .encoding("...my_encoding...")
                                .escapeChar("...my_escape_char...")
                                .falseValues("...")
                                .headerDefinition(SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs.builder()
                                    .autogenerated()
                                    .fromCsv()
                                    .userProvided(SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs.builder()
                                        .columnNames("...")
                                        .build())
                                    .build())
                                .ignoreErrorsOnFieldsMismatch(true)
                                .nullValues("...")
                                .quoteChar("...my_quote_char...")
                                .skipRowsAfterHeader(2)
                                .skipRowsBeforeHeader(2)
                                .stringsCanBeNull(false)
                                .trueValues("...")
                                .build())
                            .excelFormat()
                            .jsonlFormat()
                            .parquetFormat(SourceSftpBulkConfigurationStreamFormatParquetFormatArgs.builder()
                                .decimalAsFloat(true)
                                .build())
                            .unstructuredDocumentFormat(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs.builder()
                                .processing(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs.builder()
                                    .local()
                                    .viaApi(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs.builder()
                                        .apiKey("...my_api_key...")
                                        .apiUrl("https://api.unstructured.com")
                                        .parameters(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs.builder()
                                            .name("combine_under_n_chars")
                                            .value("true")
                                            .build())
                                        .build())
                                    .build())
                                .skipUnprocessableFiles(false)
                                .strategy("ocr_only")
                                .build())
                            .build())
                        .globs("...")
                        .inputSchema("...my_input_schema...")
                        .name("...my_name...")
                        .recentNFilesToReadForSchemaDiscovery(4)
                        .schemaless(false)
                        .validationPolicy("Skip Record")
                        .build())
                    .username("...my_username...")
                    .build())
                .definitionId("68ea307f-7d93-44b6-91fe-f4681001bb74")
                .secretId("...my_secret_id...")
                .workspaceId("ec1a23b7-466d-4987-acf2-eae96eb456d0")
                .build());
    
        }
    }
    
    resources:
      mySourceSftpbulk:
        type: airbyte:SourceSftpBulk
        properties:
          configuration:
            credentials:
              authenticateViaPassword:
                password: '...my_password...'
              authenticateViaPrivateKey:
                privateKey: '...my_private_key...'
            deliveryMethod:
              copyRawFiles:
                preserveDirectoryStructure: false
              replicateRecords: {}
            folderPath: /logs/2022
            host: www.host.com
            port: 22
            startDate: 2021-01-01T00:00:00.000000Z
            streams:
              - daysToSyncIfHistoryIsFull: 5
                format:
                  avroFormat:
                    doubleAsString: false
                  csvFormat:
                    delimiter: '...my_delimiter...'
                    doubleQuote: true
                    encoding: '...my_encoding...'
                    escapeChar: '...my_escape_char...'
                    falseValues:
                      - '...'
                    headerDefinition:
                      autogenerated: {}
                      fromCsv: {}
                      userProvided:
                        columnNames:
                          - '...'
                    ignoreErrorsOnFieldsMismatch: true
                    nullValues:
                      - '...'
                    quoteChar: '...my_quote_char...'
                    skipRowsAfterHeader: 2
                    skipRowsBeforeHeader: 2
                    stringsCanBeNull: false
                    trueValues:
                      - '...'
                  excelFormat: {}
                  jsonlFormat: {}
                  parquetFormat:
                    decimalAsFloat: true
                  unstructuredDocumentFormat:
                    processing:
                      local: {}
                      viaApi:
                        apiKey: '...my_api_key...'
                        apiUrl: https://api.unstructured.com
                        parameters:
                          - name: combine_under_n_chars
                            value: 'true'
                    skipUnprocessableFiles: false
                    strategy: ocr_only
                globs:
                  - '...'
                inputSchema: '...my_input_schema...'
                name: '...my_name...'
                recentNFilesToReadForSchemaDiscovery: 4
                schemaless: false
                validationPolicy: Skip Record
            username: '...my_username...'
          definitionId: 68ea307f-7d93-44b6-91fe-f4681001bb74
          secretId: '...my_secret_id...'
          workspaceId: ec1a23b7-466d-4987-acf2-eae96eb456d0
    

    Create SourceSftpBulk Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new SourceSftpBulk(name: string, args: SourceSftpBulkArgs, opts?: CustomResourceOptions);
    @overload
    def SourceSftpBulk(resource_name: str,
                       args: SourceSftpBulkArgs,
                       opts: Optional[ResourceOptions] = None)
    
    @overload
    def SourceSftpBulk(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       configuration: Optional[SourceSftpBulkConfigurationArgs] = None,
                       workspace_id: Optional[str] = None,
                       definition_id: Optional[str] = None,
                       name: Optional[str] = None,
                       secret_id: Optional[str] = None)
    func NewSourceSftpBulk(ctx *Context, name string, args SourceSftpBulkArgs, opts ...ResourceOption) (*SourceSftpBulk, error)
    public SourceSftpBulk(string name, SourceSftpBulkArgs args, CustomResourceOptions? opts = null)
    public SourceSftpBulk(String name, SourceSftpBulkArgs args)
    public SourceSftpBulk(String name, SourceSftpBulkArgs args, CustomResourceOptions options)
    
    type: airbyte:SourceSftpBulk
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args SourceSftpBulkArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SourceSftpBulkArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SourceSftpBulkArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SourceSftpBulkArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SourceSftpBulkArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var sourceSftpBulkResource = new Airbyte.SourceSftpBulk("sourceSftpBulkResource", new()
    {
        Configuration = new Airbyte.Inputs.SourceSftpBulkConfigurationArgs
        {
            Credentials = new Airbyte.Inputs.SourceSftpBulkConfigurationCredentialsArgs
            {
                AuthenticateViaPassword = new Airbyte.Inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs
                {
                    Password = "string",
                },
                AuthenticateViaPrivateKey = new Airbyte.Inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs
                {
                    PrivateKey = "string",
                },
            },
            Host = "string",
            Streams = new[]
            {
                new Airbyte.Inputs.SourceSftpBulkConfigurationStreamArgs
                {
                    Format = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatArgs
                    {
                        AvroFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatAvroFormatArgs
                        {
                            DoubleAsString = false,
                        },
                        CsvFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatArgs
                        {
                            Delimiter = "string",
                            DoubleQuote = false,
                            Encoding = "string",
                            EscapeChar = "string",
                            FalseValues = new[]
                            {
                                "string",
                            },
                            HeaderDefinition = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs
                            {
                                Autogenerated = null,
                                FromCsv = null,
                                UserProvided = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs
                                {
                                    ColumnNames = new[]
                                    {
                                        "string",
                                    },
                                },
                            },
                            IgnoreErrorsOnFieldsMismatch = false,
                            NullValues = new[]
                            {
                                "string",
                            },
                            QuoteChar = "string",
                            SkipRowsAfterHeader = 0,
                            SkipRowsBeforeHeader = 0,
                            StringsCanBeNull = false,
                            TrueValues = new[]
                            {
                                "string",
                            },
                        },
                        ExcelFormat = null,
                        JsonlFormat = null,
                        ParquetFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatParquetFormatArgs
                        {
                            DecimalAsFloat = false,
                        },
                        UnstructuredDocumentFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs
                        {
                            Processing = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs
                            {
                                Local = null,
                                ViaApi = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs
                                {
                                    ApiKey = "string",
                                    ApiUrl = "string",
                                    Parameters = new[]
                                    {
                                        new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs
                                        {
                                            Name = "string",
                                            Value = "string",
                                        },
                                    },
                                },
                            },
                            SkipUnprocessableFiles = false,
                            Strategy = "string",
                        },
                    },
                    Name = "string",
                    DaysToSyncIfHistoryIsFull = 0,
                    Globs = new[]
                    {
                        "string",
                    },
                    InputSchema = "string",
                    RecentNFilesToReadForSchemaDiscovery = 0,
                    Schemaless = false,
                    ValidationPolicy = "string",
                },
            },
            Username = "string",
            DeliveryMethod = new Airbyte.Inputs.SourceSftpBulkConfigurationDeliveryMethodArgs
            {
                CopyRawFiles = new Airbyte.Inputs.SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs
                {
                    PreserveDirectoryStructure = false,
                },
                ReplicateRecords = null,
            },
            FolderPath = "string",
            Port = 0,
            StartDate = "string",
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
        SecretId = "string",
    });
    
    example, err := airbyte.NewSourceSftpBulk(ctx, "sourceSftpBulkResource", &airbyte.SourceSftpBulkArgs{
        Configuration: &airbyte.SourceSftpBulkConfigurationArgs{
            Credentials: &airbyte.SourceSftpBulkConfigurationCredentialsArgs{
                AuthenticateViaPassword: &airbyte.SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs{
                    Password: pulumi.String("string"),
                },
                AuthenticateViaPrivateKey: &airbyte.SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs{
                    PrivateKey: pulumi.String("string"),
                },
            },
            Host: pulumi.String("string"),
            Streams: airbyte.SourceSftpBulkConfigurationStreamArray{
                &airbyte.SourceSftpBulkConfigurationStreamArgs{
                    Format: &airbyte.SourceSftpBulkConfigurationStreamFormatArgs{
                        AvroFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatAvroFormatArgs{
                            DoubleAsString: pulumi.Bool(false),
                        },
                        CsvFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatArgs{
                            Delimiter:   pulumi.String("string"),
                            DoubleQuote: pulumi.Bool(false),
                            Encoding:    pulumi.String("string"),
                            EscapeChar:  pulumi.String("string"),
                            FalseValues: pulumi.StringArray{
                                pulumi.String("string"),
                            },
                            HeaderDefinition: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs{
                                Autogenerated: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionAutogeneratedArgs{},
                                FromCsv:       &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionFromCsvArgs{},
                                UserProvided: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs{
                                    ColumnNames: pulumi.StringArray{
                                        pulumi.String("string"),
                                    },
                                },
                            },
                            IgnoreErrorsOnFieldsMismatch: pulumi.Bool(false),
                            NullValues: pulumi.StringArray{
                                pulumi.String("string"),
                            },
                            QuoteChar:            pulumi.String("string"),
                            SkipRowsAfterHeader:  pulumi.Float64(0),
                            SkipRowsBeforeHeader: pulumi.Float64(0),
                            StringsCanBeNull:     pulumi.Bool(false),
                            TrueValues: pulumi.StringArray{
                                pulumi.String("string"),
                            },
                        },
                        ExcelFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatExcelFormatArgs{},
                        JsonlFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatJsonlFormatArgs{},
                        ParquetFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatParquetFormatArgs{
                            DecimalAsFloat: pulumi.Bool(false),
                        },
                        UnstructuredDocumentFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs{
                            Processing: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs{
                                Local: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocalArgs{},
                                ViaApi: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs{
                                    ApiKey: pulumi.String("string"),
                                    ApiUrl: pulumi.String("string"),
                                    Parameters: airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArray{
                                        &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs{
                                            Name:  pulumi.String("string"),
                                            Value: pulumi.String("string"),
                                        },
                                    },
                                },
                            },
                            SkipUnprocessableFiles: pulumi.Bool(false),
                            Strategy:               pulumi.String("string"),
                        },
                    },
                    Name:                      pulumi.String("string"),
                    DaysToSyncIfHistoryIsFull: pulumi.Float64(0),
                    Globs: pulumi.StringArray{
                        pulumi.String("string"),
                    },
                    InputSchema:                          pulumi.String("string"),
                    RecentNFilesToReadForSchemaDiscovery: pulumi.Float64(0),
                    Schemaless:                           pulumi.Bool(false),
                    ValidationPolicy:                     pulumi.String("string"),
                },
            },
            Username: pulumi.String("string"),
            DeliveryMethod: &airbyte.SourceSftpBulkConfigurationDeliveryMethodArgs{
                CopyRawFiles: &airbyte.SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs{
                    PreserveDirectoryStructure: pulumi.Bool(false),
                },
                ReplicateRecords: &airbyte.SourceSftpBulkConfigurationDeliveryMethodReplicateRecordsArgs{},
            },
            FolderPath: pulumi.String("string"),
            Port:       pulumi.Float64(0),
            StartDate:  pulumi.String("string"),
        },
        WorkspaceId:  pulumi.String("string"),
        DefinitionId: pulumi.String("string"),
        Name:         pulumi.String("string"),
        SecretId:     pulumi.String("string"),
    })
    
    var sourceSftpBulkResource = new SourceSftpBulk("sourceSftpBulkResource", SourceSftpBulkArgs.builder()
        .configuration(SourceSftpBulkConfigurationArgs.builder()
            .credentials(SourceSftpBulkConfigurationCredentialsArgs.builder()
                .authenticateViaPassword(SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs.builder()
                    .password("string")
                    .build())
                .authenticateViaPrivateKey(SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs.builder()
                    .privateKey("string")
                    .build())
                .build())
            .host("string")
            .streams(SourceSftpBulkConfigurationStreamArgs.builder()
                .format(SourceSftpBulkConfigurationStreamFormatArgs.builder()
                    .avroFormat(SourceSftpBulkConfigurationStreamFormatAvroFormatArgs.builder()
                        .doubleAsString(false)
                        .build())
                    .csvFormat(SourceSftpBulkConfigurationStreamFormatCsvFormatArgs.builder()
                        .delimiter("string")
                        .doubleQuote(false)
                        .encoding("string")
                        .escapeChar("string")
                        .falseValues("string")
                        .headerDefinition(SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs.builder()
                            .autogenerated()
                            .fromCsv()
                            .userProvided(SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs.builder()
                                .columnNames("string")
                                .build())
                            .build())
                        .ignoreErrorsOnFieldsMismatch(false)
                        .nullValues("string")
                        .quoteChar("string")
                        .skipRowsAfterHeader(0)
                        .skipRowsBeforeHeader(0)
                        .stringsCanBeNull(false)
                        .trueValues("string")
                        .build())
                    .excelFormat()
                    .jsonlFormat()
                    .parquetFormat(SourceSftpBulkConfigurationStreamFormatParquetFormatArgs.builder()
                        .decimalAsFloat(false)
                        .build())
                    .unstructuredDocumentFormat(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs.builder()
                        .processing(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs.builder()
                            .local()
                            .viaApi(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs.builder()
                                .apiKey("string")
                                .apiUrl("string")
                                .parameters(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs.builder()
                                    .name("string")
                                    .value("string")
                                    .build())
                                .build())
                            .build())
                        .skipUnprocessableFiles(false)
                        .strategy("string")
                        .build())
                    .build())
                .name("string")
                .daysToSyncIfHistoryIsFull(0)
                .globs("string")
                .inputSchema("string")
                .recentNFilesToReadForSchemaDiscovery(0)
                .schemaless(false)
                .validationPolicy("string")
                .build())
            .username("string")
            .deliveryMethod(SourceSftpBulkConfigurationDeliveryMethodArgs.builder()
                .copyRawFiles(SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs.builder()
                    .preserveDirectoryStructure(false)
                    .build())
                .replicateRecords()
                .build())
            .folderPath("string")
            .port(0)
            .startDate("string")
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .secretId("string")
        .build());
    
    source_sftp_bulk_resource = airbyte.SourceSftpBulk("sourceSftpBulkResource",
        configuration={
            "credentials": {
                "authenticate_via_password": {
                    "password": "string",
                },
                "authenticate_via_private_key": {
                    "private_key": "string",
                },
            },
            "host": "string",
            "streams": [{
                "format": {
                    "avro_format": {
                        "double_as_string": False,
                    },
                    "csv_format": {
                        "delimiter": "string",
                        "double_quote": False,
                        "encoding": "string",
                        "escape_char": "string",
                        "false_values": ["string"],
                        "header_definition": {
                            "autogenerated": {},
                            "from_csv": {},
                            "user_provided": {
                                "column_names": ["string"],
                            },
                        },
                        "ignore_errors_on_fields_mismatch": False,
                        "null_values": ["string"],
                        "quote_char": "string",
                        "skip_rows_after_header": 0,
                        "skip_rows_before_header": 0,
                        "strings_can_be_null": False,
                        "true_values": ["string"],
                    },
                    "excel_format": {},
                    "jsonl_format": {},
                    "parquet_format": {
                        "decimal_as_float": False,
                    },
                    "unstructured_document_format": {
                        "processing": {
                            "local": {},
                            "via_api": {
                                "api_key": "string",
                                "api_url": "string",
                                "parameters": [{
                                    "name": "string",
                                    "value": "string",
                                }],
                            },
                        },
                        "skip_unprocessable_files": False,
                        "strategy": "string",
                    },
                },
                "name": "string",
                "days_to_sync_if_history_is_full": 0,
                "globs": ["string"],
                "input_schema": "string",
                "recent_n_files_to_read_for_schema_discovery": 0,
                "schemaless": False,
                "validation_policy": "string",
            }],
            "username": "string",
            "delivery_method": {
                "copy_raw_files": {
                    "preserve_directory_structure": False,
                },
                "replicate_records": {},
            },
            "folder_path": "string",
            "port": 0,
            "start_date": "string",
        },
        workspace_id="string",
        definition_id="string",
        name="string",
        secret_id="string")
    
    const sourceSftpBulkResource = new airbyte.SourceSftpBulk("sourceSftpBulkResource", {
        configuration: {
            credentials: {
                authenticateViaPassword: {
                    password: "string",
                },
                authenticateViaPrivateKey: {
                    privateKey: "string",
                },
            },
            host: "string",
            streams: [{
                format: {
                    avroFormat: {
                        doubleAsString: false,
                    },
                    csvFormat: {
                        delimiter: "string",
                        doubleQuote: false,
                        encoding: "string",
                        escapeChar: "string",
                        falseValues: ["string"],
                        headerDefinition: {
                            autogenerated: {},
                            fromCsv: {},
                            userProvided: {
                                columnNames: ["string"],
                            },
                        },
                        ignoreErrorsOnFieldsMismatch: false,
                        nullValues: ["string"],
                        quoteChar: "string",
                        skipRowsAfterHeader: 0,
                        skipRowsBeforeHeader: 0,
                        stringsCanBeNull: false,
                        trueValues: ["string"],
                    },
                    excelFormat: {},
                    jsonlFormat: {},
                    parquetFormat: {
                        decimalAsFloat: false,
                    },
                    unstructuredDocumentFormat: {
                        processing: {
                            local: {},
                            viaApi: {
                                apiKey: "string",
                                apiUrl: "string",
                                parameters: [{
                                    name: "string",
                                    value: "string",
                                }],
                            },
                        },
                        skipUnprocessableFiles: false,
                        strategy: "string",
                    },
                },
                name: "string",
                daysToSyncIfHistoryIsFull: 0,
                globs: ["string"],
                inputSchema: "string",
                recentNFilesToReadForSchemaDiscovery: 0,
                schemaless: false,
                validationPolicy: "string",
            }],
            username: "string",
            deliveryMethod: {
                copyRawFiles: {
                    preserveDirectoryStructure: false,
                },
                replicateRecords: {},
            },
            folderPath: "string",
            port: 0,
            startDate: "string",
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
        secretId: "string",
    });
    
    type: airbyte:SourceSftpBulk
    properties:
        configuration:
            credentials:
                authenticateViaPassword:
                    password: string
                authenticateViaPrivateKey:
                    privateKey: string
            deliveryMethod:
                copyRawFiles:
                    preserveDirectoryStructure: false
                replicateRecords: {}
            folderPath: string
            host: string
            port: 0
            startDate: string
            streams:
                - daysToSyncIfHistoryIsFull: 0
                  format:
                    avroFormat:
                        doubleAsString: false
                    csvFormat:
                        delimiter: string
                        doubleQuote: false
                        encoding: string
                        escapeChar: string
                        falseValues:
                            - string
                        headerDefinition:
                            autogenerated: {}
                            fromCsv: {}
                            userProvided:
                                columnNames:
                                    - string
                        ignoreErrorsOnFieldsMismatch: false
                        nullValues:
                            - string
                        quoteChar: string
                        skipRowsAfterHeader: 0
                        skipRowsBeforeHeader: 0
                        stringsCanBeNull: false
                        trueValues:
                            - string
                    excelFormat: {}
                    jsonlFormat: {}
                    parquetFormat:
                        decimalAsFloat: false
                    unstructuredDocumentFormat:
                        processing:
                            local: {}
                            viaApi:
                                apiKey: string
                                apiUrl: string
                                parameters:
                                    - name: string
                                      value: string
                        skipUnprocessableFiles: false
                        strategy: string
                  globs:
                    - string
                  inputSchema: string
                  name: string
                  recentNFilesToReadForSchemaDiscovery: 0
                  schemaless: false
                  validationPolicy: string
            username: string
        definitionId: string
        name: string
        secretId: string
        workspaceId: string
    

    SourceSftpBulk Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The SourceSftpBulk resource accepts the following input properties:

    Configuration SourceSftpBulkConfiguration
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    Configuration SourceSftpBulkConfigurationArgs
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration SourceSftpBulkConfiguration
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration SourceSftpBulkConfiguration
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name string
    Name of the source e.g. dev-mysql-instance.
    secretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration SourceSftpBulkConfigurationArgs
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name str
    Name of the source e.g. dev-mysql-instance.
    secret_id str
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration Property Map
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the SourceSftpBulk resource produces the following output properties:

    CreatedAt double
    Id string
    The provider-assigned unique ID for this managed resource.
    SourceId string
    SourceType string
    CreatedAt float64
    Id string
    The provider-assigned unique ID for this managed resource.
    SourceId string
    SourceType string
    createdAt Double
    id String
    The provider-assigned unique ID for this managed resource.
    sourceId String
    sourceType String
    createdAt number
    id string
    The provider-assigned unique ID for this managed resource.
    sourceId string
    sourceType string
    created_at float
    id str
    The provider-assigned unique ID for this managed resource.
    source_id str
    source_type str
    createdAt Number
    id String
    The provider-assigned unique ID for this managed resource.
    sourceId String
    sourceType String

    Look up Existing SourceSftpBulk Resource

    Get an existing SourceSftpBulk resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: SourceSftpBulkState, opts?: CustomResourceOptions): SourceSftpBulk
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[SourceSftpBulkConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            name: Optional[str] = None,
            secret_id: Optional[str] = None,
            source_id: Optional[str] = None,
            source_type: Optional[str] = None,
            workspace_id: Optional[str] = None) -> SourceSftpBulk
    func GetSourceSftpBulk(ctx *Context, name string, id IDInput, state *SourceSftpBulkState, opts ...ResourceOption) (*SourceSftpBulk, error)
    public static SourceSftpBulk Get(string name, Input<string> id, SourceSftpBulkState? state, CustomResourceOptions? opts = null)
    public static SourceSftpBulk get(String name, Output<String> id, SourceSftpBulkState state, CustomResourceOptions options)
    resources:  _:    type: airbyte:SourceSftpBulk    get:      id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Configuration SourceSftpBulkConfiguration
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    SourceId string
    SourceType string
    WorkspaceId string
    Configuration SourceSftpBulkConfigurationArgs
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    SourceId string
    SourceType string
    WorkspaceId string
    configuration SourceSftpBulkConfiguration
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    sourceId String
    sourceType String
    workspaceId String
    configuration SourceSftpBulkConfiguration
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name string
    Name of the source e.g. dev-mysql-instance.
    secretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    sourceId string
    sourceType string
    workspaceId string
    configuration SourceSftpBulkConfigurationArgs
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name str
    Name of the source e.g. dev-mysql-instance.
    secret_id str
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    source_id str
    source_type str
    workspace_id str
    configuration Property Map
    Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    sourceId String
    sourceType String
    workspaceId String

    Supporting Types

    SourceSftpBulkConfiguration, SourceSftpBulkConfigurationArgs

    Credentials SourceSftpBulkConfigurationCredentials
    Credentials for connecting to the SFTP Server
    Host string
    The server host address
    Streams List<SourceSftpBulkConfigurationStream>
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    Username string
    The server user
    DeliveryMethod SourceSftpBulkConfigurationDeliveryMethod
    FolderPath string
    The directory to search files for sync. Default: "/"
    Port double
    The server port. Default: 22
    StartDate string
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    Credentials SourceSftpBulkConfigurationCredentials
    Credentials for connecting to the SFTP Server
    Host string
    The server host address
    Streams []SourceSftpBulkConfigurationStream
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    Username string
    The server user
    DeliveryMethod SourceSftpBulkConfigurationDeliveryMethod
    FolderPath string
    The directory to search files for sync. Default: "/"
    Port float64
    The server port. Default: 22
    StartDate string
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    credentials SourceSftpBulkConfigurationCredentials
    Credentials for connecting to the SFTP Server
    host String
    The server host address
    streams List<SourceSftpBulkConfigurationStream>
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    username String
    The server user
    deliveryMethod SourceSftpBulkConfigurationDeliveryMethod
    folderPath String
    The directory to search files for sync. Default: "/"
    port Double
    The server port. Default: 22
    startDate String
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    credentials SourceSftpBulkConfigurationCredentials
    Credentials for connecting to the SFTP Server
    host string
    The server host address
    streams SourceSftpBulkConfigurationStream[]
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    username string
    The server user
    deliveryMethod SourceSftpBulkConfigurationDeliveryMethod
    folderPath string
    The directory to search files for sync. Default: "/"
    port number
    The server port. Default: 22
    startDate string
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    credentials SourceSftpBulkConfigurationCredentials
    Credentials for connecting to the SFTP Server
    host str
    The server host address
    streams Sequence[SourceSftpBulkConfigurationStream]
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    username str
    The server user
    delivery_method SourceSftpBulkConfigurationDeliveryMethod
    folder_path str
    The directory to search files for sync. Default: "/"
    port float
    The server port. Default: 22
    start_date str
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    credentials Property Map
    Credentials for connecting to the SFTP Server
    host String
    The server host address
    streams List<Property Map>
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    username String
    The server user
    deliveryMethod Property Map
    folderPath String
    The directory to search files for sync. Default: "/"
    port Number
    The server port. Default: 22
    startDate String
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.

    SourceSftpBulkConfigurationCredentials, SourceSftpBulkConfigurationCredentialsArgs

    SourceSftpBulkConfigurationCredentialsAuthenticateViaPassword, SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs

    Password string
    Password
    Password string
    Password
    password String
    Password
    password string
    Password
    password str
    Password
    password String
    Password

    SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKey, SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs

    PrivateKey string
    The Private key
    PrivateKey string
    The Private key
    privateKey String
    The Private key
    privateKey string
    The Private key
    private_key str
    The Private key
    privateKey String
    The Private key

    SourceSftpBulkConfigurationDeliveryMethod, SourceSftpBulkConfigurationDeliveryMethodArgs

    CopyRawFiles SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    ReplicateRecords SourceSftpBulkConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    CopyRawFiles SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    ReplicateRecords SourceSftpBulkConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copyRawFiles SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicateRecords SourceSftpBulkConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copyRawFiles SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicateRecords SourceSftpBulkConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copy_raw_files SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicate_records SourceSftpBulkConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copyRawFiles Property Map
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicateRecords Property Map
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.

    SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles, SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs

    PreserveDirectoryStructure bool
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    PreserveDirectoryStructure bool
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserveDirectoryStructure Boolean
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserveDirectoryStructure boolean
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserve_directory_structure bool
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserveDirectoryStructure Boolean
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true

    SourceSftpBulkConfigurationStream, SourceSftpBulkConfigurationStreamArgs

    Format SourceSftpBulkConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    Name string
    The name of the stream.
    DaysToSyncIfHistoryIsFull double
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    Globs List<string>
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, look here.
    InputSchema string
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    RecentNFilesToReadForSchemaDiscovery double
    The number of recent files which will be used to discover the schema for this stream.
    Schemaless bool
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    ValidationPolicy string
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    Format SourceSftpBulkConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    Name string
    The name of the stream.
    DaysToSyncIfHistoryIsFull float64
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    Globs []string
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, look here.
    InputSchema string
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    RecentNFilesToReadForSchemaDiscovery float64
    The number of recent files which will be used to discover the schema for this stream.
    Schemaless bool
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    ValidationPolicy string
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format SourceSftpBulkConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name String
    The name of the stream.
    daysToSyncIfHistoryIsFull Double
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs List<String>
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, look here.
    inputSchema String
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    recentNFilesToReadForSchemaDiscovery Double
    The number of recent files which will be used to discover the schema for this stream.
    schemaless Boolean
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    validationPolicy String
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format SourceSftpBulkConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name string
    The name of the stream.
    daysToSyncIfHistoryIsFull number
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs string[]
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, look here.
    inputSchema string
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    recentNFilesToReadForSchemaDiscovery number
    The number of recent files which will be used to discover the schema for this stream.
    schemaless boolean
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    validationPolicy string
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format SourceSftpBulkConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name str
    The name of the stream.
    days_to_sync_if_history_is_full float
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs Sequence[str]
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, look here.
    input_schema str
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    recent_n_files_to_read_for_schema_discovery float
    The number of recent files which will be used to discover the schema for this stream.
    schemaless bool
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    validation_policy str
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format Property Map
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name String
    The name of the stream.
    daysToSyncIfHistoryIsFull Number
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs List<String>
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, look here.
    inputSchema String
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    recentNFilesToReadForSchemaDiscovery Number
    The number of recent files which will be used to discover the schema for this stream.
    schemaless Boolean
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    validationPolicy String
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]

    SourceSftpBulkConfigurationStreamFormat, SourceSftpBulkConfigurationStreamFormatArgs

    SourceSftpBulkConfigurationStreamFormatAvroFormat, SourceSftpBulkConfigurationStreamFormatAvroFormatArgs

    DoubleAsString bool
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
    DoubleAsString bool
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
    doubleAsString Boolean
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
    doubleAsString boolean
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
    double_as_string bool
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
    doubleAsString Boolean
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false

    SourceSftpBulkConfigurationStreamFormatCsvFormat, SourceSftpBulkConfigurationStreamFormatCsvFormatArgs

    Delimiter string
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    DoubleQuote bool
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    Encoding string
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    EscapeChar string
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    FalseValues List<string>
    A set of case-sensitive strings that should be interpreted as false values.
    HeaderDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
    IgnoreErrorsOnFieldsMismatch bool
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    NullValues List<string>
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
    QuoteChar string
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    SkipRowsAfterHeader double
    The number of rows to skip after the header row. Default: 0
    SkipRowsBeforeHeader double
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    StringsCanBeNull bool
    Whether strings can be interpreted as null values. If true, strings that match the nullvalues set will be interpreted as null. If false, strings that match the nullvalues set will be interpreted as the string itself. Default: true
    TrueValues List<string>
    A set of case-sensitive strings that should be interpreted as true values.
    Delimiter string
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    DoubleQuote bool
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    Encoding string
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    EscapeChar string
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    FalseValues []string
    A set of case-sensitive strings that should be interpreted as false values.
    HeaderDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
    IgnoreErrorsOnFieldsMismatch bool
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    NullValues []string
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
    QuoteChar string
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    SkipRowsAfterHeader float64
    The number of rows to skip after the header row. Default: 0
    SkipRowsBeforeHeader float64
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    StringsCanBeNull bool
    Whether strings can be interpreted as null values. If true, strings that match the nullvalues set will be interpreted as null. If false, strings that match the nullvalues set will be interpreted as the string itself. Default: true
    TrueValues []string
    A set of case-sensitive strings that should be interpreted as true values.
    delimiter String
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    doubleQuote Boolean
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding String
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escapeChar String
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    falseValues List<String>
    A set of case-sensitive strings that should be interpreted as false values.
    headerDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
    ignoreErrorsOnFieldsMismatch Boolean
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    nullValues List<String>
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
    quoteChar String
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skipRowsAfterHeader Double
    The number of rows to skip after the header row. Default: 0
    skipRowsBeforeHeader Double
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    stringsCanBeNull Boolean
    Whether strings can be interpreted as null values. If true, strings that match the nullvalues set will be interpreted as null. If false, strings that match the nullvalues set will be interpreted as the string itself. Default: true
    trueValues List<String>
    A set of case-sensitive strings that should be interpreted as true values.
    delimiter string
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    doubleQuote boolean
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding string
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escapeChar string
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    falseValues string[]
    A set of case-sensitive strings that should be interpreted as false values.
    headerDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
    ignoreErrorsOnFieldsMismatch boolean
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    nullValues string[]
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
    quoteChar string
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skipRowsAfterHeader number
    The number of rows to skip after the header row. Default: 0
    skipRowsBeforeHeader number
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    stringsCanBeNull boolean
    Whether strings can be interpreted as null values. If true, strings that match the nullvalues set will be interpreted as null. If false, strings that match the nullvalues set will be interpreted as the string itself. Default: true
    trueValues string[]
    A set of case-sensitive strings that should be interpreted as true values.
    delimiter str
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    double_quote bool
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding str
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escape_char str
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    false_values Sequence[str]
    A set of case-sensitive strings that should be interpreted as false values.
    header_definition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
    ignore_errors_on_fields_mismatch bool
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    null_values Sequence[str]
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
    quote_char str
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skip_rows_after_header float
    The number of rows to skip after the header row. Default: 0
    skip_rows_before_header float
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    strings_can_be_null bool
    Whether strings can be interpreted as null values. If true, strings that match the nullvalues set will be interpreted as null. If false, strings that match the nullvalues set will be interpreted as the string itself. Default: true
    true_values Sequence[str]
    A set of case-sensitive strings that should be interpreted as true values.
    delimiter String
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    doubleQuote Boolean
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding String
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escapeChar String
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    falseValues List<String>
    A set of case-sensitive strings that should be interpreted as false values.
    headerDefinition Property Map
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i} where i is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
    ignoreErrorsOnFieldsMismatch Boolean
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    nullValues List<String>
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
    quoteChar String
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skipRowsAfterHeader Number
    The number of rows to skip after the header row. Default: 0
    skipRowsBeforeHeader Number
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    stringsCanBeNull Boolean
    Whether strings can be interpreted as null values. If true, strings that match the nullvalues set will be interpreted as null. If false, strings that match the nullvalues set will be interpreted as the string itself. Default: true
    trueValues List<String>
    A set of case-sensitive strings that should be interpreted as true values.

    SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition, SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs

    SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvided, SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs

    ColumnNames List<string>
    The column names that will be used while emitting the CSV records
    ColumnNames []string
    The column names that will be used while emitting the CSV records
    columnNames List<String>
    The column names that will be used while emitting the CSV records
    columnNames string[]
    The column names that will be used while emitting the CSV records
    column_names Sequence[str]
    The column names that will be used while emitting the CSV records
    columnNames List<String>
    The column names that will be used while emitting the CSV records

    SourceSftpBulkConfigurationStreamFormatParquetFormat, SourceSftpBulkConfigurationStreamFormatParquetFormatArgs

    DecimalAsFloat bool
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    DecimalAsFloat bool
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimalAsFloat Boolean
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimalAsFloat boolean
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimal_as_float bool
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimalAsFloat Boolean
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false

    SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormat, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs

    Processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    SkipUnprocessableFiles bool
    If true, skip files that cannot be parsed and pass the error message along as the absourcefileparse_error field. If false, fail the sync. Default: true
    Strategy string
    The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocronly", "hires"]
    Processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    SkipUnprocessableFiles bool
    If true, skip files that cannot be parsed and pass the error message along as the absourcefileparse_error field. If false, fail the sync. Default: true
    Strategy string
    The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocronly", "hires"]
    processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    skipUnprocessableFiles Boolean
    If true, skip files that cannot be parsed and pass the error message along as the absourcefileparse_error field. If false, fail the sync. Default: true
    strategy String
    The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocronly", "hires"]
    processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    skipUnprocessableFiles boolean
    If true, skip files that cannot be parsed and pass the error message along as the absourcefileparse_error field. If false, fail the sync. Default: true
    strategy string
    The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocronly", "hires"]
    processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    skip_unprocessable_files bool
    If true, skip files that cannot be parsed and pass the error message along as the absourcefileparse_error field. If false, fail the sync. Default: true
    strategy str
    The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocronly", "hires"]
    processing Property Map
    Processing configuration
    skipUnprocessableFiles Boolean
    If true, skip files that cannot be parsed and pass the error message along as the absourcefileparse_error field. If false, fail the sync. Default: true
    strategy String
    The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocronly", "hires"]

    SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs

    Local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    ViaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi
    Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
    Local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    ViaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi
    Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
    local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    viaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi
    Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
    local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    viaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi
    Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
    local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    via_api SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi
    Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
    local Property Map
    Process files locally, supporting fast and ocr modes. This is the default option.
    viaApi Property Map
    Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.

    SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs

    ApiKey string
    The API key to use matching the environment. Default: ""
    ApiUrl string
    The URL of the unstructured API to use. Default: "https://api.unstructured.io"
    Parameters List<SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter>
    List of parameters sent to the API
    ApiKey string
    The API key to use matching the environment. Default: ""
    ApiUrl string
    The URL of the unstructured API to use. Default: "https://api.unstructured.io"
    Parameters []SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter
    List of parameters sent to the API
    apiKey String
    The API key to use matching the environment. Default: ""
    apiUrl String
    The URL of the unstructured API to use. Default: "https://api.unstructured.io"
    parameters List<SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter>
    List of parameters sent to the API
    apiKey string
    The API key to use matching the environment. Default: ""
    apiUrl string
    The URL of the unstructured API to use. Default: "https://api.unstructured.io"
    parameters SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter[]
    List of parameters sent to the API
    api_key str
    The API key to use matching the environment. Default: ""
    api_url str
    The URL of the unstructured API to use. Default: "https://api.unstructured.io"
    parameters Sequence[SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter]
    List of parameters sent to the API
    apiKey String
    The API key to use matching the environment. Default: ""
    apiUrl String
    The URL of the unstructured API to use. Default: "https://api.unstructured.io"
    parameters List<Property Map>
    List of parameters sent to the API

    SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs

    Name string
    The name of the unstructured API parameter to use
    Value string
    The value of the parameter
    Name string
    The name of the unstructured API parameter to use
    Value string
    The value of the parameter
    name String
    The name of the unstructured API parameter to use
    value String
    The value of the parameter
    name string
    The name of the unstructured API parameter to use
    value string
    The value of the parameter
    name str
    The name of the unstructured API parameter to use
    value str
    The value of the parameter
    name String
    The name of the unstructured API parameter to use
    value String
    The value of the parameter

    Import

    $ pulumi import airbyte:index/sourceSftpBulk:SourceSftpBulk my_airbyte_source_sftp_bulk ""
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.
    airbyte logo
    airbyte 0.7.0-beta2 published on Friday, Mar 7, 2025 by airbytehq