Not able to do a one-time load from Postgres to OpenSearch using DMS

I am trying to migrate existing data from AWS RDS PostgreSQL to AWS managed OpenSearch, but it is not working: no rows are migrated to OpenSearch. When checking the CloudWatch log I see the error below:

Bulk request failed. no retry. TotalRecordCount 4080, FailedRecordCount 4080 [1026400] (elasticsearch_bulk_utils.c:181)

The DMS task has the following settings:

{
  "TargetMetadata": {
    "TargetSchema": "",
    "SupportLobs": false,
    "FullLobMode": false,
    "LobChunkSize": 0,
    "LimitedSizeLobMode": false,
    "LobMaxSize": 0,
    "InlineLobMaxSize": 0,
    "LoadMaxFileSize": 0,
    "ParallelLoadThreads": 5,
    "ParallelLoadBufferSize": 100,
    "BatchApplyEnabled": false,
    "TaskRecoveryTableEnabled": false,
    "ParallelLoadQueuesPerThread": 0,
    "ParallelApplyThreads": 0,
    "ParallelApplyBufferSize": 100,
    "ParallelApplyQueuesPerThread": 0
  },
  "FullLoadSettings": {
    "TargetTablePrepMode": "DO_NOTHING",
    "CreatePkAfterFullLoad": false,
    "StopTaskCachedChangesApplied": false,
    "StopTaskCachedChangesNotApplied": false,
    "MaxFullLoadSubTasks": 8,
    "TransactionConsistencyTimeout": 600,
    "CommitRate": 50000
  },
  "Logging": {
    "EnableLogging": true,
    "LogComponents": [
      {
        "Id": "TRANSFORMATION",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "SOURCE_UNLOAD",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "IO",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TARGET_LOAD",
        "Severity": "LOGGER_SEVERITY_DETAILED_DEBUG"
      },
      {
        "Id": "PERFORMANCE",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "SOURCE_CAPTURE",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "SORTER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "REST_SERVER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "VALIDATOR_EXT",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TARGET_APPLY",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TASK_MANAGER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "TABLES_MANAGER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "METADATA_MANAGER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "FILE_FACTORY",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "COMMON",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "ADDONS",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "DATA_STRUCTURE",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "COMMUNICATION",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      },
      {
        "Id": "FILE_TRANSFER",
        "Severity": "LOGGER_SEVERITY_DEFAULT"
      }
    ],
    "CloudWatchLogGroup": null,
    "CloudWatchLogStream": null
  },
  "ControlTablesSettings": {
    "historyTimeslotInMinutes": 5,
    "ControlSchema": "",
    "HistoryTimeslotInMinutes": 5,
    "HistoryTableEnabled": true,
    "SuspendedTablesTableEnabled": false,
    "StatusTableEnabled": true,
    "FullLoadExceptionTableEnabled": false
  },
  "StreamBufferSettings": {
    "StreamBufferCount": 3,
    "StreamBufferSizeInMB": 8,
    "CtrlStreamBufferSizeInMB": 5
  },
  "ChangeProcessingDdlHandlingPolicy": {
    "HandleSourceTableDropped": true,
    "HandleSourceTableTruncated": true,
    "HandleSourceTableAltered": true
  },
  "ErrorBehavior": {
    "DataErrorPolicy": "LOG_ERROR",
    "EventErrorPolicy": null,
    "DataTruncationErrorPolicy": "LOG_ERROR",
    "DataErrorEscalationPolicy": "SUSPEND_TABLE",
    "DataErrorEscalationCount": 0,
    "TableErrorPolicy": "SUSPEND_TABLE",
    "TableErrorEscalationPolicy": "STOP_TASK",
    "TableErrorEscalationCount": 0,
    "RecoverableErrorCount": -1,
    "RecoverableErrorInterval": 5,
    "RecoverableErrorThrottling": true,
    "RecoverableErrorThrottlingMax": 1800,
    "RecoverableErrorStopRetryAfterThrottlingMax": true,
    "ApplyErrorDeletePolicy": "IGNORE_RECORD",
    "ApplyErrorInsertPolicy": "LOG_ERROR",
    "ApplyErrorUpdatePolicy": "LOG_ERROR",
    "ApplyErrorEscalationPolicy": "LOG_ERROR",
    "ApplyErrorEscalationCount": 0,
    "ApplyErrorFailOnTruncationDdl": false,
    "FullLoadIgnoreConflicts": true,
    "FailOnTransactionConsistencyBreached": false,
    "FailOnNoTablesCaptured": true
  },
  "ChangeProcessingTuning": {
    "BatchApplyPreserveTransaction": true,
    "BatchApplyTimeoutMin": 1,
    "BatchApplyTimeoutMax": 30,
    "BatchApplyMemoryLimit": 500,
    "BatchSplitSize": 0,
    "MinTransactionSize": 1000,
    "CommitTimeout": 1,
    "MemoryLimitTotal": 1024,
    "MemoryKeepTime": 60,
    "StatementCacheSize": 50
  },
  "PostProcessingRules": null,
  "CharacterSetSettings": null,
  "LoopbackPreventionSettings": null,
  "BeforeImageSettings": null,
  "FailTaskWhenCleanTaskResourceFailed": false,
  "TTSettings": null
}
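
For reference, the task settings above can be pushed to an existing task through the AWS SDK. Below is a minimal sketch, assuming boto3, a placeholder task ARN, and that the settings document above is saved locally as task_settings.json; the task has to be stopped before it can be modified.

import boto3

# Hypothetical ARN; replace with the actual replication task ARN
TASK_ARN = "arn:aws:dms:us-east-1:123456789012:task:EXAMPLETASK"

# task_settings.json contains the task settings document shown above
with open("task_settings.json") as f:
    settings = f.read()

dms = boto3.client("dms")

# ReplicationTaskSettings expects the settings as a JSON string;
# the task must be in a stopped state for this call to succeed
dms.modify_replication_task(
    ReplicationTaskArn=TASK_ARN,
    ReplicationTaskSettings=settings,
)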

OpenSearch has an index with the following settings:

{
  "settings": {
    "index.max_ngram_diff": 8,
    "analysis": {
      "analyzer": {
        "my_ngram_analyzer": {
          "type": "custom",
          "tokenizer": "standard",
          "filter": [
            "lowercase",
            "mynGram"
          ]
        }
      },
      "filter": {
        "mynGram": {
          "type": "nGram",
          "min_gram": 6,
          "max_gram": 14,
          "token_chars": [
            "letter",
            "digit",
            "whitespace",
            "symbol"
          ]
        }
      }
    },
    "number_of_shards": 6,
    "number_of_replicas": 1
  },
  "mappings": {
    "properties": {
      "created_at": {
        "type": "date"
      },
      "id": {
        "type": "long"
      },
      "name": {
        "type": "text",
        "analyzer": "my_ngram_analyzer",
        "search_analyzer": "my_ngram_analyzer"
      },
      "phone": {
        "type": "text",
        "analyzer": "my_ngram_analyzer",
        "search_analyzer": "my_ngram_analyzer"
      },
      "updated_at": {
        "type": "date"
      }
    }
  }
}
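
For completeness, here is a minimal sketch of how an index with these settings could be created outside the console, assuming the index is named contacts (as in the bulk test below), the Python requests library, a placeholder domain endpoint, master-user basic auth for fine-grained access control, and that the settings/mappings body above is saved as index_settings.json.

import json
import requests
from requests.auth import HTTPBasicAuth

# Hypothetical values: replace with the real domain endpoint and credentials
ENDPOINT = "https://my-domain.us-east-1.es.amazonaws.com"
INDEX = "contacts"

# index_settings.json holds the settings/mappings document shown above
with open("index_settings.json") as f:
    body = json.load(f)

resp = requests.put(
    f"{ENDPOINT}/{INDEX}",
    json=body,
    auth=HTTPBasicAuth("master-user", "master-password"),
    timeout=30,
)
print(resp.status_code, resp.text)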

I have tried to insert a sample document using the _bulk API on the OpenSearch console and it worked. Below is the request I tried against OpenSearch, which succeeded:

POST _bulk
{"index":{"_index":"contacts"}}
{"name": "name","phone" : "11111111","created_at" : "2021-12-21T12:12:59","updated_at" : "2021-12-21T12:12:59","id": 101}
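
The same request can also be sent over HTTPS from outside the console, which makes it easier to inspect the per-item errors OpenSearch returns when a bulk request is rejected. A minimal sketch, assuming the Python requests library, a placeholder domain endpoint, and master-user basic auth:

import requests
from requests.auth import HTTPBasicAuth

# Hypothetical values: replace with the real domain endpoint and credentials
ENDPOINT = "https://my-domain.us-east-1.es.amazonaws.com"

# _bulk expects NDJSON: one action line, one document line, each ending in "\n"
bulk_body = (
    '{"index":{"_index":"contacts"}}\n'
    '{"name": "name","phone" : "11111111","created_at" : "2021-12-21T12:12:59",'
    '"updated_at" : "2021-12-21T12:12:59","id": 101}\n'
)

resp = requests.post(
    f"{ENDPOINT}/_bulk",
    data=bulk_body,
    headers={"Content-Type": "application/x-ndjson"},
    auth=HTTPBasicAuth("master-user", "master-password"),
    timeout=30,
)

# "errors": true means at least one item failed; the per-item "error" objects
# explain why individual documents were rejected
print(resp.json().get("errors"))
print(resp.text)
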
  • Did you fix that problem? I'm getting the same problem here.

1 Answer

To answer your question, we require details that are non-public information. Please open a support case with AWS using the following link

https://console.aws.amazon.com/support/home#/case/create

AWS
Support Engineer
answered 2 years ago
