You're using TKE super node pools, so I'm not sure whether both runtimes are present in the cluster.

kubectl get node -o json | jq '.items[0].status.nodeInfo.containerRuntimeVersion'

Run this and see what you get; run it for every node.
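
If you'd rather check all nodes in one go, this should work too (plain kubectl, custom-columns output):

kubectl get nodes -o custom-columns=NAME:.metadata.name,RUNTIME:.status.nodeInfo.containerRuntimeVersion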

    wanjunlei The eks:// ones are the three super node pools; all the other regular nodes are on docker.

    [root@rhino-c1-c1 ~]# kubectl get node -o jsonpath='{.items[0].status.nodeInfo.containerRuntimeVersion}'
    eks://2.0[root@rhino-c1-c1 ~]# 
    [root@rhino-c1-c1 ~]# kubectl get node -o jsonpath='{.items[1].status.nodeInfo.containerRuntimeVersion}'
    eks://2.0[root@rhino-c1-c1 ~]# 
    [root@rhino-c1-c1 ~]# kubectl get node -o jsonpath='{.items[2].status.nodeInfo.containerRuntimeVersion}'
    eks://2.0[root@rhino-c1-c1 ~]# 
    [root@rhino-c1-c1 ~]# kubectl get node -o jsonpath='{.items[3].status.nodeInfo.containerRuntimeVersion}'
    docker://19.3.9[root@rhino-c1-c1 ~]# 
    [root@rhino-c1-c1 ~]# kubectl get node -o jsonpath='{.items[4].status.nodeInfo.containerRuntimeVersion}'
    docker://19.3.9

    First, KubeSphere currently doesn't support different container runtimes on different nodes. Second, the eks runtime has never been integrated, so I'm not sure whether it can be supported at all.

      wanjunlei Is there a way to change it to support the docker runtime, so that at least the docker nodes work properly first?

      Edit the cc (ClusterConfiguration):

      kubectl edit cc -n kubesphere-system ks-installer

      Delete the auditing, fluentbit, logging, and events sections from status, then schedule ks-installer onto a docker node and wait for the installation to finish.
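
      A rough sketch of the scheduling part (the runtime=docker label is only a placeholder you'd have to put on a docker node yourself):

      kubectl label node <docker-node-name> runtime=docker

      kubectl -n kubesphere-system patch deployment ks-installer --type merge \
        -p '{"spec":{"template":{"spec":{"nodeSelector":{"runtime":"docker"}}}}}'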

        First check the fluent-bit config file and see whether it's the docker one.
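
        The rendered config lives in the fluent-bit-config secret (shown further down in this thread), so something like this will dump it:

        kubectl -n kubesphere-logging-system get secret fluent-bit-config \
          -o jsonpath='{.data.fluent-bit\.conf}' | base64 -d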

        Then check the fluentbit-operator initContainer; the correct command should be

         echo CONTAINER_ROOT_DIR=$(docker info -f '{{.DockerRootDir}}') > /fluentbit-operator/fluent-bit.env

        If both are correct, schedule fluentbit-operator onto a docker node, then try deleting the fluent-bit daemonset.
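
        For the scheduling change, the same kind of patch as for ks-installer should do (again assuming a placeholder runtime=docker label on the docker nodes):

        kubectl -n kubesphere-logging-system patch deployment fluentbit-operator --type merge \
          -p '{"spec":{"template":{"spec":{"nodeSelector":{"runtime":"docker"}}}}}'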

          wanjunlei

          The fluent-bit config is still the containerd one:

          fluent-bit.conf:
          [Service]
              Parsers_File    parsers.conf
          [Input]
              Name    systemd
              Path    /var/log/journal
              DB    /fluent-bit/tail/docker.db
              DB.Sync    Normal
              Tag    service.crio
              Systemd_Filter    _SYSTEMD_UNIT=crio.service
          [Input]
              Name    systemd
              Path    /var/log/journal
              DB    /fluent-bit/tail/docker.db
              DB.Sync    Normal
              Tag    service.docker
              Systemd_Filter    _SYSTEMD_UNIT=docker.service
          [Input]
              Name    systemd
              Path    /var/log/journal
              DB    /fluent-bit/tail/kubelet.db
              DB.Sync    Normal
              Tag    service.kubelet
              Systemd_Filter    _SYSTEMD_UNIT=kubelet.service
          [Input]
              Name    tail
              Path    /var/log/containers/*.log
              Exclude_Path    /var/log/containers/*_kubesphere-logging-system_events-exporter*.log,/var/log/containers/kube-auditing-webhook*_kubesphere-logging-system_kube-auditing-webhook*.log
              Refresh_Interval    10
              Skip_Long_Lines    true
              DB    /fluent-bit/tail/pos.db
              DB.Sync    Normal
              Mem_Buf_Limit    5MB
              Parser    cri
              Tag    kube.*
          [Input]
              Name    tail
              Path    /var/log/containers/kube-auditing-webhook*_kubesphere-logging-system_kube-auditing-webhook*.log
              Refresh_Interval    10
              Skip_Long_Lines    true
              DB    /fluent-bit/tail/pos-auditing.db
              DB.Sync    Normal
              Mem_Buf_Limit    5MB
              Parser    cri
              Tag    kube_auditing
          [Input]
              Name    tail
              Path    /var/log/containers/*_kubesphere-logging-system_events-exporter*.log
              Refresh_Interval    10
              Skip_Long_Lines    true
              DB    /fluent-bit/tail/pos-events.db
              DB.Sync    Normal
              Mem_Buf_Limit    5MB
              Parser    cri
              Tag    kube_events
          [Filter]
              Name    lua
              Match    kube.*
              script    /fluent-bit/config/containerd.lua
              call    containerd
              time_as_table    true
          [Filter]
              Name    parser
              Match    kube_auditing
              Key_Name    message
              Parser    json
          [Filter]
              Name    modify
              Match    kube_auditing
              Condition    Key_does_not_exist    AuditID    
              Add    ignore    true
          [Filter]
              Name    grep
              Match    kube_auditing
              Exclude    ignore true
          [Filter]
              Name    parser
              Match    kube_events
              Key_Name    message
              Parser    json
          [Filter]
              Name    kubernetes
              Match    kube.*
              Kube_URL    https://kubernetes.default.svc:443
              Kube_CA_File    /var/run/secrets/kubernetes.io/serviceaccount/ca.crt
              Kube_Token_File    /var/run/secrets/kubernetes.io/serviceaccount/token
              Labels    false
              Annotations    false
          [Filter]
              Name    nest
              Match    kube.*
              Operation    lift
              Nested_under    kubernetes
              Add_prefix    kubernetes_
          [Filter]
              Name    modify
              Match    kube.*
              Remove    stream
              Remove    kubernetes_pod_id
              Remove    kubernetes_host
              Remove    kubernetes_container_hash
          [Filter]
              Name    nest
              Match    kube.*
              Operation    nest
              Wildcard    kubernetes_*
              Nest_under    kubernetes
              Remove_prefix    kubernetes_
          [Filter]
              Name    lua
              Match    service.*
              script    /fluent-bit/config/systemd.lua
              call    add_time
              time_as_table    true
          [Output]
              Name    es
              Match_Regex    (?:kube|service)\.(.*)
              Host    10.33.10.17
              Port    9200
              Logstash_Format    true
              Logstash_Prefix    ks-logstash-log
              Time_Key    @timestamp
              Generate_ID    true
              Trace_Error    true
          [Output]
              Name    es
              Match    kube_auditing
              Host    10.33.10.17
              Port    9200
              Logstash_Format    true
              Logstash_Prefix    ks-logstash-auditing
              Generate_ID    true
          [Output]
              Name    es
              Match    kube_events
              Host    10.33.10.17
              Port    9200
              Logstash_Format    true
              Logstash_Prefix    ks-logstash-events
              Generate_ID    true

          The fluentbit-operator initContainer command is

          echo CONTAINER_ROOT_DIR=/var/log > /fluentbit-operator/fluent-bit.env

          For now this can only be fixed by hand.

          1 Edit the inputs

          kubectl edit input -n kubesphere-logging-system tail

          kubectl edit input -n kubesphere-logging-system tail-auditing

          kubectl edit input -n kubesphere-logging-system tail-events

          Change the parser to docker.
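
          The fragment to change should look roughly like this (field name assumed to be spec.tail.parser in the Input CRD; check against what kubectl edit shows you):

          spec:
            tail:
              parser: docker    # was: cri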

          Delete the crio input

          kubectl delete input -n kubesphere-logging-system crio

          Create a docker input

          apiVersion: logging.kubesphere.io/v1alpha2
          kind: Input
          metadata:
            name: docker
            namespace: kubesphere-logging-system
            labels:
              logging.kubesphere.io/enabled: "true"
              logging.kubesphere.io/component: logging
          spec:
            systemd:
              tag: service.docker
              path: /var/log/journal
              db: /fluent-bit/tail/docker.db
              dbSync: Normal
              systemdFilter:
                - _SYSTEMD_UNIT=docker.service
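
          Save it to a file (for example docker-input.yaml), apply it, then check the inputs:

          kubectl apply -f docker-input.yaml

          kubectl get input -n kubesphere-logging-system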

          2 Delete the containerd filter

          kubectl delete filter -n kubesphere-logging-system containerd

          3 Modify fluentbit-operator: you need to change the volumes and the initContainer. You can follow this example:

          kind: Deployment
          apiVersion: apps/v1
          metadata:
            name: fluentbit-operator
            namespace: kubesphere-logging-system
            labels:
              app.kubernetes.io/component: operator
              app.kubernetes.io/name: fluentbit-operator
          spec:
            replicas: 1
            selector:
              matchLabels:
                app.kubernetes.io/component: operator
                app.kubernetes.io/name: fluentbit-operator
            template:
              metadata:
                labels:
                  app.kubernetes.io/component: operator
                  app.kubernetes.io/name: fluentbit-operator
              spec:
                volumes:
                  - name: env
                    emptyDir: {}
                  - name: dockersock
                    hostPath:
                      path: /var/run/docker.sock
                      type: ''
                initContainers:
                  - name: setenv
                    image: 'docker:19.03'
                    command:
                      - /bin/sh
                      - '-c'
                      - >-
                        set -ex; echo CONTAINER_ROOT_DIR=$(docker info -f
                        '{{.DockerRootDir}}') > /fluentbit-operator/fluent-bit.env
                    resources: {}
                    volumeMounts:
                      - name: env
                        mountPath: /fluentbit-operator
                      - name: dockersock
                        readOnly: true
                        mountPath: /var/run/docker.sock
                    terminationMessagePath: /dev/termination-log
                    terminationMessagePolicy: File
                    imagePullPolicy: IfNotPresent

          After fluentbit-operator has finished restarting, delete the fluent-bit daemonset.
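
          For example (the operator should recreate the daemonset with the new env afterwards):

          kubectl -n kubesphere-logging-system delete daemonset fluent-bit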

            wanjunlei Container logs are collected normally now, and event logs are collected as well. The only remaining problem is that viewing event logs in the KubeSphere console still fails. Is there some config I need to change somewhere?

            kubectl edit filter -n kubesphere-logging-system filter-events

            kubectl edit filter -n kubesphere-logging-system filter-auditing

            Change keyName to log.
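
            In both filters the parser section should end up looking roughly like this (field names assumed from the Filter CRD; verify against what kubectl edit shows):

            spec:
              filters:
                - parser:
                    keyName: log    # was: message
                    parser: json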

            2 years later
            apiVersion: v1
            data:
              containerd.lua: ZnVuY3Rpb24gY29udGFpbmVyZCggdGFnLCB0aW1lc3RhbXAsIHJlY29yZCkKICAgICAgIGlmKHJlY29yZFsibG9ndGFnIl1+PW5pbCkKICAgICAgIHRoZW4KICAgICAgIHRpbWVTdHIgPSBvcy5kYXRlKCIhKnQiLCAgdGltZXN0YW1wWyJzZWMiXSkKICAgICAgICB0ID0gc3RyaW5nLmZvcm1hdCgiJTRkLSUwMmQtJTAyZFQlMDJkOiUwMmQ6JTAyZC4lc1oiLAogICAgICAgIHRp
            bWVTdHJbInllYXIiXSwgdGltZVN0clsibW9udGgiXSwgdGltZVN0clsiZGF5Il0sCiAgICAgICAgdGltZVN0clsiaG91ciJdLCB0aW1lU3RyWyJtaW4iXSwgdGltZVN0clsic2VjIl0sCiAgICAgICAgdGltZXN0YW1wWyJuc2VjIl0pOwogICAgICAgIHJlY29yZFsidGltZSJdID0gdDsKICAgICAgICByZWNvcmRbImxvZyJdID0gcmVjb3JkWyJtZXNzYWdlIl07CiAgICAgICAgcmVjb3JkWyJtZXNzYW
            dlIl0gPSAgbmlsOwogICAgICAgIHJldHVybiAxLCB0aW1lc3RhbXAsIHJlY29yZAogICAgICAgIGVsc2UKICAgICAgICByZXR1cm4gMCx0aW1lc3RhbXAscmVjb3JkCiAgICAgICBlbmQKZW5k
              fluent-bit.conf: W1NlcnZpY2VdCiAgICBQYXJzZXJzX0ZpbGUgICAgcGFyc2Vycy5jb25mCltJbnB1dF0KICAgIE5hbWUgICAgc3lzdGVtZAogICAgUGF0aCAgICAvdmFyL2xvZy9qb3VybmFsCiAgICBEQiAgICAvZmx1ZW50LWJpdC90YWlsL2RvY2tlci5kYgogICAgREIuU3luYyAgICBOb3JtYWwKICAgIFRhZyAgICBzZXJ2aWNlLmNvbnRhaW5lcmQKICAgIFN5c3RlbWRfRmlsdGVyICAgIF9
            TWVNURU1EX1VOSVQ9Y29udGFpbmVyZC5zZXJ2aWNlCltJbnB1dF0KICAgIE5hbWUgICAgc3lzdGVtZAogICAgUGF0aCAgICAvdmFyL2xvZy9qb3VybmFsCiAgICBEQiAgICAvZmx1ZW50LWJpdC90YWlsL2t1YmVsZXQuZGIKICAgIERCLlN5bmMgICAgTm9ybWFsCiAgICBUYWcgICAgc2VydmljZS5rdWJlbGV0CiAgICBTeXN0ZW1kX0ZpbHRlciAgICBfU1lTVEVNRF9VTklUPWt1YmVsZXQuc2VydmljZ
            QpbSW5wdXRdCiAgICBOYW1lICAgIHRhaWwKICAgIFBhdGggICAgL3Zhci9sb2cvY29udGFpbmVycy8qLmxvZwogICAgRXhjbHVkZV9QYXRoICAgIC92YXIvbG9nL2NvbnRhaW5lcnMvKl9rdWJlc3BoZXJlLWxvZ2dpbmctc3lzdGVtX2V2ZW50cy1leHBvcnRlcioubG9nLC92YXIvbG9nL2NvbnRhaW5lcnMva3ViZS1hdWRpdGluZy13ZWJob29rKl9rdWJlc3BoZXJlLWxvZ2dpbmctc3lzdGVtX2t1YmU
            tYXVkaXRpbmctd2ViaG9vayoubG9nCiAgICBSZWZyZXNoX0ludGVydmFsICAgIDEwCiAgICBTa2lwX0xvbmdfTGluZXMgICAgdHJ1ZQogICAgREIgICAgL2ZsdWVudC1iaXQvdGFpbC9wb3MuZGIKICAgIERCLlN5bmMgICAgTm9ybWFsCiAgICBNZW1fQnVmX0xpbWl0ICAgIDVNQgogICAgUGFyc2VyICAgIGNyaQogICAgVGFnICAgIGt1YmUuKgpbSW5wdXRdCiAgICBOYW1lICAgIHRhaWwKICAgIFBhd
            GggICAgL3Zhci9sb2cvY29udGFpbmVycy9rdWJlLWF1ZGl0aW5nLXdlYmhvb2sqX2t1YmVzcGhlcmUtbG9nZ2luZy1zeXN0ZW1fa3ViZS1hdWRpdGluZy13ZWJob29rKi5sb2cKICAgIFJlZnJlc2hfSW50ZXJ2YWwgICAgMTAKICAgIFNraXBfTG9uZ19MaW5lcyAgICB0cnVlCiAgICBEQiAgICAvZmx1ZW50LWJpdC90YWlsL3Bvcy1hdWRpdGluZy5kYgogICAgREIuU3luYyAgICBOb3JtYWwKICAgIE1
            lbV9CdWZfTGltaXQgICAgNU1CCiAgICBQYXJzZXIgICAgY3JpCiAgICBUYWcgICAga3ViZV9hdWRpdGluZwpbSW5wdXRdCiAgICBOYW1lICAgIHRhaWwKICAgIFBhdGggICAgL3Zhci9sb2cvY29udGFpbmVycy8qX2t1YmVzcGhlcmUtbG9nZ2luZy1zeXN0ZW1fZXZlbnRzLWV4cG9ydGVyKi5sb2cKICAgIFJlZnJlc2hfSW50ZXJ2YWwgICAgMTAKICAgIFNraXBfTG9uZ19MaW5lcyAgICB0cnVlCiAgI
            CBEQiAgICAvZmx1ZW50LWJpdC90YWlsL3Bvcy1ldmVudHMuZGIKICAgIERCLlN5bmMgICAgTm9ybWFsCiAgICBNZW1fQnVmX0xpbWl0ICAgIDVNQgogICAgUGFyc2VyICAgIGNyaQogICAgVGFnICAgIGt1YmVfZXZlbnRzCltGaWx0ZXJdCiAgICBOYW1lICAgIGx1YQogICAgTWF0Y2ggICAga3ViZS4qCiAgICBzY3JpcHQgICAgL2ZsdWVudC1iaXQvY29uZmlnL2NvbnRhaW5lcmQubHVhCiAgICBjYWx
            sICAgIGNvbnRhaW5lcmQKICAgIHRpbWVfYXNfdGFibGUgICAgdHJ1ZQpbRmlsdGVyXQogICAgTmFtZSAgICBwYXJzZXIKICAgIE1hdGNoICAgIGt1YmVfYXVkaXRpbmcKICAgIEtleV9OYW1lICAgIG1lc3NhZ2UKICAgIFBhcnNlciAgICBqc29uCltGaWx0ZXJdCiAgICBOYW1lICAgIG1vZGlmeQogICAgTWF0Y2ggICAga3ViZV9hdWRpdGluZwogICAgQ29uZGl0aW9uICAgIEtleV9kb2VzX25vdF9le
            GlzdCAgICBBdWRpdElEICAgIAogICAgQWRkICAgIGlnbm9yZSAgICB0cnVlCltGaWx0ZXJdCiAgICBOYW1lICAgIGdyZXAKICAgIE1hdGNoICAgIGt1YmVfYXVkaXRpbmcKICAgIEV4Y2x1ZGUgICAgaWdub3JlIHRydWUKW0ZpbHRlcl0KICAgIE5hbWUgICAgcGFyc2VyCiAgICBNYXRjaCAgICBrdWJlX2V2ZW50cwogICAgS2V5X05hbWUgICAgbWVzc2FnZQogICAgUGFyc2VyICAgIGpzb24KW0ZpbHR
            lcl0KICAgIE5hbWUgICAga3ViZXJuZXRlcwogICAgTWF0Y2ggICAga3ViZS4qCiAgICBLdWJlX1VSTCAgICBodHRwczovL2t1YmVybmV0ZXMuZGVmYXVsdC5zdmM6NDQzCiAgICBLdWJlX0NBX0ZpbGUgICAgL3Zhci9ydW4vc2VjcmV0cy9rdWJlcm5ldGVzLmlvL3NlcnZpY2VhY2NvdW50L2NhLmNydAogICAgS3ViZV9Ub2tlbl9GaWxlICAgIC92YXIvcnVuL3NlY3JldHMva3ViZXJuZXRlcy5pby9zZ
            XJ2aWNlYWNjb3VudC90b2tlbgogICAgTGFiZWxzICAgIGZhbHNlCiAgICBBbm5vdGF0aW9ucyAgICBmYWxzZQpbRmlsdGVyXQogICAgTmFtZSAgICBuZXN0CiAgICBNYXRjaCAgICBrdWJlLioKICAgIE9wZXJhdGlvbiAgICBsaWZ0CiAgICBOZXN0ZWRfdW5kZXIgICAga3ViZXJuZXRlcwogICAgQWRkX3ByZWZpeCAgICBrdWJlcm5ldGVzXwpbRmlsdGVyXQogICAgTmFtZSAgICBtb2RpZnkKICAgIE1
            hdGNoICAgIGt1YmUuKgogICAgUmVtb3ZlICAgIHN0cmVhbQogICAgUmVtb3ZlICAgIGt1YmVybmV0ZXNfcG9kX2lkCiAgICBSZW1vdmUgICAga3ViZXJuZXRlc19ob3N0CiAgICBSZW1vdmUgICAga3ViZXJuZXRlc19jb250YWluZXJfaGFzaApbRmlsdGVyXQogICAgTmFtZSAgICBuZXN0CiAgICBNYXRjaCAgICBrdWJlLioKICAgIE9wZXJhdGlvbiAgICBuZXN0CiAgICBXaWxkY2FyZCAgICBrdWJlc
            m5ldGVzXyoKICAgIE5lc3RfdW5kZXIgICAga3ViZXJuZXRlcwogICAgUmVtb3ZlX3ByZWZpeCAgICBrdWJlcm5ldGVzXwpbRmlsdGVyXQogICAgTmFtZSAgICBsdWEKICAgIE1hdGNoICAgIHNlcnZpY2UuKgogICAgc2NyaXB0ICAgIC9mbHVlbnQtYml0L2NvbmZpZy9zeXN0ZW1kLmx1YQogICAgY2FsbCAgICBhZGRfdGltZQogICAgdGltZV9hc190YWJsZSAgICB0cnVlCltPdXRwdXRdCiAgICBOYW1
            lICAgIGVzCiAgICBNYXRjaF9SZWdleCAgICAoPzprdWJlfHNlcnZpY2UpXC4oLiopCiAgICBIb3N0ICAgIGVsYXN0aWNzZWFyY2gtbG9nZ2luZy1kYXRhLmt1YmVzcGhlcmUtbG9nZ2luZy1zeXN0ZW0uc3ZjCiAgICBQb3J0ICAgIDkyMDAKICAgIExvZ3N0YXNoX0Zvcm1hdCAgICB0cnVlCiAgICBMb2dzdGFzaF9QcmVmaXggICAga3MtbG9nc3Rhc2gtbG9nCiAgICBUaW1lX0tleSAgICBAdGltZXN0Y
            W1wCiAgICBHZW5lcmF0ZV9JRCAgICB0cnVlCltPdXRwdXRdCiAgICBOYW1lICAgIGVzCiAgICBNYXRjaCAgICBrdWJlX2F1ZGl0aW5nCiAgICBIb3N0ICAgIGVsYXN0aWNzZWFyY2gtbG9nZ2luZy1kYXRhLmt1YmVzcGhlcmUtbG9nZ2luZy1zeXN0ZW0uc3ZjCiAgICBQb3J0ICAgIDkyMDAKICAgIExvZ3N0YXNoX0Zvcm1hdCAgICB0cnVlCiAgICBMb2dzdGFzaF9QcmVmaXggICAga3MtbG9nc3Rhc2g
            tYXVkaXRpbmcKICAgIEdlbmVyYXRlX0lEICAgIHRydWUKW091dHB1dF0KICAgIE5hbWUgICAgZXMKICAgIE1hdGNoICAgIGt1YmVfZXZlbnRzCiAgICBIb3N0ICAgIGVsYXN0aWNzZWFyY2gtbG9nZ2luZy1kYXRhLmt1YmVzcGhlcmUtbG9nZ2luZy1zeXN0ZW0uc3ZjCiAgICBQb3J0ICAgIDkyMDAKICAgIExvZ3N0YXNoX0Zvcm1hdCAgICB0cnVlCiAgICBMb2dzdGFzaF9QcmVmaXggICAga3MtbG9nc
            3Rhc2gtZXZlbnRzCiAgICBHZW5lcmF0ZV9JRCAgICB0cnVlCg==
              parsers.conf: ""
              systemd.lua: ZnVuY3Rpb24gYWRkX3RpbWUodGFnLCB0aW1lc3RhbXAsIHJlY29yZCkKICBuZXdfcmVjb3JkID0ge30KICB0aW1lU3RyID0gb3MuZGF0ZSgiISp0IiwgdGltZXN0YW1wWyJzZWMiXSkKICB0ID0gc3RyaW5nLmZvcm1hdCgiJTRkLSUwMmQtJTAyZFQlMDJkOiUwMmQ6JTAyZC4lc1oiLAoJCXRpbWVTdHJbInllYXIiXSwgdGltZVN0clsibW9udGgiXSwgdGltZVN0clsiZGF5Il0sCgk
            JdGltZVN0clsiaG91ciJdLCB0aW1lU3RyWyJtaW4iXSwgdGltZVN0clsic2VjIl0sCgkJdGltZXN0YW1wWyJuc2VjIl0pCiAga3ViZXJuZXRlcyA9IHt9CiAga3ViZXJuZXRlc1sicG9kX25hbWUiXSA9IHJlY29yZFsiX0hPU1ROQU1FIl0KICBrdWJlcm5ldGVzWyJjb250YWluZXJfbmFtZSJdID0gcmVjb3JkWyJTWVNMT0dfSURFTlRJRklFUiJdCiAga3ViZXJuZXRlc1sibmFtZXNwYWNlX25hbWUiX
            SA9ICJrdWJlLXN5c3RlbSIKICBuZXdfcmVjb3JkWyJ0aW1lIl0gPSB0CiAgbmV3X3JlY29yZFsibG9nIl0gPSByZWNvcmRbIk1FU1NBR0UiXQogIG5ld19yZWNvcmRbImt1YmVybmV0ZXMiXSA9IGt1YmVybmV0ZXMKICByZXR1cm4gMSwgdGltZXN0YW1wLCBuZXdfcmVjb3JkCmVuZA==
            kind: Secret
            metadata:
              creationTimestamp: "2023-06-16T00:51:20Z"
              name: fluent-bit-config
              namespace: kubesphere-logging-system
              ownerReferences:
              - apiVersion: logging.kubesphere.io/v1alpha2
                blockOwnerDeletion: true
                controller: true
                kind: FluentBitConfig
                name: fluent-bit-config
                uid: f3ae98b7-4933-45b1-91f8-3422f62a564f
              resourceVersion: "329760384"
              uid: 4733c8cb-7c42-45f8-8bd8-18593d8ffce3
            type: Opaque

            I hit the same problem: after upgrading from 3.3.2 to 3.4.1, logs can no longer be collected. The cluster runs on containerd.

            fluent-bit logs:

            [2025/03/03 08:32:51] [error] [output:es:es.0] could not pack/validate JSON response
            {"took":3429,"errors":true,"items":[{"create":{"_index":"ks-logstash-log-2024.10.17","_type":"_doc","_id":"5deee03f-35d2-91f8-0fb9-85dd8a12de03","status":429,"error":{"type":"es_rejected_execution_exception","reason":"rejected execution of processing of [14043168][indices:data/write/bulk[s][p]]: request: BulkShardRequest [[ks-logstash-log-2024.10.17][0]] containing [53] requests, target allocation id: LWyJ_EVjT_6VpN7cG-PZEA, primary term: 1 on EsThreadPoolExecutor[name = elasticsearch-logging-data-1/write, queue capacity = 200, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@51bfe4ae[Running, pool size = 4, active threads = 4, queued tasks = 200, completed tasks = 8039058]]"}}},{"create":{"_index":"ks-logstash-log-2024.10.17","_type":"_doc","_id":"39f08f1a-c6aa-4456-8b2a-2305c4ee3742","status":429,"error":{"type":"es_rejected_execution_exception","reason":"rejected execution of processing of [18582445][indices:data/write/bulk[s][p]]: request: BulkShardRequest [[ks-logstash-log-2024.10.17][2]] containing [63] requests, target allocation id: cTbn210HQBa5VWbSlMLtKw, primary term: 1 on EsThreadPoolExecutor[name = elasticsearch-logging-data-0/write, queue capacity = 200, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@3d1ca0e7[Running, pool size = 4, active threads = 4, queued tasks = 200, completed tasks = 9184769]]"}}},{"create":{"_index":"ks-logstash-log-2024.10.17","_type":"_doc","_id":"05658ae1-6b8a-66e7-098c-5aeb4e123a24","status":429,"error":{"type":"es_rejected_execution_exception","reason":"rejected execution of processing of [14043189][indices:data/write/bulk[s][p]]: request: BulkShardRequest [[ks-logstash-log-2024.10.17][4]] containing [65] requests, target allocation id: WazCJm7uT1e6k3rUVi5U_A, primary term: 1 on EsThreadPoolExecutor[name = elasticsearch-logging-data-1/write, queue capacity = 200, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@51bfe4ae[Running, pool size = 4, active threads = 4, queued tasks = 200, completed tasks = 8039058]]"}}},{"create":{"_index":"ks-logstash-log-2024.10.17","_type":"_doc","_id":"1a885321-c9c1-13c9-19cf-4772c41d6797","status":429,"error":{"type":"es_rejected_execution_exception","reason":"rejected execution of processing of [14043189][indices:data/write/bulk[s][p]]: request: BulkShardRequest [[ks-logstash-log-2024.10.17][4]] containing [65] requests, target allocation id: WazCJm7uT1e6k3rUVi5U_A, primary term: 1 on EsThreadPoolExecutor[name = elasticsearch-logging-data-1/write, queue capacity = 200, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@51bfe4ae[Running, pool size = 4, active threads = 4, queued tasks = 200, completed tasks = 8039058]]"}}},{"create":{"_index":"ks-logstash-log-2024.10.17","_type":"_doc","_id":"d431d5db-918a-a794-9c3e-64a964bf57ca","status":429,"error":{"type":"es_rejected_execution_exception","reason":"rejected execution of processing of [14043168][indices:data/write/bulk[s][p]]: request: BulkShardRequest [[ks-logstash-log-2024.10.17][0]] containing [53] requests, target allocation id: LWyJ_EVjT_6VpN7cG-PZEA, primary term: 1 on EsThreadPoolExecutor[name = elasticsearch-logging-data-1/write, queue capacity = 200, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@51bfe4ae[Running, pool size = 4, active threads = 4, queued tasks = 200, completed tasks = 
8039058]]"}}},{"create":{"_index":"ks-logstash-log-2024.10.17","_type":"_doc","_id":"f6a86bd3-94e1-dcdc-9489-4b4356fcba37","status":429,"error":{"type":"es_rejected_execution_exception","reason":"rejected execution of processing of [18138084][indices:data/write/bulk[s][p]]: request: BulkShardRequest [[ks-logstash-log-2024.10.17][1]] containing [54] requests, target allocation id: tirijULMQ8CfQlP57b_fyQ, primary term: 1 on EsThreadPoolExecutor[name = elasticsearch-logging-data-2/write, queue capacity = 200, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@18f2f516[Running, pool size = 4, active threads = 4, queued tasks = 200, comple

            Could someone please take a look?