• ks-apiserver fails to start, please help take a look!

I have downloaded the file and will debug it. As a temporary workaround, you can run

kubectl delete helmrepos.application.kubesphere.io xxx (name of the bitnami repo)

and see if that helps.

    W0808 16:59:24.533011 1 client_config.go:618] Neither --kubeconfig nor --master was specifie

    W0808 16:59:24.533903 1 client_config.go:618] Neither --kubeconfig nor --master was specifie

    W0808 16:59:24.540581 1 metricsserver.go:238] Metrics API not available.

    I0808 16:59:24.543659 1 interface.go:50] start helm repo informer

    I0808 16:59:24.779808 1 apiserver.go:428] Start cache objects

    E0808 16:59:26.480827 1 runtime.go:79] Observed a panic: "invalid memory address or nil poin

    goroutine 936 [running]:

    k8s.io/apimachinery/pkg/util/runtime.logPanic({0x3919e80?, 0x6c18e40})

    /workspace/vendor/k8s.io/apimachinery/pkg/util/runtime/runtime.go:75 +0x99

    k8s.io/apimachinery/pkg/util/runtime.HandleCrash({0x0, 0x0, 0xc0002a8690?})

    /workspace/vendor/k8s.io/apimachinery/pkg/util/runtime/runtime.go:49 +0x75

    panic({0x3919e80, 0x6c18e40})

    /usr/local/go/src/runtime/panic.go:884 +0x212

    kubesphere.io/kubesphere/pkg/simple/client/openpitrix/helmrepoindex.HelmVersionWrapper.GetName(...

    /workspace/pkg/simple/client/openpitrix/helmrepoindex/load_package.go:48

    kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).addRepo(0xc0002a8690, 0xc001d40a28, 0

    /workspace/pkg/utils/reposcache/repo_cahes.go:272 +0xbcb

    kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).AddRepo(0xc0002a8690, 0xc0001685a0?)

    /workspace/pkg/utils/reposcache/repo_cahes.go:194 +0x7c

    kubesphere.io/kubesphere/pkg/models/openpitrix.NewOpenpitrixOperator.func1({0x3eebf00?, 0xc001d40a

    /workspace/pkg/models/openpitrix/interface.go:56 +0x3b

    k8s.io/client-go/tools/cache.ResourceEventHandlerFuncs.OnAdd(...)

    /workspace/vendor/k8s.io/client-go/tools/cache/controller.go:232

    k8s.io/client-go/tools/cache.(*processorListener).run.func1()

    /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:911 +0x134

    k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x0?)

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:157 +0x3e

    k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0xc003329f38?, {0x476a2a0, 0xc002d84c00}, 0x1, 0xc0

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:158 +0xb6

    k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x65726f632e676e69?, 0x3b9aca00, 0x0, 0x71?, 0xc0033

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:135 +0x89

    k8s.io/apimachinery/pkg/util/wait.Until(...)

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:92

    k8s.io/client-go/tools/cache.(*processorListener).run(0xc0003a5d80?)

    /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:905 +0x6b

    k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1()

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:75 +0x5a

    created by k8s.io/apimachinery/pkg/util/wait.(*Group).Start

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:73 +0x85

    panic: runtime error: invalid memory address or nil pointer dereference [recovered]

    panic: runtime error: invalid memory address or nil pointer dereference

    [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x21ea58b]

    goroutine 936 [running]:

    k8s.io/apimachinery/pkg/util/runtime.HandleCrash({0x0, 0x0, 0xc0002a8690?})

    /workspace/vendor/k8s.io/apimachinery/pkg/util/runtime/runtime.go:56 +0xd7

    panic({0x3919e80, 0x6c18e40})

    /usr/local/go/src/runtime/panic.go:884 +0x212

    kubesphere.io/kubesphere/pkg/simple/client/openpitrix/helmrepoindex.HelmVersionWrapper.GetName(...

    /workspace/pkg/simple/client/openpitrix/helmrepoindex/load_package.go:48

    kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).addRepo(0xc0002a8690, 0xc001d40a28, 0

    /workspace/pkg/utils/reposcache/repo_cahes.go:272 +0xbcb

    kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).AddRepo(0xc0002a8690, 0xc0001685a0?)

    /workspace/pkg/utils/reposcache/repo_cahes.go:194 +0x7c

    kubesphere.io/kubesphere/pkg/models/openpitrix.NewOpenpitrixOperator.func1({0x3eebf00?, 0xc001d40a

    /workspace/pkg/models/openpitrix/interface.go:56 +0x3b

    k8s.io/client-go/tools/cache.ResourceEventHandlerFuncs.OnAdd(...)

    /workspace/vendor/k8s.io/client-go/tools/cache/controller.go:232

    k8s.io/client-go/tools/cache.(*processorListener).run.func1()

    /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:911 +0x134

    k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x0?)

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:157 +0x3e

    k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0xc003329f38?, {0x476a2a0, 0xc002d84c00}, 0x1, 0xc0

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:158 +0xb6

    k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x65726f632e676e69?, 0x3b9aca00, 0x0, 0x71?, 0xc0033

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:135 +0x89

    k8s.io/apimachinery/pkg/util/wait.Until(...)

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:92

    k8s.io/client-go/tools/cache.(*processorListener).run(0xc0003a5d80?)

    /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:905 +0x6b

    k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1()

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:75 +0x5a

    created by k8s.io/apimachinery/pkg/util/wait.(*Group).Start

    /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:73 +0x85

    Could someone help take a look? This environment is open to others, so I'm not sure whether some operation by a colleague caused it. ks-apiserver cannot start now.

    inksnw

    kubectl get helmrepos.application.kubesphere.io

    NAME                  NAME                                WORKSPACE        URL                                                   STATE        AGE
    repo-17114xo13qyk4y   charts.bitnami.com                  xrkj-workspace   https://charts.bitnami.com/bitnami                    failed       323d
    repo-3k11vx00kk8w4z   mysql.github.io                     base-workspace   https://mysql.github.io/mysql-operator                successful   283d
    repo-41q15v9l948y9r   helm.elastic.co                     xrkj-workspace   https://helm.elastic.co                               successful   322d
    repo-44xlnmzyq6zm9r   helm.goharbor.io                    xrkj-workspace   https://helm.goharbor.io                              successful   303d
    repo-5y225jy5lnon6j   charts.gitlab.io                    xrkj-workspace   http://charts.gitlab.io                               failed       323d
    repo-9m7r8yk81lon6j   sonatype.github.io                  xrkj-workspace   https://sonatype.github.io/helm3-charts               successful   323d
    repo-jkmx23jn3j8w4y   mysql.github.io                     xrkj-workspace   https://mysql.github.io/mysql-operator                successful   283d
    repo-mowwwrm863v96l   harbor.us168168.com                 xrkj-workspace   https://harbor.us168168.com/chartrepo/library         failed       322d
    repo-mpolxyrpj9v96l   mirror.azure.cn/kubernetes/charts   xrkj-workspace   http://mirror.azure.cn/kubernetes/charts              successful   323d
    repo-mqxl06roo5vy93   charts.bitnami.com                  sugn-workspace   https://charts.bitnami.com/bitnami                    successful   258d
    repo-r73wv5j99z0k7k   a-nexus.wetok168.com                xrkj-workspace   https://a-nexus.wetok168.com/repository/helm-hosted   successful   289d

    So I only need to delete the bitnami one?

      inksnw

      Thanks, it works now. Could you explain why? And will this happen again if the bitnami repo is added back later?

        There is dirty data in the bitnami repo: one chart package has only meta information and no actual content, which triggers the error.
        Going forward, either the bitnami repo corrects its data, or KubeSphere ships a patch that skips this kind of abnormal data.
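
        A minimal sketch of the failure mode, using simplified stand-in types rather than the actual KubeSphere definitions (the stack trace points at pkg/simple/client/openpitrix/helmrepoindex/load_package.go:48 and pkg/utils/reposcache/repo_cahes.go:272): if the index entry embeds a pointer to the chart metadata, an "empty" entry with no metadata makes GetName dereference nil and crash. A guard like safeName below is the kind of patch that could skip such entries instead of bringing down ks-apiserver.

        // Simplified stand-in types; not the real KubeSphere code.
        package main

        import "fmt"

        // Metadata mimics a Helm chart's metadata block (name, version, ...).
        type Metadata struct {
            Name    string
            Version string
        }

        // ChartVersion mimics a repo index entry: the metadata is an embedded
        // pointer, so an "empty" entry can legitimately carry a nil Metadata.
        type ChartVersion struct {
            *Metadata
            URLs []string
        }

        // VersionWrapper mimics HelmVersionWrapper: GetName reads the promoted
        // Name field, which dereferences the embedded *Metadata and therefore
        // panics with a nil pointer dereference when Metadata is nil.
        type VersionWrapper struct {
            *ChartVersion
        }

        func (w VersionWrapper) GetName() string { return w.ChartVersion.Name }

        // safeName is the kind of guard a patch could add so that a dirty entry
        // is skipped instead of crashing the apiserver on every restart.
        func safeName(w VersionWrapper) (string, bool) {
            if w.ChartVersion == nil || w.ChartVersion.Metadata == nil {
                return "", false
            }
            return w.ChartVersion.Name, true
        }

        func main() {
            good := VersionWrapper{&ChartVersion{Metadata: &Metadata{Name: "nginx", Version: "1.0.0"}}}
            bad := VersionWrapper{&ChartVersion{}} // metadata-less entry, like the cached "empty package"

            for _, v := range []VersionWrapper{good, bad} {
                if name, ok := safeName(v); ok {
                    fmt.Println("caching chart:", name)
                } else {
                    fmt.Println("skipping chart entry with missing metadata")
                }
            }
            // Calling bad.GetName() here would panic exactly like the log above.
        }

        Deleting the HelmRepo object (the workaround above) works because the bad cached entry is removed along with it, so the informer's add handler never reaches the nil dereference again on startup.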

          suguangnan
          You can try adding it again. I added a fresh bitnami repo and could not reproduce the issue. They probably published an empty package at some point, your environment cached that empty package, and they later fixed/removed it.

          [root@node1 ~]# k logs -f ks-apiserver-5c59bc47c8-zgtt9

          W0808 20:45:35.135585 1 client_config.go:618] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work.

          W0808 20:45:35.137281 1 client_config.go:618] Neither --kubeconfig nor --master was specified. Using the inClusterConfig. This might not work.

          W0808 20:45:35.148741 1 cache.go:64] In-memory cache will be used, this may cause data inconsistencies when running with multiple replicas.

          I0808 20:45:35.148908 1 interface.go:50] start helm repo informer

          I0808 20:45:35.541494 1 apiserver.go:428] Start cache objects

          E0808 20:45:39.932750 1 runtime.go:79] Observed a panic: "invalid memory address or nil pointer dereference" (runtime error: invalid memory address or nil pointer dereference)

          goroutine 1270 [running]:

          k8s.io/apimachinery/pkg/util/runtime.logPanic({0x3919e80?, 0x6c18e40})

              /workspace/vendor/k8s.io/apimachinery/pkg/util/runtime/runtime.go:75 +0x99

          k8s.io/apimachinery/pkg/util/runtime.HandleCrash({0x0, 0x0, 0xc000d9a780?})

              /workspace/vendor/k8s.io/apimachinery/pkg/util/runtime/runtime.go:49 +0x75

          panic({0x3919e80, 0x6c18e40})

              /usr/local/go/src/runtime/panic.go:884 +0x212

          kubesphere.io/kubesphere/pkg/simple/client/openpitrix/helmrepoindex.HelmVersionWrapper.GetName(...)

              /workspace/pkg/simple/client/openpitrix/helmrepoindex/load_package.go:48

          kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).addRepo(0xc000d9a780, 0xc00324e208, 0x3?)

              /workspace/pkg/utils/reposcache/repo_cahes.go:272 +0xbcb

          kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).AddRepo(0xc000d9a780, 0xc000558720?)

              /workspace/pkg/utils/reposcache/repo_cahes.go:194 +0x7c

          kubesphere.io/kubesphere/pkg/models/openpitrix.NewOpenpitrixOperator.func1({0x3eebf00?, 0xc00324e208?})

              /workspace/pkg/models/openpitrix/interface.go:56 +0x3b

          k8s.io/client-go/tools/cache.ResourceEventHandlerFuncs.OnAdd(...)

              /workspace/vendor/k8s.io/client-go/tools/cache/controller.go:232

          k8s.io/client-go/tools/cache.(*processorListener).run.func1()

              /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:911 +0x134

          k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x0?)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:157 +0x3e

          k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0xc001185738?, {0x476a2a0, 0xc003564390}, 0x1, 0xc000d57140)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:158 +0xb6

          k8s.io/apimachinery/pkg/util/wait.JitterUntil(0xc000080028?, 0x3b9aca00, 0x0, 0xc0?, 0xc001185788?)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:135 +0x89

          k8s.io/apimachinery/pkg/util/wait.Until(...)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:92

          k8s.io/client-go/tools/cache.(*processorListener).run(0xc0010aab80?)

              /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:905 +0x6b

          k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1()

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:75 +0x5a

          created by k8s.io/apimachinery/pkg/util/wait.(*Group).Start

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:73 +0x85

          panic: runtime error: invalid memory address or nil pointer dereference [recovered]

              panic: runtime error: invalid memory address or nil pointer dereference

          [signal SIGSEGV: segmentation violation code=0x1 addr=0x0 pc=0x21ea58b]

          goroutine 1270 [running]:

          k8s.io/apimachinery/pkg/util/runtime.HandleCrash({0x0, 0x0, 0xc000d9a780?})

              /workspace/vendor/k8s.io/apimachinery/pkg/util/runtime/runtime.go:56 +0xd7

          panic({0x3919e80, 0x6c18e40})

              /usr/local/go/src/runtime/panic.go:884 +0x212

          kubesphere.io/kubesphere/pkg/simple/client/openpitrix/helmrepoindex.HelmVersionWrapper.GetName(...)

              /workspace/pkg/simple/client/openpitrix/helmrepoindex/load_package.go:48

          kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).addRepo(0xc000d9a780, 0xc00324e208, 0x3?)

              /workspace/pkg/utils/reposcache/repo_cahes.go:272 +0xbcb

          kubesphere.io/kubesphere/pkg/utils/reposcache.(*cachedRepos).AddRepo(0xc000d9a780, 0xc000558720?)

              /workspace/pkg/utils/reposcache/repo_cahes.go:194 +0x7c

          kubesphere.io/kubesphere/pkg/models/openpitrix.NewOpenpitrixOperator.func1({0x3eebf00?, 0xc00324e208?})

              /workspace/pkg/models/openpitrix/interface.go:56 +0x3b

          k8s.io/client-go/tools/cache.ResourceEventHandlerFuncs.OnAdd(...)

              /workspace/vendor/k8s.io/client-go/tools/cache/controller.go:232

          k8s.io/client-go/tools/cache.(*processorListener).run.func1()

              /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:911 +0x134

          k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x0?)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:157 +0x3e

          k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0xc001185738?, {0x476a2a0, 0xc003564390}, 0x1, 0xc000d57140)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:158 +0xb6

          k8s.io/apimachinery/pkg/util/wait.JitterUntil(0xc000080028?, 0x3b9aca00, 0x0, 0xc0?, 0xc001185788?)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:135 +0x89

          k8s.io/apimachinery/pkg/util/wait.Until(...)

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:92

          k8s.io/client-go/tools/cache.(*processorListener).run(0xc0010aab80?)

              /workspace/vendor/k8s.io/client-go/tools/cache/shared_informer.go:905 +0x6b

          k8s.io/apimachinery/pkg/util/wait.(*Group).Start.func1()

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:75 +0x5a

          created by k8s.io/apimachinery/pkg/util/wait.(*Group).Start

              /workspace/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:73 +0x85

          Could you help me figure out what is causing mine?

          Deleting bitnami fixed it. What exactly is this problem?