When creating a deployment issue, please follow the template below. The more information you provide, the easier it is to get a timely answer. Issues that do not follow the template may be closed by the administrators.
Make sure the post is clearly formatted and readable, and use markdown code block syntax for code blocks.
If you spend only one minute writing a question, you cannot expect someone else to spend half an hour answering it.
Operating system information
ESXi 8 virtual machines: 3 × 4 vCPU / 8 GB RAM / 200 GB disk and 3 × 12 vCPU / 32 GB RAM / 500 GB disk; HAProxy: 2 vCPU / 4 GB RAM / 50 GB disk
Kubernetes version information
```yaml
clientVersion:
  buildDate: "2022-12-08T19:58:30Z"
  compiler: gc
  gitCommit: b46a3f887ca979b1a5d14fd39cb1af43e7e5d12d
  gitTreeState: clean
  gitVersion: v1.26.0
  goVersion: go1.19.4
  major: "1"
  minor: "26"
  platform: linux/amd64
kustomizeVersion: v4.5.7
serverVersion:
  buildDate: "2022-12-08T19:51:45Z"
  compiler: gc
  gitCommit: b46a3f887ca979b1a5d14fd39cb1af43e7e5d12d
  gitTreeState: clean
  gitVersion: v1.26.0
  goVersion: go1.19.4
  major: "1"
  minor: "26"
  platform: linux/amd64
```
Container runtime
```
Version: 0.1.0
RuntimeName: containerd
RuntimeVersion: v1.6.4
RuntimeApiVersion: v1
```
KubeSphere version information
```bash
kk create config --with-kubernetes v1.26.0 --with-kubesphere v3.3.2
```
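For reference, the DevOps switch toggled in this report lives under `spec.devops` in the generated ClusterConfiguration. A minimal sketch of the relevant fragment for v3.3.x (surrounding fields elided):

```yaml
apiVersion: installer.kubesphere.io/v1alpha1
kind: ClusterConfiguration
metadata:
  name: ks-installer
  namespace: kubesphere-system
spec:
  devops:
    enabled: true   # the "devops set to true" setting described below
```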
What is the problem?
Whether devops is set to true by dynamically editing the configuration on the running cluster or by modifying the config before deployment, both paths fail with the same error; after the run finishes the devops count is 0 and no DevOps pods are running.
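To clarify what "dynamically editing" means here: this is presumably the standard in-place edit of the ks-installer ClusterConfiguration from the KubeSphere docs, along these lines:

```bash
# Edit the live ClusterConfiguration and set spec.devops.enabled to true
kubectl edit cc ks-installer -n kubesphere-system

# Then watch the installer replay its playbooks
kubectl logs -n kubesphere-system deploy/ks-installer -f
```

Either way, the run ends with the two failed tasks shown below.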
```
Task 'devops' failed:
******************************************************************************************************************************************************
{
  "counter": 145,
  "created": "2023-04-05T18:59:16.612657",
  "end_line": 174,
  "event": "runner_on_failed",
  "event_data": {
    "duration": 0.347949,
    "end": "2023-04-05T18:59:16.612550",
    "event_loop": null,
    "host": "localhost",
    "ignore_errors": null,
    "play": "localhost",
    "play_pattern": "localhost",
    "play_uuid": "46cf8a32-51ba-dd35-e563-000000000005",
    "playbook": "/kubesphere/playbooks/devops.yaml",
    "playbook_uuid": "227602bc-8f4b-4f9b-80c4-996fd79664ff",
    "remote_addr": "127.0.0.1",
    "res": {
      "changed": true,
      "msg": "All items completed",
      "results": [
        {
          "_ansible_item_label": "kubesphere-devops-worker",
          "_ansible_no_log": false,
          "ansible_loop_var": "item",
          "changed": true,
          "cmd": "/usr/local/bin/kubectl label --overwrite namespaces kubesphere-devops-worker kubesphere.io/workspace=system-workspace\n",
          "delta": "0:00:00.057229",
          "end": "2023-04-06 02:59:16.590716",
          "failed": true,
          "invocation": {
            "module_args": {
              "_raw_params": "/usr/local/bin/kubectl label --overwrite namespaces kubesphere-devops-worker kubesphere.io/workspace=system-workspace\n",
              "_uses_shell": true,
              "argv": null,
              "chdir": null,
              "creates": null,
              "executable": null,
              "removes": null,
              "stdin": null,
              "stdin_add_newline": true,
              "strip_empty_ends": true,
              "warn": true
            }
          },
          "item": "kubesphere-devops-worker",
          "msg": "non-zero return code",
          "rc": 1,
          "start": "2023-04-06 02:59:16.533487",
          "stderr": "Error from server (NotFound): namespaces \"kubesphere-devops-worker\" not found",
          "stderr_lines": [
            "Error from server (NotFound): namespaces \"kubesphere-devops-worker\" not found"
          ],
          "stdout": "",
          "stdout_lines": []
        }
      ]
    },
    "resolved_action": "shell",
    "role": "ks-devops",
    "start": "2023-04-05T18:59:16.264601",
    "task": "ks-devops | Labelling devops namespaces as system workspace",
    "task_action": "shell",
    "task_args": "",
    "task_path": "/kubesphere/installer/roles/ks-devops/tasks/main.yaml:232",
    "task_uuid": "46cf8a32-51ba-dd35-e563-000000000043",
    "uuid": "88af66a5-4189-4bd0-8a30-4d3e68df32d6"
  },
  "parent_uuid": "46cf8a32-51ba-dd35-e563-000000000043",
  "pid": 8830,
  "runner_ident": "devops",
  "start_line": 174,
  "stdout": "",
  "uuid": "88af66a5-4189-4bd0-8a30-4d3e68df32d6"
}
******************************************************************************************************************************************************
```
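The failed task above reduces to a single kubectl invocation against a namespace that the installer apparently never created. Rerunning it by hand (a hypothetical check, not taken from the log) makes the state easy to confirm:

```bash
# The exact command the installer ran, from "cmd" in the log above:
/usr/local/bin/kubectl label --overwrite namespaces kubesphere-devops-worker kubesphere.io/workspace=system-workspace
# -> Error from server (NotFound): namespaces "kubesphere-devops-worker" not found

# Check which devops namespaces actually exist
kubectl get ns | grep devops
```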
```
Task 'monitoring' failed:
******************************************************************************************************************************************************
{
  "counter": 117,
  "created": "2023-04-05T19:00:35.934520",
  "end_line": 112,
  "event": "runner_on_failed",
  "event_data": {
    "duration": 36.470051,
    "end": "2023-04-05T19:00:35.934411",
    "event_loop": null,
    "host": "localhost",
    "ignore_errors": null,
    "play": "localhost",
    "play_pattern": "localhost",
    "play_uuid": "46cf8a32-51ba-6638-115b-000000000005",
    "playbook": "/kubesphere/playbooks/monitoring.yaml",
    "playbook_uuid": "208dee20-7e26-45fa-8f28-1326807b4b30",
    "remote_addr": "127.0.0.1",
    "res": {
      "changed": true,
      "msg": "All items completed",
      "results": [
        {
          "_ansible_item_label": "prometheus",
          "_ansible_no_log": false,
          "ansible_loop_var": "item",
          "attempts": 5,
          "changed": true,
          "cmd": "/usr/local/bin/kubectl apply -f /kubesphere/kubesphere/prometheus/prometheus",
          "delta": "0:00:00.252329",
          "end": "2023-04-06 03:00:18.054789",
          "failed": true,
          "failed_when_result": true,
          "invocation": {
            "module_args": {
              "_raw_params": "/usr/local/bin/kubectl apply -f /kubesphere/kubesphere/prometheus/prometheus",
              "_uses_shell": true,
              "argv": null,
              "chdir": null,
              "creates": null,
              "executable": null,
              "removes": null,
              "stdin": null,
              "stdin_add_newline": true,
              "strip_empty_ends": true,
              "warn": true
            }
          },
          "item": "prometheus",
          "msg": "non-zero return code",
          "rc": 1,
          "start": "2023-04-06 03:00:17.802460",
          "stderr": "error: unable to recognize \"/kubesphere/kubesphere/prometheus/prometheus/prometheus-podDisruptionBudget.yaml\": no matches for kind \"PodDisruptionBudget\" in version \"policy/v1beta1\"",
          "stderr_lines": [
            "error: unable to recognize \"/kubesphere/kubesphere/prometheus/prometheus/prometheus-podDisruptionBudget.yaml\": no matches for kind \"PodDisruptionBudget\" in version \"policy/v1beta1\""
          ],
          "stdout": "clusterrole.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged\nclusterrolebinding.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged\nprometheus.monitoring.coreos.com/k8s unchanged\nprometheusrule.monitoring.coreos.com/prometheus-k8s-prometheus-rules unchanged\nrolebinding.rbac.authorization.k8s.io/prometheus-k8s-config unchanged\nrole.rbac.authorization.k8s.io/prometheus-k8s-config unchanged\nservice/prometheus-k8s unchanged\nserviceaccount/prometheus-k8s unchanged\nservicemonitor.monitoring.coreos.com/prometheus-k8s unchanged",
          "stdout_lines": [
            "clusterrole.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged",
            "clusterrolebinding.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged",
            "prometheus.monitoring.coreos.com/k8s unchanged",
            "prometheusrule.monitoring.coreos.com/prometheus-k8s-prometheus-rules unchanged",
            "rolebinding.rbac.authorization.k8s.io/prometheus-k8s-config unchanged",
            "role.rbac.authorization.k8s.io/prometheus-k8s-config unchanged",
            "service/prometheus-k8s unchanged",
            "serviceaccount/prometheus-k8s unchanged",
            "servicemonitor.monitoring.coreos.com/prometheus-k8s unchanged"
          ]
        },
        {
          "_ansible_item_label": "prometheus",
          "_ansible_no_log": false,
          "ansible_loop_var": "item",
          "attempts": 5,
          "changed": true,
          "cmd": "/usr/local/bin/kubectl apply -f /kubesphere/kubesphere/prometheus/prometheus",
          "delta": "0:00:00.251284",
          "end": "2023-04-06 03:00:35.908656",
          "failed": true,
          "failed_when_result": true,
          "invocation": {
            "module_args": {
              "_raw_params": "/usr/local/bin/kubectl apply -f /kubesphere/kubesphere/prometheus/prometheus",
              "_uses_shell": true,
              "argv": null,
              "chdir": null,
              "creates": null,
              "executable": null,
              "removes": null,
              "stdin": null,
              "stdin_add_newline": true,
              "strip_empty_ends": true,
              "warn": true
            }
          },
          "item": "prometheus",
          "msg": "non-zero return code",
          "rc": 1,
          "start": "2023-04-06 03:00:35.657372",
          "stderr": "error: unable to recognize \"/kubesphere/kubesphere/prometheus/prometheus/prometheus-podDisruptionBudget.yaml\": no matches for kind \"PodDisruptionBudget\" in version \"policy/v1beta1\"",
          "stderr_lines": [
            "error: unable to recognize \"/kubesphere/kubesphere/prometheus/prometheus/prometheus-podDisruptionBudget.yaml\": no matches for kind \"PodDisruptionBudget\" in version \"policy/v1beta1\""
          ],
          "stdout": "clusterrole.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged\nclusterrolebinding.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged\nprometheus.monitoring.coreos.com/k8s unchanged\nprometheusrule.monitoring.coreos.com/prometheus-k8s-prometheus-rules unchanged\nrolebinding.rbac.authorization.k8s.io/prometheus-k8s-config unchanged\nrole.rbac.authorization.k8s.io/prometheus-k8s-config unchanged\nservice/prometheus-k8s unchanged\nserviceaccount/prometheus-k8s unchanged\nservicemonitor.monitoring.coreos.com/prometheus-k8s unchanged",
          "stdout_lines": [
            "clusterrole.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged",
            "clusterrolebinding.rbac.authorization.k8s.io/kubesphere-prometheus-k8s unchanged",
            "prometheus.monitoring.coreos.com/k8s unchanged",
            "prometheusrule.monitoring.coreos.com/prometheus-k8s-prometheus-rules unchanged",
            "rolebinding.rbac.authorization.k8s.io/prometheus-k8s-config unchanged",
            "role.rbac.authorization.k8s.io/prometheus-k8s-config unchanged",
            "service/prometheus-k8s unchanged",
            "serviceaccount/prometheus-k8s unchanged",
            "servicemonitor.monitoring.coreos.com/prometheus-k8s unchanged"
          ]
        }
      ]
    },
    "resolved_action": "shell",
    "role": "ks-monitor",
    "start": "2023-04-05T18:59:59.464360",
    "task": "Monitoring | Installing Prometheus",
    "task_action": "shell",
    "task_args": "",
    "task_path": "/kubesphere/installer/roles/ks-monitor/tasks/prometheus.yaml:2",
    "task_uuid": "46cf8a32-51ba-6638-115b-000000000042",
    "uuid": "623ebccf-7051-4721-90c5-2b1de17ab2c7"
  },
  "parent_uuid": "46cf8a32-51ba-6638-115b-000000000042",
  "pid": 8824,
  "runner_ident": "monitoring",
  "start_line": 112,
  "stdout": "",
  "uuid": "623ebccf-7051-4721-90c5-2b1de17ab2c7"
}
```
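For context on this second failure: the PodDisruptionBudget API under policy/v1beta1 was removed in Kubernetes v1.25, so the v1.26.0 API server here no longer serves it, and the bundled prometheus-podDisruptionBudget.yaml would need the policy/v1 form instead. A sketch of what that manifest looks like under policy/v1 (the name, namespace, and selector values are assumptions, not taken from the bundled file):

```yaml
apiVersion: policy/v1          # policy/v1beta1 was removed in Kubernetes v1.25
kind: PodDisruptionBudget
metadata:
  name: prometheus-k8s                      # assumed name
  namespace: kubesphere-monitoring-system   # assumed namespace
spec:
  minAvailable: 1
  selector:
    matchLabels:
      app.kubernetes.io/name: prometheus    # assumed selector
```

Whether the policy/v1 group is actually served by the cluster can be checked with `kubectl api-versions | grep policy`.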