# master1 route
Destination     Gateway         Genmask         Flags Metric Ref    Use Iface
default         gateway         0.0.0.0         UG    100    0        0 ens192
10.10.16.0      0.0.0.0         255.255.254.0   U     100    0        0 ens192
10.233.90.0     node1.alpha.pm. 255.255.255.0   UG    0      0        0 tunl0
10.233.92.0     node3.alpha.pm. 255.255.255.0   UG    0      0        0 tunl0
10.233.96.0     node2.alpha.pm. 255.255.255.0   UG    0      0        0 tunl0
10.233.97.0     0.0.0.0         255.255.255.0   U     0      0        0 *
10.233.97.1     0.0.0.0         255.255.255.255 UH    0      0        0 calida4d89a3346
10.233.97.2     0.0.0.0         255.255.255.255 UH    0      0        0 calide035c655d8
10.233.97.3     0.0.0.0         255.255.255.255 UH    0      0        0 calid862eebfde5
10.233.97.4     0.0.0.0         255.255.255.255 UH    0      0        0 cali7915c27a513
10.233.97.5     0.0.0.0         255.255.255.255 UH    0      0        0 calieaa8c1d61b3
10.233.97.6     0.0.0.0         255.255.255.255 UH    0      0        0 calid3919276094
10.233.97.7     0.0.0.0         255.255.255.255 UH    0      0        0 cali41ce76524d3
10.233.98.0     master2.alpha.p 255.255.255.0   UG    0      0        0 tunl0
10.233.99.0     master3.alpha.p 255.255.255.0   UG    0      0        0 tunl0
10.233.105.0    node4.alpha.pm. 255.255.255.0   UG    0      0        0 tunl0
192.168.100.0   0.0.0.0         255.255.255.0   U     0      0        0 docker0

What does your Eip configuration look like? Also, please post the output of kubectl get nodes node4 -o yaml.

apiVersion: network.kubesphere.io/v1alpha2
kind: Eip
metadata:
  annotations:
    kubectl.kubernetes.io/last-applied-configuration: |
      {"apiVersion":"network.kubesphere.io/v1alpha2","kind":"Eip","metadata":{"annotations":{},"labels":{"argocd.argoproj.io/instance":"alpha-porter"},"name":"eip-pool"},"spec":{"address":"10.10.17.81-10.10.17.88","disable":false,"interface":"docker0","protocol":"layer2"}}
  creationTimestamp: "2021-04-19T02:31:10Z"
  finalizers:
  - finalizer.ipam.kubesphere.io/v1alpha1
  generation: 3
  labels:
    argocd.argoproj.io/instance: alpha-porter
  name: eip-pool
  resourceVersion: "18199"
  selfLink: /apis/network.kubesphere.io/v1alpha2/eips/eip-pool
  uid: 129ef10a-4475-441d-b927-d70debf17c53
spec:
  address: 10.10.17.81-10.10.17.88
  disable: false
  interface: docker0
  protocol: layer2
status:
  firstIP: 10.10.17.81
  lastIP: 10.10.17.88
  poolSize: 8
  ready: true
  usage: 2
  used:
    10.10.17.81: argocd/argocd-server
    10.10.17.82: default/ingress-nginx-controller
  v4: true
# node4
apiVersion: v1
kind: Node
metadata:
  annotations:
    kubeadm.alpha.kubernetes.io/cri-socket: /var/run/dockershim.sock
    node.alpha.kubernetes.io/ttl: "0"
    projectcalico.org/IPv4Address: 10.10.17.89/23
    projectcalico.org/IPv4IPIPTunnelAddr: 10.233.105.0
    volumes.kubernetes.io/controller-managed-attach-detach: "true"
  creationTimestamp: "2021-04-19T02:03:56Z"
  labels:
    beta.kubernetes.io/arch: amd64
    beta.kubernetes.io/os: linux
    kubernetes.io/arch: amd64
    kubernetes.io/hostname: node4
    kubernetes.io/os: linux
    node-role.kubernetes.io/worker: ""
  name: node4
  resourceVersion: "82320"
  selfLink: /api/v1/nodes/node4
  uid: 05969a4a-edae-44ce-8c72-70c5b5c85268
spec:
  podCIDR: 10.233.65.0/24
  podCIDRs:
  - 10.233.65.0/24
status:
  addresses:
  - address: 10.10.17.89
    type: InternalIP
  - address: node4
    type: Hostname
  allocatable:
    cpu: 7600m
    ephemeral-storage: 116335104Ki
    hugepages-1Gi: "0"
    hugepages-2Mi: "0"
    memory: "15281365595"
    pods: "110"
  capacity:
    cpu: "8"
    ephemeral-storage: 116335104Ki
    hugepages-1Gi: "0"
    hugepages-2Mi: "0"
    memory: 16247588Ki
    pods: "110"
  conditions:
  - lastHeartbeatTime: "2021-04-19T02:04:01Z"
    lastTransitionTime: "2021-04-19T02:04:01Z"
    message: Calico is running on this node
    reason: CalicoIsUp
    status: "False"
    type: NetworkUnavailable
  - lastHeartbeatTime: "2021-04-19T06:10:21Z"
    lastTransitionTime: "2021-04-19T02:03:56Z"
    message: kubelet has sufficient memory available
    reason: KubeletHasSufficientMemory
    status: "False"
    type: MemoryPressure
  - lastHeartbeatTime: "2021-04-19T06:10:21Z"
    lastTransitionTime: "2021-04-19T02:03:56Z"
    message: kubelet has no disk pressure
    reason: KubeletHasNoDiskPressure
    status: "False"
    type: DiskPressure
  - lastHeartbeatTime: "2021-04-19T06:10:21Z"
    lastTransitionTime: "2021-04-19T02:03:56Z"
    message: kubelet has sufficient PID available
    reason: KubeletHasSufficientPID
    status: "False"
    type: PIDPressure
  - lastHeartbeatTime: "2021-04-19T06:10:21Z"
    lastTransitionTime: "2021-04-19T02:04:07Z"
    message: kubelet is posting ready status
    reason: KubeletReady
    status: "True"
    type: Ready
  daemonEndpoints:
    kubeletEndpoint:
      Port: 10250
  images:
  - names:
    - argoproj/argocd@sha256:439393f0f7de91e8db45e99d1f0f2426b3c4c0081077e4b7d4fc9736b147f851
    - argoproj/argocd:v1.8.4
    sizeBytes: 1384936753
  - names:
    - argoproj/argocd@sha256:8d1d58ef963f615da97e0b2c54dbe243801d5e7198b98393ab36b7a5768f72a4
    - argoproj/argocd:v2.0.0
    sizeBytes: 1015151912
  - names:
    - openpitrix/openpitrix@sha256:a31a465c8b4d37b1c35bec6b4311c72ba4f1909d489a5943837bf1cb675e9033
    - openpitrix/openpitrix:v0.5.0
    sizeBytes: 966702051
  - names:
    - artifactory.gz.cvte.cn/docker-registry/infra/cod@sha256:f6c466a21d1dbf74ca62edbd8d51ad0d3639894c7fd30b2adb6f6fcb050f28fc
    - artifactory.gz.cvte.cn/docker-registry/infra/cod:master-cb3c46ee
    sizeBytes: 924637279
  - names:
    - kubesphere/elasticsearch-oss@sha256:795da1ec4a08fd30048471f7a731700c0781234970ff5d2be64029244c9288d3
    - kubesphere/elasticsearch-oss:6.7.0-1
    sizeBytes: 702164843
  - names:
    - kubesphere/jenkins-uc@sha256:6de69cb15a7d086cf4658409cda4be9e630bd30319e0ad127e5b4a8f8892c49c
    - kubesphere/jenkins-uc:v3.0.0
    sizeBytes: 479963409
  - names:
    - plantuml/plantuml-server@sha256:968eec27c9735338421c4585f5f30848a48d225aa16b4b6424af1a48356e5c13
    - plantuml/plantuml-server:jetty
    sizeBytes: 454573184
  - names:
    - k8s.gcr.io/ingress-nginx/controller@sha256:c4390c53f348c3bd4e60a5dd6a11c35799ae78c49388090140b9d72ccede1755
    sizeBytes: 281553881
  - names:
    - k8s.gcr.io/ingress-nginx/controller@sha256:3dd0fac48073beaca2d67a78c746c7593f9c575168a17139a9955a82c63c4b9a
    sizeBytes: 278507478
  - names:
    - calico/node@sha256:b386769a293d180cb6ee208c8594030128a0810b286a93ae897a231ef247afa8
    - calico/node:v3.15.1
    sizeBytes: 262412866
  - names:
    - istio/proxyv2@sha256:8a3cc699548bc29df1dbe42c642bc0736471a1cddfe5daa47d6a6cd454dff3f4
    - istio/proxyv2:1.4.8
    sizeBytes: 248297850
  - names:
    - docker@sha256:38ba04fa7a7194f1b95a082c204dc888ebc938007ac10343c623ee5db0f97cad
    - docker:19.03
    sizeBytes: 219956128
  - names:
    - calico/cni@sha256:b86711626e68a5298542efc52e2bd3c64e212a635359b3a017ee0a8cd47b0c1e
    - calico/cni:v3.15.1
    sizeBytes: 217137499
  - names:
    - istio/kubectl@sha256:4042114a643b009f5e42e60a5e8b89d80eece29d910bbb7e1092fe81f22e6011
    - istio/kubectl:1.4.8
    sizeBytes: 216637982
  - names:
    - istio/galley@sha256:9c6adde8936c8250c521204599afcdf4e2709a100ef9eae697b113b25289bfc0
    - istio/galley:1.4.8
    sizeBytes: 210356921
  - names:
    - istio/pilot@sha256:dc540d7c847cb65eaf54ad79ba3d3c9f0023e73bb7c39bd0cb95a85d63fc0259
    - istio/pilot:1.4.8
    sizeBytes: 206358021
  - names:
    - istio/sidecar_injector@sha256:1856ccd98a7f90f03ca07c359e48c84e79b600fd05c9c77bfa74b1b20decc853
    - istio/sidecar_injector:1.4.8
    sizeBytes: 194175320
  - names:
    - istio/citadel@sha256:41dd53ab67fe3984a512948b31066f8d779ae55642604581428930482be35a6a
    - istio/citadel:1.4.8
    sizeBytes: 185868819
  - names:
    - jaegertracing/jaeger-es-index-cleaner@sha256:96d06d7d278e01893fd18a37b01817c07d0b28e4b8bb3b429d148349ea2829b4
    - jaegertracing/jaeger-es-index-cleaner:1.17.1
    sizeBytes: 176939817
  - names:
    - kubesphere/alerting-dbinit@sha256:66ae519e5151d68d4a8a04c788b61644f21245136a68b3c235d5abd021adf3e3
    - kubesphere/alerting-dbinit:v3.0.0
    sizeBytes: 156779806
  - names:
    - openpitrix/openpitrix@sha256:770ec1cdc4626d8b22ae7191d1a1f21a979018e92c27ab66b7821eb3e3bec1b3
    - openpitrix/openpitrix:flyway-v0.5.0
    sizeBytes: 149139847
  - names:
    - prom/prometheus@sha256:788260ebd13613456c168d2eed8290f119f2b6301af2507ff65908d979c66c17
    - prom/prometheus:v2.20.1
    sizeBytes: 144887258
  - names:
    - kubesphere/ks-apiserver@sha256:dfdeb8a1e4e496a930cc827a153ecfc1151d62525ac76b4534867d91af6b7a38
    - kubesphere/ks-apiserver:v3.0.0
    sizeBytes: 120096156
  - names:
    - kubesphere/kube-proxy@sha256:f236d990c26b46044722a575d5c05ff1d7bd9a6d8ea5d4f92c6fa29878abb9e3
    - kubesphere/kube-proxy:v1.18.8
    sizeBytes: 117264685
  - names:
    - calico/pod2daemon-flexvol@sha256:c2c6bbe3e10d24a01d6f3fd5b446cce6cf3e37f943960263bf6e5c458ecdeb52
    - calico/pod2daemon-flexvol:v3.15.1
    sizeBytes: 112413732
  - names:
    - kubesphere/k8s-dns-node-cache@sha256:3b55377cd3b8098a79dc3f276cc542a681e3f2b71554addac9a603cc65e4829e
    - kubesphere/k8s-dns-node-cache:1.15.12
    sizeBytes: 106950843
  - names:
    - rabbitmq@sha256:7e0b62800e5e086a3b285b5d9c485f12417f1f626a758cbfe922b36cea797209
    - rabbitmq:3.8.1-alpine
    sizeBytes: 102854689
  - names:
    - kubesphere/alerting@sha256:e09bf9b5b8ea7ae1acacd551c650d0f01f60fc502957d219f423c9a49f144ade
    - kubesphere/alerting:v2.1.2
    sizeBytes: 101797272
  - names:
    - kubesphere/ks-console@sha256:f31a7fd617e47aaf912ddf1528c9e777c132f4ca2ac965e07b52d5f93780d3be
    - kubesphere/ks-console:v3.0.0
    sizeBytes: 95527402
  - names:
    - kubesphere/kube-events-operator@sha256:6eeb75c24c40701fec516bc689d7ffdc266b5ca676805c14146ee0c540cd1fba
    - kubesphere/kube-events-operator:v0.1.0
    sizeBytes: 91102914
  - names:
    - haproxy@sha256:cd4b3d4d27ae5931dc96b9632188590b7a6880469bcf07f478a3280dd0955336
    - haproxy:2.0.4
    sizeBytes: 91090911
  - names:
    - kubesphere/elasticsearch-curator@sha256:0fdc68b2a211f753238f9d54734b331141a9ade5bf31eef801ea0d056c9ab1c1
    - kubesphere/elasticsearch-curator:v5.7.6
    sizeBytes: 87443900
  - names:
    - kubespheredev/porter@sha256:2d5aa6cbec508200cd63a6e7a63536bd2b55dd0d0463c3754f6af8af4b67f716
    - kubespheredev/porter:v0.4.1
    sizeBytes: 83125979
  - names:
    - kubesphere/ks-controller-manager@sha256:1ae2f2880d254aa0339b18598eb946d397cac62941a4022232dab2a148bdacdb
    - kubesphere/ks-controller-manager:v3.0.0
    sizeBytes: 82028596
  - names:
    - istio/mixer@sha256:faeb4f2e591ddc89d74d520cb041256c25ff8efbf6eead1f4f36f3233c2bf228
    - istio/mixer:1.4.8
    sizeBytes: 80468615
  - names:
    - kubesphere/fluent-bit@sha256:1007b7cb7090435bf5b5d04f07cf6982d841597218fb67e291b2606c4e25b3e2
    - kubesphere/fluent-bit:v1.4.6
    sizeBytes: 71817557
  - names:
    - kubesphere/kube-auditing-operator@sha256:79779ad9da1b581d39c212cbcf2fa6401420149685371d5747f397dd3ab5cf7b
    - kubesphere/kube-auditing-operator:v0.1.0
    sizeBytes: 66671459
  - names:
    - kubesphere/alert-adapter@sha256:f6fa855a0675b638da08312409ead5138528beb11b630e3769835409ff500eba
    - kubesphere/alert-adapter:v3.0.0
    sizeBytes: 66503618
  - names:
    - minio/minio@sha256:cdc1ad6ee7b16b35a8c26c00e7bed0482f21bcd8ce841813fb91600f7d7f2faf
    - minio/minio:RELEASE.2019-08-07T01-59-21Z
    sizeBytes: 61341765
  - names:
    - kubesphere/notification@sha256:a954bf161a0861539fca9081a426cacd1f3e23eb55e7e33847d230786c78fa3d
    - kubesphere/notification:v2.1.2
    sizeBytes: 59315142
  - names:
    - prom/alertmanager@sha256:24a5204b418e8fa0214cfb628486749003b039c279c56b5bddb5b10cd100d926
    - prom/alertmanager:v0.21.0
    sizeBytes: 55508800
  - names:
    - kubesphere/metrics-server@sha256:c0efe772bb9e5c289db6cc4bc2002c268507d0226f2a3815f7213e00261c38e9
    - kubesphere/metrics-server:v0.3.7
    sizeBytes: 55367782
  - names:
    - calico/kube-controllers@sha256:f93dfb2c74e470f749165e0506f780cadf517615acb694286e034cb74650dbe6
    - calico/kube-controllers:v3.15.1
    sizeBytes: 53093019
  - names:
    - kubesphere/log-sidecar-injector@sha256:bc8575a5a0c187aa25b2c5c5557aa088891efec49de6dabe5841c4419a5acf56
    - kubesphere/log-sidecar-injector:1.1
    sizeBytes: 51649715
  - names:
    - kubesphere/notification-manager@sha256:f2db8c527b96f316353a72939b9a6815a88c07a4ffde5cbda5d26061d0f65ea1
    - kubesphere/notification-manager:v0.1.0
    sizeBytes: 47454851
  - names:
    - quay.io/external_storage/nfs-client-provisioner@sha256:cdbccbf53d100b36eae744c1cb07be3d0d22a8e64bb038b7a3808dd29c174661
    - quay.io/external_storage/nfs-client-provisioner:v3.1.0-k8s1.11
    sizeBytes: 45478559
  - names:
    - jettech/kube-webhook-certgen@sha256:950833e19ade18cd389d647efb88992a7cc077abedef343fa59e012d376d79b7
    - jettech/kube-webhook-certgen:v1.5.1
    sizeBytes: 44692736
  - names:
    - jettech/kube-webhook-certgen@sha256:fb7c2cd46ccfdd2809165152ec3f2cd0ae1dfc74cc74671b1dea73f034a87acf
    - jettech/kube-webhook-certgen:v1.5.0
    sizeBytes: 44681334
  - names:
    - kubesphere/notification-manager-operator@sha256:55b54c19a4b880602354b1b28690e376ad9e7daf11e0b3ec6fce342f4497f864
    - kubesphere/notification-manager-operator:v0.1.0
    sizeBytes: 44438571
  - names:
    - jaegertracing/jaeger-query@sha256:98fe9322ba7473e5aa89f50b47d15c1ff076d3ab323e714dc7fbc5b2e416719d
    - jaegertracing/jaeger-query:1.17
    sizeBytes: 44007709
  nodeInfo:
    architecture: amd64
    bootID: 2844ff06-1a8c-437a-84b5-a6e3e894bef1
    containerRuntimeVersion: docker://20.10.5
    kernelVersion: 3.10.0-1160.21.1.el7.x86_64
    kubeProxyVersion: v1.18.8
    kubeletVersion: v1.18.8
    machineID: c0b5da70f53b43578ba788f5ef22d3f3
    operatingSystem: linux
    osImage: CentOS Linux 7 (Core)
    systemUUID: 72713842-4B39-45CE-65CA-AF25BD480308

The interface in your Eip is configured wrong. Read the docs again: it should be your real physical NIC, e.g. ens***.
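
(For reference: a quick way to confirm which NIC the Eip should point at is to ask the kernel which interface it uses to reach an address in the pool. The commands below are only a sketch; ens192 and the pool address are taken from the output above.)

# On any node: which interface is used to reach an address in the Eip pool?
ip route get 10.10.17.81
# expect something like "10.10.17.81 dev ens192 src 10.10.1x.xx" -> use interface: ens192

# Confirm the node address actually lives on that NIC
ip -4 addr show ens192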

  • pyfs replied to this post

    duanjiong

    https://porterlb.io/docs/getting-started/configuration/configure-ip-address-pools-using-eip/

    Installed porter with Helm, then tested following the official docs:

    apiVersion: network.kubesphere.io/v1alpha2
    kind: Eip
    metadata:
      name: porter-layer2-eip
    spec:
      address: 10.10.15.171-10.10.15.178
      protocol: layer2
      interface: ens192
      disable: false
    ---
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: porter-layer2
    spec:
      replicas: 2
      selector:
        matchLabels:
          app: porter-layer2
      template:
        metadata:
          labels:
            app: porter-layer2
        spec:
          containers:
            - image: luksa/kubia
              name: kubia
              ports:
                - containerPort: 8080
    ---
    apiVersion: v1
    kind: Service
    metadata:
      name: porter-layer2-svc
      annotations:
        lb.kubesphere.io/v1alpha1: porter
        protocol.porter.kubesphere.io/v1alpha1: layer2
        eip.porter.kubesphere.io/v1alpha2: porter-layer2-eip
    spec:
      selector:
        app: porter-layer2
      type: LoadBalancer
      ports:
        - name: http
          port: 80
          targetPort: 8080
      externalTrafficPolicy: Cluster
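
    (A minimal sketch of applying the manifests above and reading back the allocated address; the file name porter-layer2.yaml is an assumption.)

    kubectl apply -f porter-layer2.yaml
    # the EXTERNAL-IP column should show an address from porter-layer2-eip
    kubectl get svc porter-layer2-svc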

    The LB address currently allocated is 10.10.15.171.

    Access from a cluster node works fine: curl 10.10.15.171

    But access from outside the cluster always times out: curl: (7) Failed to connect to 10.10.15.171 port 80: Operation timed out

    Does the network your k8s cluster sits in restrict ARP? If not, start by capturing and analyzing the ARP packets you receive and send.
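
    (A sketch of that ARP check, assuming ens192 is the node NIC; eth0 below stands for whatever NIC the external client uses.)

    # On a cluster node: watch ARP requests/replies for the LB address
    tcpdump -i ens192 -nn arp and host 10.10.15.171

    # On the external client: trigger an ARP query and see which MAC answers
    arping -c 3 -I eth0 10.10.15.171
    ip neigh show 10.10.15.171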

    • pyfs replied to this post

      duanjiong

      Awkward... I know a bit of k8s but not networking, and the colleagues who know networking don't know k8s.

      Tough one!

        The problem has been solved.

        It was caused by the docker0 bridge subnet conflicting with the local network addresses.

        When kubekey created the cluster, it restored the docker daemon configuration to its defaults, which led to this awkward problem.
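
        (For anyone hitting the same thing: a sketch of pinning docker0 to a non-conflicting bridge CIDR via /etc/docker/daemon.json. The 172.31.0.1/24 range is only an example; pick one that overlaps neither the LAN nor the pod/service CIDRs.)

        # /etc/docker/daemon.json  (merge with whatever kubekey wrote there)
        {
          "bip": "172.31.0.1/24"
        }

        # restart docker so docker0 is re-created on the new subnet
        systemctl restart docker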