1. Building the Jenkins Image

1.1 Get the Jenkins war package

wget https://get.jenkins.io/war-stable/2.346.3/jenkins.war
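
The Dockerfile below adds the archive as jenkins-2.319.2.war, so the downloaded file has to carry that name. A minimal sketch, assuming the standard get.jenkins.io war-stable layout for the 2.319.2 LTS release:

# Fetch the 2.319.2 LTS war under the filename the Dockerfile expects
wget -O jenkins-2.319.2.war https://get.jenkins.io/war-stable/2.319.2/jenkins.war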

1.2 Write the Dockerfile

# Jenkins Version 2.319.2
FROM harbor.intra.com/pub-images/jdk-base:v8.212

ADD jenkins-2.319.2.war /apps/jenkins/jenkins.war
ADD run_jenkins.sh /usr/bin/


EXPOSE 8080 

CMD ["/usr/bin/run_jenkins.sh"]

run_jenkins.sh

#!/bin/bash
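# Run Jenkins in the foreground so the container's main process stays alive; --webroot points at the directory that is later mounted from a PVC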
cd /apps/jenkins && java -server -Xms1024m -Xmx1024m -Xss512k -jar jenkins.war --webroot=/apps/jenkins/jenkins-data --httpPort=8080
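
ADD keeps the file mode from the build context and the Dockerfile has no chmod step, so make sure the launch script is executable before building:

chmod +x run_jenkins.sh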

1.3 Build script (build-command.sh)

#!/bin/bash
docker build -t harbor.intra.com/wework/jenkins:v2.319.2 .
echo "Image build complete, pushing to the Harbor registry"
sleep 1
docker push harbor.intra.com/wework/jenkins:v2.319.2
echo "Image push complete"

1.4 Build the Jenkins image

root@k8s-master-01:/opt/k8s-data/dockerfile/web/wework/jenkins# ./build-command.sh 
Sending build context to Docker daemon  72.25MB
Step 1/5 : FROM harbor.intra.com/pub-images/jdk-base:v8.212
 ---> 7c67b5ec4ce0
Step 2/5 : ADD jenkins-2.319.2.war /apps/jenkins/jenkins.war
 ---> cf6521e4609c
Step 3/5 : ADD run_jenkins.sh /usr/bin/
 ---> 7739c730bcc5
Step 4/5 : EXPOSE 8080
 ---> Running in 7a54bb8ee1d4
Removing intermediate container 7a54bb8ee1d4
 ---> 021de5204e27
Step 5/5 : CMD ["/usr/bin/run_jenkins.sh"]
 ---> Running in 8c7ccbfb8d50
Removing intermediate container 8c7ccbfb8d50
 ---> a9f3698a4f0c
Successfully built a9f3698a4f0c
Successfully tagged harbor.intra.com/wework/jenkins:v2.319.2
Image build complete, pushing to the Harbor registry
The push refers to repository [harbor.intra.com/wework/jenkins]
12f86e00e26b: Pushed 
c8e99d914e42: Pushed 
aadaa9679cb8: Mounted from wework/tomcat-app1 
fc305a4ba468: Mounted from wework/tomcat-app1 
ab93afc6a659: Mounted from wework/tomcat-app1 
d7f831641e18: Mounted from wework/tomcat-app1 
f4b52134c525: Mounted from wework/tomcat-app1 
0533300cca03: Mounted from wework/tomcat-app1 
30a12549c4a3: Mounted from wework/tomcat-app1 
ce1fb445c72c: Mounted from wework/tomcat-app1 
174f56854903: Mounted from wework/redis 
v2.319.2: digest: sha256:c1a2d3060e8b39f01ca8261bcf8c65f5b10960be11b3c32c22802a72493c60a8 size: 2628
Image push complete

1.5 Test the Jenkins image

Run the image we just built to do a quick sanity check that it works.

root@k8s-master-01:/opt/k8s-data/dockerfile/web/wework/jenkins# docker run -d --name jeknins -p 8080:8080 harbor.intra.com/wework/jenkins:v2.319.2
febd6864c0d6a994faf6b4ee3e1e2cc803090783f4a4abaa9ad75b06919c0e41
root@k8s-master-01:/opt/k8s-data/dockerfile/web/wework/jenkins# docker ps
CONTAINER ID   IMAGE                                           COMMAND                  CREATED          STATUS          PORTS                    NAMES
febd6864c0d6   harbor.intra.com/wework/jenkins:v2.319.2        "/usr/bin/run_jenkin…"   13 seconds ago   Up 12 seconds   0.0.0.0:8080->8080/tcp   jeknins
d954cd26ea3b   d45bf977dfbf                                    "start_runit"            24 hours ago     Up 24 hours                              k8s_calico-node_calico-node-jd8xr_kube-system_07751b5d-1af5-4d83-ba32-07f96bbef217_11
1447e8503433   harbor.intra.com/baseimages/pause-amd64:3.4.1   "/pause"                 24 hours ago     Up 24 hours                              k8s_POD_calico-node-jd8xr_kube-system_07751b5d-1af5-4d83-ba32-07f96bbef217_9
root@k8s-master-01:/opt/k8s-data/dockerfile/web/wework/jenkins# ss -tnlup|grep 8080
tcp   LISTEN   0        20480              0.0.0.0:8080           0.0.0.0:*      users:(("docker-proxy",pid=40462,fd=4)) 
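
As an extra check beyond the listening socket (not part of the original output), you can hit the Jenkins login page and then remove the throw-away container:

# Expect an HTTP status line once Jenkins has finished starting; a 503 means it is still initializing
curl -sI http://127.0.0.1:8080/login | head -n1
# Remove the test container before moving on to the Kubernetes deployment
docker rm -f jeknins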

2. Deploying Jenkins on Kubernetes

Two directories need to be persisted:
/root/.jenkins
/apps/jenkins/jenkins-data

2.1 Prepare the NFS directories for the PVs

root@haproxy-1:~# mkdir /data/k8s/wework/jenkins-data
root@haproxy-1:~# mkdir /data/k8s/wework/jenkins-root-data
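
If the parent directory is not already exported (earlier parts of this setup suggest it is), an export entry along these lines would be needed; the subnet and mount options are assumptions, adjust them to your environment:

# Export the parent directory to the lab subnet and reload the export table (assumed subnet/options)
echo '/data/k8s 192.168.31.0/24(rw,no_root_squash,no_subtree_check)' >> /etc/exports
exportfs -r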

2.2 Create the PVs

PersistentVolumes are cluster-scoped and therefore take no namespace; the two volumes below point at the NFS directories created above.

jenkins-persistentvolume.yaml

---
apiVersion: v1
kind: PersistentVolume
metadata:
  name: jenkins-datadir-pv
spec:
  capacity:
    storage: 100Gi
  accessModes:
    - ReadWriteOnce
  nfs:
    server: 192.168.31.109
    path: /data/k8s/wework/jenkins-data 

---
apiVersion: v1
kind: PersistentVolume
metadata:
  name: jenkins-root-datadir-pv
spec:
  capacity:
    storage: 100Gi
  accessModes:
    - ReadWriteOnce
  nfs:
    server: 192.168.31.109
    path: /data/k8s/wework/jenkins-root-data

Apply the PV manifest:

root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins/pv# kubectl apply -f jenkins-persistentvolume.yaml 
persistentvolume/jenkins-datadir-pv created
persistentvolume/jenkins-root-datadir-pv created
root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins/pv# kubectl get pv
NAME                      CAPACITY   ACCESS MODES   RECLAIM POLICY   STATUS      CLAIM                            STORAGECLASS   REASON   AGE
jenkins-datadir-pv        100Gi      RWO            Retain           Available                                                            6s
jenkins-root-datadir-pv   100Gi      RWO            Retain           Available                                                            6s

2.3 Create the PVCs

jenkins-persistentvolumeclaim.yaml

---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: jenkins-datadir-pvc
  namespace: wework
spec:
  volumeName: jenkins-datadir-pv
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 80Gi

---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: jenkins-root-data-pvc
  namespace: wework
spec:
  volumeName: jenkins-root-datadir-pv 
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 80Gi

Apply the PVC manifest:

root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins/pv# kubectl apply -f jenkins-persistentvolumeclaim.yaml 
persistentvolumeclaim/jenkins-datadir-pvc created
persistentvolumeclaim/jenkins-root-data-pvc created
root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins/pv# kubectl get pvc -n wework
NAME                      STATUS   VOLUME                    CAPACITY   ACCESS MODES   STORAGECLASS   AGE
data-mysql-0              Bound    mysql-datadir-1           50Gi       RWO                           17h
data-mysql-1              Bound    mysql-datadir-4           50Gi       RWO                           17h
data-mysql-2              Bound    mysql-datadir-6           50Gi       RWO                           17h
data-mysql-3              Bound    mysql-datadir-2           50Gi       RWO                           16h
jenkins-datadir-pvc       Bound    jenkins-datadir-pv        100Gi      RWO                           16s
jenkins-root-data-pvc     Bound    jenkins-root-datadir-pv   100Gi      RWO                           16s
zookeeper-datadir-pvc-1   Bound    zookeeper-datadir-pv-1    20Gi       RWO                           2d11h
zookeeper-datadir-pvc-2   Bound    zookeeper-datadir-pv-2    20Gi       RWO                           2d11h
zookeeper-datadir-pvc-3   Bound    zookeeper-datadir-pv-3    20Gi       RWO                           2d11h

2.4 Create the Jenkins Deployment and Service

jenkins.yaml

kind: Deployment
apiVersion: apps/v1
metadata:
  labels:
    app: wework-jenkins
  name: wework-jenkins-deployment
  namespace: wework
spec:
  replicas: 1
  selector:
    matchLabels:
      app: wework-jenkins
  template:
    metadata:
      labels:
        app: wework-jenkins
    spec:
      containers:
      - name: wework-jenkins-container
        image: harbor.intra.com/wework/jenkins:v2.319.2 
        imagePullPolicy: IfNotPresent
        #imagePullPolicy: Always
        ports:
        - containerPort: 8080
          protocol: TCP
          name: http
        volumeMounts:
        - mountPath: "/apps/jenkins/jenkins-data/"
          name: jenkins-datadir-wework
        - mountPath: "/root/.jenkins"
          name: jenkins-root-datadir
      volumes:
        - name: jenkins-datadir-wework
          persistentVolumeClaim:
            claimName: jenkins-datadir-pvc
        - name: jenkins-root-datadir
          persistentVolumeClaim:
            claimName: jenkins-root-data-pvc

---
kind: Service
apiVersion: v1
metadata:
  labels:
    app: wework-jenkins
  name: wework-jenkins-service
  namespace: wework
spec:
  type: NodePort
  ports:
  - name: http
    port: 80
    protocol: TCP
    targetPort: 8080
    nodePort: 38080
  selector:
    app: wework-jenkins
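
Note that nodePort 38080 lies outside the default NodePort range of 30000-32767, so this only works because the cluster's kube-apiserver runs with an enlarged --service-node-port-range. A quick way to confirm the running value on a master node:

# Print the node-port range flag the apiserver was started with (no output means the 30000-32767 default)
ps -ef | grep kube-apiserver | grep -v grep | grep -o -- '--service-node-port-range=[^ ]*'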

Apply the manifest:

root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl apply -f jenkins.yaml 
deployment.apps/wework-jenkins-deployment created
service/wework-jenkins-service created
root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl get pods -n wework
NAME                                         READY   STATUS    RESTARTS   AGE
mysql-0                                      2/2     Running   0          18m
mysql-1                                      2/2     Running   0          17m
mysql-2                                      2/2     Running   0          17m
mysql-3                                      2/2     Running   0          17m
wework-jenkins-deployment-5697fd66cf-p8cmg   1/1     Running   0          9s
zookeeper1-699d46468c-62nfk                  1/1     Running   0          12h
zookeeper2-7cc484778-fl594                   1/1     Running   0          12h
zookeeper3-cdf484f7c-bb9fr                   1/1     Running   0          12h
root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl get svc -n wework 
NAME                         TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)                                        AGE
mysql                        ClusterIP   None             <none>        3306/TCP                                       17h
mysql-read                   ClusterIP   10.200.195.5     <none>        3306/TCP                                       17h
wework-jenkins-service       NodePort    10.200.71.97     <none>        80:38080/TCP                                   40s
wework-nginx-service         NodePort    10.200.89.252    <none>        80:30090/TCP,443:30091/TCP                     3d19h
wework-tomcat-app1-service   ClusterIP   10.200.21.158    <none>        80/TCP                                         3d
zookeeper                    ClusterIP   10.200.117.19    <none>        2181/TCP                                       46h
zookeeper1                   NodePort    10.200.167.230   <none>        2181:32181/TCP,2888:31774/TCP,3888:56670/TCP   46h
zookeeper2                   NodePort    10.200.36.129    <none>        2181:32182/TCP,2888:46321/TCP,3888:30984/TCP   46h
zookeeper3                   NodePort    10.200.190.129   <none>        2181:32183/TCP,2888:61447/TCP,3888:51393/TCP   46h
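
Jenkins should now answer on port 38080 of every node. A quick check from the shell; the node IP below is a placeholder, substitute one of your own nodes:

# 192.168.31.111 is a placeholder for a worker-node IP in this lab network
curl -sI http://192.168.31.111:38080/login | head -n1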

3. Login Test

3.1 Get the initial admin password

root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl exec -it wework-jenkins-deployment-5697fd66cf-p8cmg -n wework bash
kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
[root@wework-jenkins-deployment-5697fd66cf-p8cmg /]# cat /root/.jenkins/secrets/initialAdminPassword
865212293721405bb0ddff114ee57f2f
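
Because /root/.jenkins is backed by jenkins-root-datadir-pv, the same password can also be read directly on the NFS server, under the path defined in the PV above:

# On the NFS server (haproxy-1): the Jenkins home lives in the jenkins-root-data export
cat /data/k8s/wework/jenkins-root-data/secrets/initialAdminPassword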

3.2 The pod is recreated automatically after deletion

Delete the Jenkins pod and the Deployment recreates it; because both data directories live on NFS-backed PVs, the new pod comes up with the existing configuration.

root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl get pods -n wework
NAME                                         READY   STATUS    RESTARTS   AGE
mysql-0                                      2/2     Running   0          41m
mysql-1                                      2/2     Running   0          40m
mysql-2                                      2/2     Running   0          40m
mysql-3                                      2/2     Running   0          40m
wework-jenkins-deployment-5697fd66cf-p8cmg   1/1     Running   0          23m
zookeeper1-699d46468c-62nfk                  1/1     Running   0          13h
zookeeper2-7cc484778-fl594                   1/1     Running   0          13h
zookeeper3-cdf484f7c-bb9fr                   1/1     Running   0          13h
root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl delete pods wework-jenkins-deployment-5697fd66cf-p8cmg -n wework
pod "wework-jenkins-deployment-5697fd66cf-p8cmg" deleted
root@k8s-master-01:/opt/k8s-data/yaml/web/wework/jenkins# kubectl get pods -n wework
NAME                                         READY   STATUS    RESTARTS   AGE
mysql-0                                      2/2     Running   0          56m
mysql-1                                      2/2     Running   0          56m
mysql-2                                      2/2     Running   0          55m
mysql-3                                      2/2     Running   0          55m
wework-jenkins-deployment-5697fd66cf-mw8dl   1/1     Running   0          49s
zookeeper1-699d46468c-62nfk                  1/1     Running   0          13h
zookeeper2-7cc484778-fl594                   1/1     Running   0          13h
zookeeper3-cdf484f7c-bb9fr                   1/1     Running   0          13h

The data is still there: the recreated pod mounts the same PVCs, so the Jenkins configuration and credentials survive the restart.
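
To verify this from the command line as well (not part of the original output), check that the new pod still sees the persisted Jenkins home:

# The pod name comes from the kubectl get pods output above
kubectl exec -n wework wework-jenkins-deployment-5697fd66cf-mw8dl -- ls /root/.jenkins/secrets/initialAdminPassword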
