pipeline {
    agent any

    environment {
        // Harbor registry coordinates. Image names are DERIVED from these two
        // values so a registry/project move needs exactly one edit (previously
        // the host:port was duplicated verbatim inside both image names).
        HARBOR_URL     = '192.168.108.200:80'
        HARBOR_PROJECT = 'library'
        IMAGE_TAG      = "${env.BUILD_NUMBER}"
        K8S_CRED_ID    = 'k8s-config'
        FRONTEND_IMAGE = "${HARBOR_URL}/${HARBOR_PROJECT}/scrum-frontend"
        BACKEND_IMAGE  = "${HARBOR_URL}/${HARBOR_PROJECT}/scrum-backend"
        // Workspace root IS the project root — no subdirectory needed
        K8S_OVERLAY    = 'k8s/overlays/on-premise'
    }

    options {
        buildDiscarder(logRotator(numToKeepStr: '10'))
        timeout(time: 30, unit: 'MINUTES')
        disableConcurrentBuilds()
    }

    stages {
        stage('Checkout') {
            steps {
                checkout scm
                echo "Workspace: ${env.WORKSPACE}"
                sh 'ls -la' // quick sanity check — confirm Dockerfile is here
            }
        }

        stage('Test') {
            parallel {
                stage('Backend Tests') {
                    steps {
                        dir('server') { // server/ relative to workspace root
                            // NOTE(review): '|| true' makes test failures non-blocking,
                            // so a red suite never fails the build — confirm this is
                            // intentional; if not, drop the '|| true'.
                            sh 'npm ci && npm test -- --reporter=verbose 2>&1 || true'
                        }
                    }
                }
                stage('Frontend Tests') {
                    steps {
                        // frontend lives at workspace root
                        sh 'npm ci && npm test -- --reporter=verbose 2>&1 || true'
                    }
                }
            }
        }

        stage('Build Images') {
            parallel {
                stage('Build Frontend') {
                    steps {
                        // Dockerfile is at workspace root
                        sh """
                            docker build \
                                -f Dockerfile \
                                -t ${FRONTEND_IMAGE}:${IMAGE_TAG} \
                                -t ${FRONTEND_IMAGE}:latest \
                                .
                        """
                    }
                }
                stage('Build Backend') {
                    steps {
                        dir('server') { // server/Dockerfile
                            sh """
                                docker build \
                                    -f Dockerfile \
                                    -t ${BACKEND_IMAGE}:${IMAGE_TAG} \
                                    -t ${BACKEND_IMAGE}:latest \
                                    .
                            """
                        }
                    }
                }
            }
        }

        stage('Push to Harbor') {
            steps {
                withCredentials([usernamePassword(
                    credentialsId: 'harbor-creds',
                    usernameVariable: 'HARBOR_USER',
                    passwordVariable: 'HARBOR_PASS'
                )]) {
                    // \$ escapes defer expansion to the shell so the secret is
                    // read from the environment (and --password-stdin keeps it
                    // out of the process list).
                    sh """
                        echo \$HARBOR_PASS | docker login ${HARBOR_URL} -u \$HARBOR_USER --password-stdin
                        docker push ${FRONTEND_IMAGE}:${IMAGE_TAG}
                        docker push ${FRONTEND_IMAGE}:latest
                        docker push ${BACKEND_IMAGE}:${IMAGE_TAG}
                        docker push ${BACKEND_IMAGE}:latest
                    """
                }
            }
        }

        stage('Patch Image Tags') {
            steps {
                dir("${K8S_OVERLAY}") {
                    // Point the overlay's kustomization at this build's tags.
                    sh """
                        kustomize edit set image \
                            scrum-frontend=${FRONTEND_IMAGE}:${IMAGE_TAG} \
                            scrum-backend=${BACKEND_IMAGE}:${IMAGE_TAG}
                    """
                }
            }
        }

        stage('Deploy to K8s') {
            steps {
                withKubeConfig([credentialsId: "${K8S_CRED_ID}"]) {
                    sh "kubectl apply -k ${K8S_OVERLAY}"
                    // rollout status exits non-zero on timeout, failing the stage.
                    sh "kubectl rollout status deployment/mysql -n scrum-manager --timeout=120s"
                    sh "kubectl rollout status deployment/backend -n scrum-manager --timeout=120s"
                    sh "kubectl rollout status deployment/frontend -n scrum-manager --timeout=120s"
                    echo "✅ All deployments rolled out."
                }
            }
        }

        stage('Smoke Test') {
            steps {
                withKubeConfig([credentialsId: "${K8S_CRED_ID}"]) {
                    // Throwaway curl pod hits the backend health endpoint;
                    // result is echoed but deliberately non-blocking.
                    sh """
                        kubectl run smoke-${BUILD_NUMBER} \
                            --image=curlimages/curl:latest \
                            --restart=Never \
                            --rm \
                            --attach \
                            -n scrum-manager \
                            -- curl -sf http://backend:3001/api/health \
                            && echo "Health check PASSED" \
                            || echo "Health check FAILED (non-blocking)"
                    """
                }
            }
        }

        stage('Clean Up') {
            steps {
                // Free agent disk; images remain available in Harbor.
                sh """
                    docker rmi ${FRONTEND_IMAGE}:${IMAGE_TAG} || true
                    docker rmi ${FRONTEND_IMAGE}:latest || true
                    docker rmi ${BACKEND_IMAGE}:${IMAGE_TAG} || true
                    docker rmi ${BACKEND_IMAGE}:latest || true
                """
            }
        }
    }

    post {
        success {
            echo "✅ Build #${env.BUILD_NUMBER} deployed → http://scrum.local"
        }
        failure {
            echo "❌ Pipeline failed. Check stage logs above."
        }
        always {
            sh "docker logout ${HARBOR_URL} || true"
        }
    }
}
```

**What changed and why:** The core fix is that `APP_DIR = 'scrum-manager'` was removed entirely.
Your Jenkins job clones the repo and the workspace root lands directly inside the project, giving you this layout: ``` $WORKSPACE/ ├── Dockerfile ← frontend image ├── server/ │ └── Dockerfile ← backend image └── k8s/overlays/...