The Question
Builds are taking 45 minutes and blocking deployments. Optimize to under 10 minutes.
The Scenario
Your team’s Jenkins pipeline takes 45 minutes to complete:
[Pipeline] Stage: Checkout - 2 min
[Pipeline] Stage: Install Deps - 8 min
[Pipeline] Stage: Build - 7 min
[Pipeline] Stage: Unit Tests - 10 min
[Pipeline] Stage: Integration Tests - 8 min
[Pipeline] Stage: Security Scan - 3 min
[Pipeline] Stage: Docker Build - 5 min
[Pipeline] Stage: Push & Deploy - 2 min
Total: ~45 minutes
Developers are frustrated. They push code and wait nearly an hour for feedback. Deployment frequency has dropped because the pipeline is a bottleneck.
The Challenge
Optimize the pipeline to complete in under 10 minutes while maintaining test coverage and security scanning.
A junior engineer might skip tests to save time, throw more hardware at the problem, or remove security scans entirely. These approaches create technical debt, waste money, and introduce security vulnerabilities that cost far more than the time saved.
A senior engineer analyzes each stage to identify parallelization opportunities, implements caching at every layer, optimizes resource-intensive operations, and restructures the pipeline for maximum efficiency without sacrificing quality.
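The arithmetic behind that approach is worth making explicit. In a serial pipeline, wall time is the sum of all stages; once independent stages run concurrently, it is bounded by the longest dependency chain. Here is a rough sketch of that calculation, using the stage durations from the scenario (a back-of-envelope estimate, not measured output):

// Serial wall time is the sum of stages; the parallel rewrite is
// bounded by the critical path. Durations in minutes, taken from
// the scenario above.
def stages = [checkout: 2, install: 8, build: 7, unitTests: 10,
              integration: 8, securityScan: 3, dockerBuild: 5, deploy: 2]
def serial = stages.values().sum()                        // 45
// critical path: checkout -> install -> slowest of the three
// independent stages -> integration -> docker build -> deploy
def floor = 2 + 8 + [7, 10, 3].max() + 8 + 5 + 2          // 35
println "serial: ${serial} min, parallelization alone: ${floor} min"

Parallelization alone only gets the pipeline to roughly 35 minutes. The rest of the gap to the 10-minute target has to come from caching, selective testing, and faster Docker builds, which is exactly what the steps below layer on.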
Step 1: Analyze Current Pipeline
// Add timing to identify bottlenecks
def timings = [:]
def timedStage(String name, Closure body) {
def start = System.currentTimeMillis()
stage(name) {
body()
}
def duration = System.currentTimeMillis() - start
timings[name] = duration
echo "Stage '${name}' took ${duration/1000}s"
}
pipeline {
agent any
stages {
stage('Timed Build') {
steps {
script {
timedStage('Checkout') { checkout scm }
timedStage('Install') { sh 'npm ci' }
timedStage('Build') { sh 'npm run build' }
timedStage('Test') { sh 'npm test' }
}
}
}
}
post {
always {
script {
echo "=== Build Timing Summary ==="
timings.each { stage, ms ->
echo "${stage}: ${ms/1000}s"
}
}
}
}
}

Step 2: Parallelize Independent Stages
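Each parallel branch runs on its own agent, so the workspace is stashed once at checkout and unstashed per branch. Stashing '**' also ships the .git directory to every branch, which can dominate transfer time on a large repo. A hedged refinement (the excludes list is an assumption; trim it to what your branches actually need):

// Checkout stage with heavyweight paths kept out of the stash
// (assumes the parallel branches need the working tree, not git
// history or local caches)
stage('Checkout') {
    steps {
        checkout scm
        stash includes: '**',
              excludes: '.git/**,node_modules/**,.npm/**',
              name: 'source'
    }
}

With that in mind, the restructured pipeline: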
pipeline {
agent { label 'docker' }
stages {
stage('Checkout') {
steps {
checkout scm
stash includes: '**', name: 'source'
}
}
stage('Parallel Build & Test') {
parallel {
stage('Build') {
agent { label 'docker' }
steps {
unstash 'source'
sh 'npm ci --prefer-offline'
sh 'npm run build'
stash includes: 'dist/**', name: 'build'
}
}
stage('Unit Tests') {
agent { label 'docker' }
steps {
unstash 'source'
sh 'npm ci --prefer-offline'
sh 'npm run test:unit -- --maxWorkers=4'
}
}
stage('Lint & Type Check') {
agent { label 'docker' }
steps {
unstash 'source'
sh 'npm ci --prefer-offline'
sh '''
  # wait on each PID: a bare `wait` exits 0 even when a job failed
  npm run lint & LINT=$!
  npm run typecheck & TYPE=$!
  wait $LINT
  wait $TYPE
'''
}
}
stage('Security Scan') {
agent { label 'docker' }
steps {
unstash 'source'
sh 'trivy fs --exit-code 1 --severity HIGH,CRITICAL .'
}
}
}
}
stage('Integration Tests') {
agent { label 'docker' }
steps {
unstash 'source'
sh 'npm ci --prefer-offline'
sh 'npm run test:integration'
}
}
stage('Docker Build & Push') {
agent { label 'docker' }
steps {
unstash 'build'
sh '''
  # tag with the full registry path so the push target exists
  docker build -t registry.company.com/app:${GIT_COMMIT} .
  docker push registry.company.com/app:${GIT_COMMIT}
'''
}
}
}
}

Step 3: Implement Aggressive Caching
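The install stage below keys its cache on an md5 of package-lock.json, so every lockfile change creates a new directory on the persistent volume; without eviction, the claim eventually fills up. A minimal cleanup sketch, assuming the same mount path and that entries untouched for two weeks are safe to drop (both assumptions, tune them to your churn):

stage('Prune node_modules cache') {
    steps {
        container('node') {
            // remove cache entries not touched in 14 days
            sh '''
                find /home/jenkins/workspace/node_modules_cache \
                    -maxdepth 1 -mindepth 1 -mtime +14 \
                    -exec rm -rf {} +
            '''
        }
    }
}

The caching pipeline itself: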
pipeline {
agent {
kubernetes {
yaml '''
apiVersion: v1
kind: Pod
spec:
containers:
- name: node
image: node:18
command: ['sleep', 'infinity']
volumeMounts:
- name: npm-cache
mountPath: /root/.npm
- name: node-modules-cache
mountPath: /home/jenkins/workspace/node_modules_cache
volumes:
- name: npm-cache
persistentVolumeClaim:
claimName: npm-cache-pvc
- name: node-modules-cache
persistentVolumeClaim:
claimName: node-modules-cache-pvc
'''
}
}
stages {
stage('Install with Cache') {
steps {
container('node') {
script {
// Check if node_modules cache exists and matches package-lock.json
def lockHash = sh(
script: 'md5sum package-lock.json | cut -d" " -f1',
returnStdout: true
).trim()
def cacheDir = "/home/jenkins/workspace/node_modules_cache/${lockHash}"
if (fileExists(cacheDir)) {
sh "cp -r ${cacheDir}/node_modules ."
echo "Restored node_modules from cache"
} else {
sh 'npm ci'
sh "mkdir -p ${cacheDir} && cp -r node_modules ${cacheDir}/"
echo "Cached node_modules for future builds"
}
}
}
}
}
}
}

Step 4: Optimize Docker Builds
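Layer caching only pays off when the build context is stable: every `COPY . .` invalidates downstream layers if stray files change. A .dockerignore keeps node_modules, build output, and git metadata out of the context (the exact list is an assumption; match it to the repo):

# .dockerignore (illustrative)
node_modules
dist
coverage
.git
*.log

With the context trimmed, the multi-stage Dockerfile: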
# Optimized Dockerfile with multi-stage build and layer caching
# Stage 1: Dependencies (cached unless package.json changes)
FROM node:18-alpine AS deps
WORKDIR /app
COPY package*.json ./
RUN npm ci --omit=dev
# Stage 2: Build (cached unless source changes)
FROM node:18-alpine AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build
# Stage 3: Production image (minimal)
FROM node:18-alpine AS runner
WORKDIR /app
ENV NODE_ENV=production
COPY --from=deps /app/node_modules ./node_modules
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/package.json ./
USER node
CMD ["node", "dist/index.js"]

// Use BuildKit for faster Docker builds
stage('Docker Build') {
steps {
sh '''
  export DOCKER_BUILDKIT=1
  docker build \
    --cache-from registry.company.com/app:latest \
    --build-arg BUILDKIT_INLINE_CACHE=1 \
    -t registry.company.com/app:${GIT_COMMIT} \
    -t registry.company.com/app:latest \
    .
  # push both tags: :latest carries the inline cache metadata that
  # the next build's --cache-from will pull from the registry
  docker push registry.company.com/app:${GIT_COMMIT}
  docker push registry.company.com/app:latest
'''
}
}

Step 5: Optimize Test Execution
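One caveat before the code: the stage diffs against HEAD~1 to scope the test run, which requires at least one parent commit in local history. With a depth-1 shallow clone (as Step 6 uses), HEAD~1 does not exist, so deepen the history first. A hedged guard, not part of the original stage:

// make sure HEAD~1 is available before diffing; the `|| true`
// keeps this harmless when history is already complete
sh 'git fetch --deepen=1 origin || true'

The selective test stage: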
stage('Smart Testing') {
steps {
script {
// Get changed files
def changedFiles = sh(
script: 'git diff --name-only HEAD~1',
returnStdout: true
).trim().split('\n')
// Determine which tests to run
def testScope = 'all'
if (changedFiles.every { it.startsWith('docs/') }) {
testScope = 'none'
echo "Only docs changed, skipping tests"
} else if (changedFiles.every { it.startsWith('src/components/') }) {
testScope = 'unit'
echo "Only components changed, running unit tests only"
}
if (testScope == 'all' || testScope == 'unit') {
sh '''
# Run tests in parallel with optimal workers
npm run test:unit -- \
--maxWorkers=50% \
--bail \
--changedSince=HEAD~1
'''
}
if (testScope == 'all') {
sh 'npm run test:integration'
}
}
}
}

Step 6: Complete Optimized Pipeline
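Two options here earn their keep: skipDefaultCheckout avoids an implicit clone on every per-stage agent, and parallelsAlwaysFailFast aborts sibling branches as soon as one fails. A pipeline-level timeout is a natural companion, so a hung stage cannot hold the queue; the 15-minute budget below is an assumption, sized just above the 10-minute target:

options {
    skipDefaultCheckout()
    parallelsAlwaysFailFast()
    // abort builds that blow the budget instead of blocking the queue
    timeout(time: 15, unit: 'MINUTES')
}

The complete pipeline: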
pipeline {
agent none // Use agents per-stage for parallelization
options {
skipDefaultCheckout()
parallelsAlwaysFailFast()
}
stages {
stage('Checkout') {
agent { label 'lightweight' }
steps {
checkout([
$class: 'GitSCM',
branches: [[name: env.GIT_BRANCH]],
extensions: [
[$class: 'CloneOption', depth: 1, shallow: true],
[$class: 'CleanBeforeCheckout']
],
userRemoteConfigs: [[url: env.GIT_URL, credentialsId: 'git-creds']]
])
stash includes: '**', name: 'source'
}
}
stage('Parallel Execution') {
parallel {
stage('Build & Unit Tests') {
agent { label 'docker-large' }
steps {
unstash 'source'
sh '''
  npm ci --prefer-offline --cache .npm
  # run build and unit tests concurrently, waiting on each PID so a
  # failure in either fails the stage (a bare `wait` always exits 0)
  npm run build & BUILD=$!
  npm run test:unit -- --maxWorkers=4 & TEST=$!
  wait $BUILD
  wait $TEST
'''
stash includes: 'dist/**', name: 'build'
}
}
stage('Security & Lint') {
agent { label 'docker' }
steps {
unstash 'source'
sh '''
  npm ci --prefer-offline --cache .npm
  # wait per-PID so a lint or scan failure fails the stage
  npm run lint & LINT=$!
  trivy fs --exit-code 1 --severity CRITICAL . & SCAN=$!
  wait $LINT
  wait $SCAN
'''
}
}
}
}
stage('Integration Tests') {
agent { label 'docker-large' }
steps {
unstash 'source'
sh '''
npm ci --prefer-offline --cache .npm
npm run test:integration -- --maxWorkers=2
'''
}
}
stage('Docker Build & Deploy') {
agent { label 'docker' }
steps {
unstash 'build'
sh '''
  export DOCKER_BUILDKIT=1
  # build with the registry-qualified tag so the push below matches,
  # caching from the previously pushed :latest
  docker build --cache-from registry.company.com/app:latest \
    --build-arg BUILDKIT_INLINE_CACHE=1 \
    -t registry.company.com/app:${GIT_COMMIT} \
    -t registry.company.com/app:latest .
  docker push registry.company.com/app:${GIT_COMMIT}
  docker push registry.company.com/app:latest
  kubectl set image deployment/app app=registry.company.com/app:${GIT_COMMIT}
'''
}
}
}
}

Optimization Results
| Stage | Before | After | Technique |
|---|---|---|---|
| Checkout | 2 min | 15 sec | Shallow clone |
| Install Deps | 8 min | 30 sec | npm cache + PVC |
| Build | 7 min | 2 min | Parallel + cache |
| Unit Tests | 10 min | 2 min | Parallel workers |
| Integration Tests | 8 min | 3 min | Selective execution |
| Security Scan | 3 min | 1 min | Run in parallel |
| Docker Build | 5 min | 1 min | BuildKit + layer cache |
| Push & Deploy | 2 min | 2 min | Unchanged |
| Total (wall clock) | 45 min | ~8 min | Combined |

Because several stages now run concurrently, the After column sums to more than the wall-clock total: parallel branches overlap rather than add.
Practice Question
Which optimization technique typically provides the biggest improvement for npm-based build pipelines?