Questions
Credentials are hardcoded in pipelines, and build logs expose secrets. Implement secure credential management.
The Scenario
During a security audit, you discover critical issues:
// Found in production Jenkinsfile
pipeline {
    environment {
        DB_PASSWORD = 'prod_p@ssw0rd123!' // Hardcoded secret!
        AWS_ACCESS_KEY = 'AKIAIOSFODNN7EXAMPLE'
    }
    stages {
        stage('Deploy') {
            steps {
                sh '''
                    echo "Connecting with password: $DB_PASSWORD"
                    mysql -u admin -p$DB_PASSWORD -h prod-db.company.com
                '''
            }
        }
    }
}
Console logs show:
[Pipeline] echo
Connecting with password: prod_p@ssw0rd123!
The security team is demanding immediate remediation. You need to secure credentials across 200+ jobs.
The Challenge
Implement a comprehensive credential management solution that secures all secrets, prevents exposure in logs, and integrates with enterprise secret management tools.
A junior engineer might just move secrets to environment variables in the Jenkins UI, use base64 encoding thinking it is encryption, or add a find-and-replace to mask passwords in logs after the fact. These approaches are still insecure: environment variables set in the UI are visible to anyone with admin access, base64 is encoding rather than encryption, and masking a log after the secret has already been printed is too late.
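To see why base64 offers no protection, note that the hardcoded password from the Jenkinsfile above can be recovered from its encoded form with a single command. A minimal shell sketch:

# base64 is reversible encoding, not encryption
printf '%s' 'prod_p@ssw0rd123!' | base64            # -> cHJvZF9wQHNzdzByZDEyMyE=
printf '%s' 'cHJvZF9wQHNzdzByZDEyMyE=' | base64 -d  # -> prod_p@ssw0rd123!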
A senior engineer implements proper secret management: the Jenkins credentials store with appropriate scopes, integration with an external secret manager such as HashiCorp Vault, masking of secrets in all output, automated credential rotation, and audit logging for every credential access. The steps below walk through each piece.
Step 1: Use Jenkins Credentials Store Properly
pipeline {
    agent any
    environment {
        // Bind credentials to environment variables - automatically masked
        DB_CREDS = credentials('production-db-credentials')
        // For username/password credentials, creates:
        //   DB_CREDS_USR - username
        //   DB_CREDS_PSW - password (masked in logs)
    }
    stages {
        stage('Deploy') {
            steps {
                // Password is automatically masked as ****
                sh '''
                    mysql -u $DB_CREDS_USR -p$DB_CREDS_PSW -h prod-db.company.com
                '''
            }
        }
    }
}
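The credentials() helper also works for other credential types; for a "secret text" credential it binds a single variable that is likewise masked in the console log. A minimal sketch, with a placeholder credential ID and URL:

pipeline {
    agent any
    environment {
        // Secret text credential: one variable, masked in console output
        API_TOKEN = credentials('example-api-token')
    }
    stages {
        stage('Call API') {
            steps {
                // Single quotes: the shell expands $API_TOKEN, not Groovy
                sh 'curl -fsS -H "Authorization: Bearer $API_TOKEN" https://api.example.com/health'
            }
        }
    }
}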
Step 2: Use withCredentials for Scoped Access
pipeline {
    agent any
    stages {
        stage('Deploy to AWS') {
            steps {
                // Credentials only available in this block
                withCredentials([
                    usernamePassword(
                        credentialsId: 'aws-credentials',
                        usernameVariable: 'AWS_ACCESS_KEY_ID',
                        passwordVariable: 'AWS_SECRET_ACCESS_KEY'
                    ),
                    string(
                        credentialsId: 'deploy-token',
                        variable: 'DEPLOY_TOKEN'
                    ),
                    file(
                        credentialsId: 'kubeconfig-prod',
                        variable: 'KUBECONFIG'
                    )
                ]) {
                    sh '''
                        aws s3 cp dist/ s3://my-bucket/ --recursive
                        kubectl --kubeconfig=$KUBECONFIG apply -f k8s/
                    '''
                }
                // Credentials are no longer available here
            }
        }
    }
}
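One habit to keep regardless of which binding is used: keep the sh script single-quoted, as above. If a Groovy double-quoted string interpolates the secret, the value is baked into the generated command before the shell ever runs, which recent Jenkins versions flag with an insecure-interpolation warning and which can leak the secret despite masking. A short illustration using the token binding from above (the URL is a placeholder):

// Insecure: Groovy interpolation puts the secret into the generated command
sh "curl -H 'Authorization: Bearer $DEPLOY_TOKEN' https://deploy.example.com"

// Safer: single quotes pass the literal $DEPLOY_TOKEN to the shell, which expands it at runtime
sh 'curl -H "Authorization: Bearer $DEPLOY_TOKEN" https://deploy.example.com'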
Step 3: Integrate with HashiCorp Vault
// Install: HashiCorp Vault Plugin
pipeline {
    agent any
    stages {
        stage('Deploy with Vault Secrets') {
            steps {
                withVault(
                    configuration: [
                        vaultUrl: 'https://vault.company.com',
                        vaultCredentialId: 'vault-approle'
                    ],
                    vaultSecrets: [
                        [
                            path: 'secret/data/production/database',
                            secretValues: [
                                [envVar: 'DB_HOST', vaultKey: 'host'],
                                [envVar: 'DB_USER', vaultKey: 'username'],
                                [envVar: 'DB_PASS', vaultKey: 'password']
                            ]
                        ],
                        [
                            path: 'secret/data/production/api-keys',
                            secretValues: [
                                [envVar: 'STRIPE_KEY', vaultKey: 'stripe_secret']
                            ]
                        ]
                    ]
                ) {
                    sh '''
                        # Secrets are available and automatically masked
                        ./deploy.sh --db-host=$DB_HOST --db-user=$DB_USER
                    '''
                }
            }
        }
    }
}
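As an aside on where these secrets come from: with KV version 2 the plugin uses the API-style path including data/, while the Vault CLI omits it. A sketch of seeding the paths read above; all values are placeholders to adapt:

# Write the database secret consumed by the pipeline above (KV v2: CLI path drops the data/ segment)
vault kv put secret/production/database \
    host=prod-db.company.com \
    username=app_user \
    password="$(openssl rand -base64 32)"

# Write the API key secret (placeholder value)
vault kv put secret/production/api-keys stripe_secret='sk_live_...'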
Step 4: Implement Dynamic Secrets with Vault
// vars/withDynamicDbCredentials.groovy
def call(Map config, Closure body) {
    def vaultPath = config.vaultPath ?: 'database/creds/app-role'
    // NOTE: the actual lease TTL is set on the Vault database role;
    // this value is only used for the log message below
    def ttl = config.ttl ?: '1h'
    withVault(
        configuration: [
            vaultUrl: env.VAULT_ADDR,
            vaultCredentialId: 'vault-approle'
        ],
        vaultSecrets: [[
            path: vaultPath,
            secretValues: [
                [envVar: 'DB_USER', vaultKey: 'username'],
                [envVar: 'DB_PASS', vaultKey: 'password']
            ]
        ]]
    ) {
        echo "Using dynamic database credentials (TTL: ${ttl})"
        body()
    }
    // Credentials automatically expire after the lease TTL
}

// Usage in a pipeline
stage('Database Migration') {
    steps {
        script {
            withDynamicDbCredentials(ttl: '15m') {
                sh './run-migrations.sh'
            }
        }
    }
}
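This pattern assumes the Vault database secrets engine already has a role at the path used above; each read of database/creds/app-role then returns a freshly created database user that Vault revokes when the lease expires. A minimal setup sketch with the Vault CLI, where connection names, grants, and TTLs are placeholders:

# Enable the database secrets engine and register the MySQL connection
vault secrets enable database

vault write database/config/prod-mysql \
    plugin_name=mysql-database-plugin \
    connection_url='{{username}}:{{password}}@tcp(prod-db.company.com:3306)/' \
    allowed_roles="app-role" \
    username="vault-admin" \
    password='<initial-admin-password>'

# Define the role that the pipeline reads dynamic credentials from
vault write database/roles/app-role \
    db_name=prod-mysql \
    creation_statements="CREATE USER '{{name}}'@'%' IDENTIFIED BY '{{password}}'; GRANT SELECT, INSERT, UPDATE ON app.* TO '{{name}}'@'%';" \
    default_ttl="15m" \
    max_ttl="1h"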
Step 5: Prevent Secret Exposure in Logs
pipeline {
    agent any
    options {
        // Limit how long build logs (and anything leaked into them) are retained
        buildDiscarder(logRotator(numToKeepStr: '10'))
    }
    stages {
        stage('Safe Logging') {
            steps {
                script {
                    // Wrap sensitive operations (requires the Mask Passwords plugin)
                    wrap([$class: 'MaskPasswordsBuildWrapper']) {
                        withCredentials([string(credentialsId: 'api-key', variable: 'API_KEY')]) {
                            // Never echo secrets directly
                            sh '''
                                # Bad - might expose the key in error messages
                                # curl -H "Authorization: $API_KEY" https://api.example.com
                                # Good - pass the header via a temporary file
                                echo "Authorization: $API_KEY" > .auth-header
                                curl -H @.auth-header https://api.example.com
                                rm -f .auth-header
                            '''
                        }
                    }
                }
            }
        }
    }
}
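As defence in depth, it also helps to silence shell tracing inside sensitive sh steps: Jenkins runs them with set -x by default, so every command is echoed to the console after variable expansion. A small sketch (the deploy script and its flag are placeholders):

sh '''
    set +x   # stop echoing expanded commands into the console log
    ./deploy.sh --token "$API_KEY"
'''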
Step 6: Secure Pipeline Pattern
// vars/secureDeployment.groovy - Shared library function
def call(Map config) {
    def environment = config.environment
    def credentialPrefix = "deploy-${environment}"
    // Validate the caller has permission
    // NOTE: config.team is supplied by the caller, so treat this as a convention
    // check; real authorization belongs in folder/credential permissions
    def approvedTeams = ['platform', 'sre', 'devops']
    if (!approvedTeams.contains(config.team)) {
        error "Team ${config.team} is not authorized for deployments"
    }
    pipeline {
        agent { label 'secure-agents' }
        options {
            timeout(time: 30, unit: 'MINUTES')
        }
        stages {
            stage('Validate') {
                steps {
                    script {
                        // Verify the required credentials exist (with the expected types) before deploying
                        try {
                            withCredentials([
                                file(credentialsId: "${credentialPrefix}-kubeconfig", variable: 'CHECK_KUBECONFIG'),
                                usernamePassword(
                                    credentialsId: "${credentialPrefix}-registry",
                                    usernameVariable: 'CHECK_USER',
                                    passwordVariable: 'CHECK_PASS'
                                )
                            ]) {
                                echo "All required ${environment} deployment credentials are present"
                            }
                        } catch (Exception e) {
                            error "Missing or misconfigured deployment credential: ${e.message}"
                        }
                    }
                }
            }
            stage('Deploy') {
                steps {
                    withCredentials([
                        file(credentialsId: "${credentialPrefix}-kubeconfig", variable: 'KUBECONFIG'),
                        usernamePassword(
                            credentialsId: "${credentialPrefix}-registry",
                            usernameVariable: 'REGISTRY_USER',
                            passwordVariable: 'REGISTRY_PASS'
                        )
                    ]) {
                        // Expose the target environment to the shell without Groovy interpolation
                        withEnv(["ENVIRONMENT=${environment}"]) {
                            sh '''
                                # Login to the registry without putting the password on the command line
                                echo $REGISTRY_PASS | docker login -u $REGISTRY_USER --password-stdin
                                # Deploy using the bound kubeconfig
                                kubectl apply -f k8s/${ENVIRONMENT}/
                                # Cleanup
                                docker logout
                            '''
                        }
                    }
                }
            }
        }
        post {
            always {
                // Audit log (writeJSON requires the Pipeline Utility Steps plugin)
                script {
                    def auditEntry = [
                        timestamp: new Date().format("yyyy-MM-dd'T'HH:mm:ss'Z'"),
                        job: env.JOB_NAME,
                        build: env.BUILD_NUMBER,
                        user: currentBuild.getBuildCauses()[0]?.userId ?: 'system',
                        environment: environment,
                        result: currentBuild.result
                    ]
                    writeJSON file: 'audit.json', json: auditEntry
                    archiveArtifacts artifacts: 'audit.json'
                }
            }
        }
    }
}
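Because the pipeline block lives inside the shared library step, an application's Jenkinsfile reduces to a single call. A sketch, assuming the library is registered in Jenkins under a name like company-pipeline-library:

// Jenkinsfile
@Library('company-pipeline-library') _

secureDeployment(
    environment: 'production',
    team: 'platform'
)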
Step 7: Credential Rotation Script
// Jenkinsfile for credential rotation
pipeline {
    agent any
    triggers {
        cron('0 0 1 * *') // Monthly rotation
    }
    stages {
        stage('Rotate Credentials') {
            steps {
                script {
                    def credentialsToRotate = [
                        'production-db-password',
                        'staging-db-password',
                        'api-keys-production'
                    ]
                    credentialsToRotate.each { credId ->
                        echo "Rotating credential: ${credId}"
                        // Generate and store the new secret inside the shell, so the value
                        // never passes through Groovy string interpolation or the build log
                        withVault(configuration: [vaultUrl: env.VAULT_ADDR, vaultCredentialId: 'vault-admin']) {
                            withEnv(["CRED_ID=${credId}"]) {
                                sh '''
                                    set +x
                                    NEW_SECRET="$(openssl rand -base64 32)"
                                    vault kv put "secret/jenkins/${CRED_ID}" value="$NEW_SECRET"
                                '''
                            }
                        }
                        // NOTE: the consuming system (database, API provider) must also be updated
                        // to the new value; rotation is only complete once both sides match
                        echo "Rotated ${credId} successfully"
                    }
                }
            }
        }
    }
    post {
        success {
            // Requires the Slack Notification plugin
            slackSend(
                channel: '#security-ops',
                message: "Credential rotation completed successfully"
            )
        }
    }
}
Credential Security Checklist
| Risk | Mitigation | Implementation |
|---|---|---|
| Hardcoded secrets | Use credentials() binding | Replace all plaintext with credential IDs |
| Log exposure | MaskPasswordsBuildWrapper | Enable globally in Jenkins config |
| Broad access | Credential domains/folders | Scope credentials to specific folders |
| No rotation | Automated rotation jobs | Monthly rotation with Vault integration |
| No audit trail | Credential access logging | Enable audit plugin, send to SIEM |
Practice Question
What happens when you use the credentials() helper in the environment block of a Jenkins pipeline?