What is Jenkins?
Jenkins is an open-source automation server that enables developers to build, test, and deploy applications through continuous integration and continuous deployment (CI/CD) pipelines.
Jenkins Installation
Docker Installation
# Run Jenkins in Docker
docker run -d \
--name jenkins \
-p 8080:8080 \
-p 50000:50000 \
-v jenkins_home:/var/jenkins_home \
jenkins/jenkins:lts
# Get initial admin password
docker exec jenkins cat /var/jenkins_home/secrets/initialAdminPassword
Ubuntu Installation
# Add the Jenkins repository (apt-key is deprecated; use a signed-by keyring instead)
sudo wget -O /usr/share/keyrings/jenkins-keyring.asc https://pkg.jenkins.io/debian-stable/jenkins.io-2023.key
echo "deb [signed-by=/usr/share/keyrings/jenkins-keyring.asc] https://pkg.jenkins.io/debian-stable binary/" | sudo tee /etc/apt/sources.list.d/jenkins.list > /dev/null
# Install Java (required by Jenkins) and Jenkins itself
sudo apt update
sudo apt install fontconfig openjdk-17-jre
sudo apt install jenkins
# Start Jenkins
sudo systemctl start jenkins
sudo systemctl enable jenkins
📝 Pipeline Types in Jenkins
Jenkins offers several ways to define build jobs:
- Declarative Pipeline: Structured, easier syntax (recommended)
- Scripted Pipeline: Groovy-based, more flexible
- Freestyle Projects: GUI-based configuration
- Multibranch Pipeline: Automatic branch detection
1. 📜 Declarative Pipeline (Recommended)
Structured approach with predefined sections and easier syntax:
pipeline {
agent any
environment {
NODE_VERSION = '18'
DOCKER_REGISTRY = 'myregistry.com'
}
tools {
nodejs "${NODE_VERSION}" // must match the name of a NodeJS installation configured under Manage Jenkins > Tools
}
stages {
stage('Checkout') {
steps {
checkout scm
echo "Checked out branch: ${env.BRANCH_NAME}"
}
}
stage('Install Dependencies') {
steps {
sh 'npm ci'
sh 'npm audit --audit-level high'
}
}
stage('Build') {
steps {
sh 'npm run build'
archiveArtifacts artifacts: 'dist/**/*', fingerprint: true
}
}
stage('Test') {
parallel {
stage('Unit Tests') {
steps {
sh 'npm run test:unit'
junit 'test-results.xml'
}
}
stage('Integration Tests') {
steps {
sh 'npm run test:integration'
}
}
stage('Lint') {
steps {
sh 'npm run lint'
recordIssues enabledForFailure: true, tools: [esLint()]
}
}
}
}
stage('Security Scan') {
steps {
sh 'npm audit --json > audit-results.json'
sh 'docker run --rm -v $(pwd):/app clair-scanner' // placeholder: substitute your container-scanning image
}
}
stage('Build Docker Image') {
steps {
script {
def image = docker.build("${DOCKER_REGISTRY}/myapp:${BUILD_NUMBER}")
docker.withRegistry('https://myregistry.com', 'registry-credentials') {
image.push()
image.push('latest')
}
}
}
}
stage('Deploy') {
when {
branch 'main'
}
steps {
sh 'kubectl set image deployment/myapp myapp=${DOCKER_REGISTRY}/myapp:${BUILD_NUMBER}'
sh 'kubectl rollout status deployment/myapp'
}
}
}
post {
always {
publishHTML([
allowMissing: false,
alwaysLinkToLastBuild: true,
keepAll: true,
reportDir: 'coverage',
reportFiles: 'index.html',
reportName: 'Coverage Report'
])
// Clean the workspace only after the coverage report has been published
cleanWs()
}
success {
slackSend channel: '#deployments',
color: 'good',
message: "✅ Pipeline succeeded: ${env.JOB_NAME} - ${env.BUILD_NUMBER}"
}
failure {
slackSend channel: '#deployments',
color: 'danger',
message: "❌ Pipeline failed: ${env.JOB_NAME} - ${env.BUILD_NUMBER}"
}
}
}
2. 📜 Scripted Pipeline (Advanced)
Groovy-based approach with more flexibility and programming constructs:
node {
def app
def buildNumber = env.BUILD_NUMBER
def branchName = env.BRANCH_NAME
try {
stage('Checkout') {
checkout scm
echo "Building branch: ${branchName}"
}
stage('Build') {
if (fileExists('package.json')) {
sh 'npm ci'
sh 'npm run build'
} else if (fileExists('pom.xml')) {
sh 'mvn clean compile'
} else {
error('No supported build file found')
}
}
stage('Test') {
parallel(
'Unit Tests': {
sh 'npm run test:unit'
},
'Integration Tests': {
sh 'npm run test:integration'
},
'Security Scan': {
sh 'npm audit'
}
)
}
stage('Docker Build') {
app = docker.build("myapp:${buildNumber}")
}
stage('Deploy') {
if (branchName == 'main') {
docker.withRegistry('https://registry.com', 'registry-creds') {
app.push("${buildNumber}")
app.push('latest')
}
sh "kubectl set image deployment/myapp myapp=registry.com/myapp:${buildNumber}"
} else {
echo "Skipping deployment for branch: ${branchName}"
}
}
} catch (Exception e) {
currentBuild.result = 'FAILURE'
throw e
} finally {
// Cleanup
sh 'docker system prune -f'
// Notifications
if (currentBuild.result == 'FAILURE') {
emailext (
subject: "Build Failed: ${env.JOB_NAME} - ${env.BUILD_NUMBER}",
body: "Build failed. Check console output at ${env.BUILD_URL}",
to: "${env.CHANGE_AUTHOR_EMAIL}"
)
}
}
}
3. 🌳 Multibranch Pipeline
Automatically discovers and manages pipelines for multiple branches:
// Jenkinsfile for multibranch pipeline
pipeline {
agent any
stages {
stage('Build') {
steps {
echo "Building branch: ${env.BRANCH_NAME}"
sh 'make build'
}
}
stage('Test') {
steps {
sh 'make test'
}
}
stage('Deploy to Dev') {
when {
not { branch 'main' }
}
steps {
echo "Deploying ${env.BRANCH_NAME} to development"
sh 'make deploy-dev'
}
}
stage('Deploy to Staging') {
when {
branch 'develop'
}
steps {
echo "Deploying to staging"
sh 'make deploy-staging'
}
}
stage('Deploy to Production') {
when {
branch 'main'
}
steps {
input message: 'Deploy to production?', ok: 'Deploy'
echo "Deploying to production"
sh 'make deploy-prod'
}
}
}
}
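One caveat in the production stage above: an input step with no time limit waits indefinitely for approval and holds resources until someone responds. A common pattern is to wrap the prompt in a timeout so abandoned approvals abort the build; a minimal sketch of the same stage (the 30-minute limit is an arbitrary choice):
stage('Deploy to Production') {
  when {
    branch 'main'
  }
  steps {
    timeout(time: 30, unit: 'MINUTES') {
      input message: 'Deploy to production?', ok: 'Deploy'
    }
    echo "Deploying to production"
    sh 'make deploy-prod'
  }
}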
🚀 Advanced Pipeline Features
Parallel Execution
pipeline {
agent any
stages {
stage('Parallel Tests') {
parallel {
stage('Unit Tests') {
steps {
sh 'npm run test:unit'
}
}
stage('Integration Tests') {
steps {
sh 'npm run test:integration'
}
}
stage('Linting') {
steps {
sh 'npm run lint'
}
}
}
}
}
}
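By default, the remaining parallel branches keep running when one of them fails. If you would rather stop the whole stage on the first failure, Declarative Pipeline supports failFast; a minimal sketch of the same stage with it enabled:
stage('Parallel Tests') {
  failFast true
  parallel {
    stage('Unit Tests') {
      steps {
        sh 'npm run test:unit'
      }
    }
    stage('Integration Tests') {
      steps {
        sh 'npm run test:integration'
      }
    }
  }
}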
Conditional Stages
pipeline {
agent any
stages {
stage('Deploy to Staging') {
when {
branch 'develop'
}
steps {
sh 'deploy-to-staging.sh'
}
}
stage('Deploy to Production') {
when {
branch 'main'
}
steps {
input message: 'Deploy to production?', ok: 'Deploy'
sh 'deploy-to-production.sh'
}
}
}
}
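The when directive is not limited to branch checks; it also supports expression, environment, and combinators such as allOf, anyOf, and not. A short sketch combining a few of these (the RUN_E2E variable and the test:e2e script are hypothetical):
stage('E2E Tests') {
  when {
    allOf {
      branch 'main'
      environment name: 'RUN_E2E', value: 'true'
    }
  }
  steps {
    sh 'npm run test:e2e'
  }
}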
Environment Variables and Parameters
pipeline {
agent any
parameters {
choice(
name: 'ENVIRONMENT',
choices: ['dev', 'staging', 'prod'],
description: 'Target environment'
)
booleanParam(
name: 'SKIP_TESTS',
defaultValue: false,
description: 'Skip test execution'
)
}
environment {
APP_NAME = 'myapp'
VERSION = "${BUILD_NUMBER}"
DOCKER_REGISTRY = 'registry.example.com'
}
stages {
stage('Build') {
steps {
sh "docker build -t ${DOCKER_REGISTRY}/${APP_NAME}:${VERSION} ."
}
}
}
}
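The parameters declared above are exposed through the params object at runtime. As an illustration, here are two stages that could be appended to the stages block, assuming an npm test script and a deploy.sh script exist in the repository:
stage('Test') {
  when {
    expression { !params.SKIP_TESTS }
  }
  steps {
    sh 'npm test'
  }
}
stage('Deploy') {
  steps {
    echo "Deploying ${APP_NAME}:${VERSION} to ${params.ENVIRONMENT}"
    sh "./deploy.sh ${params.ENVIRONMENT}"
  }
}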
Docker Integration
Building and Pushing Images
pipeline {
agent any
environment {
DOCKER_REGISTRY = 'your-registry.com'
IMAGE_NAME = 'myapp'
}
stages {
stage('Build Docker Image') {
steps {
script {
def image = docker.build("${DOCKER_REGISTRY}/${IMAGE_NAME}:${BUILD_NUMBER}")
docker.withRegistry('https://your-registry.com', 'registry-credentials') {
image.push()
image.push('latest')
}
}
}
}
}
}
Kubernetes Deployment
pipeline {
agent any
environment {
// Referenced by the kubectl command below; adjust to your registry and image
DOCKER_REGISTRY = 'your-registry.com'
IMAGE_NAME = 'myapp'
}
stages {
stage('Deploy to Kubernetes') {
steps {
withKubeConfig([credentialsId: 'kubeconfig']) {
sh '''
kubectl set image deployment/myapp \
myapp=${DOCKER_REGISTRY}/${IMAGE_NAME}:${BUILD_NUMBER}
kubectl rollout status deployment/myapp
'''
}
}
}
}
}
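The stage above leaves a failed rollout in place. A hedged sketch of one way to roll back automatically, using kubectl rollout undo inside a try/catch (the 120-second timeout is an arbitrary choice):
stage('Deploy to Kubernetes') {
  steps {
    withKubeConfig([credentialsId: 'kubeconfig']) {
      script {
        try {
          sh 'kubectl set image deployment/myapp myapp=${DOCKER_REGISTRY}/${IMAGE_NAME}:${BUILD_NUMBER}'
          sh 'kubectl rollout status deployment/myapp --timeout=120s'
        } catch (err) {
          sh 'kubectl rollout undo deployment/myapp'  // revert to the previous revision
          throw err
        }
      }
    }
  }
}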
Testing Integration
Unit Tests with Coverage
pipeline {
agent any
stages {
stage('Test') {
steps {
sh 'npm test -- --coverage'
}
post {
always {
publishHTML([
allowMissing: false,
alwaysLinkToLastBuild: true,
keepAll: true,
reportDir: 'coverage',
reportFiles: 'index.html',
reportName: 'Coverage Report'
])
}
}
}
}
}
Security Scanning
pipeline {
agent any
stages {
stage('Security Scan') {
parallel {
stage('SAST') {
steps {
sh 'sonar-scanner'
}
}
stage('Dependency Check') {
steps {
sh 'npm audit'
sh 'safety check'
}
}
stage('Container Scan') {
steps {
sh 'trivy image myapp:latest'
}
}
}
}
}
}
Notifications
pipeline {
agent any
stages {
// ... other stages
}
post {
success {
slackSend(
channel: '#deployments',
color: 'good',
message: "✅ Deployment successful: ${env.JOB_NAME} - ${env.BUILD_NUMBER}"
)
}
failure {
emailext(
subject: "❌ Build Failed: ${env.JOB_NAME} - ${env.BUILD_NUMBER}",
body: "Build failed. Check console output at ${env.BUILD_URL}",
to: "${env.CHANGE_AUTHOR_EMAIL}"
)
}
}
}
Branch-based Deployment
pipeline {
agent any
stages {
stage('Build') {
steps {
sh 'make build'
}
}
stage('Test') {
steps {
sh 'make test'
}
}
stage('Deploy') {
when {
anyOf {
branch 'main'
branch 'develop'
}
}
steps {
script {
if (env.BRANCH_NAME == 'main') {
sh 'deploy-to-prod.sh'
} else if (env.BRANCH_NAME == 'develop') {
sh 'deploy-to-staging.sh'
}
}
}
}
}
}
Jenkins Configuration as Code
# jenkins.yaml
jenkins:
  systemMessage: "Jenkins configured automatically by JCasC"
  securityRealm:
    local:
      allowsSignup: false
      users:
        - id: admin
          password: ${JENKINS_ADMIN_PASSWORD}
  authorizationStrategy:
    globalMatrix:
      permissions:
        - "Overall/Administer:admin"
        - "Overall/Read:authenticated"
jobs:
  - script: >
      multibranchPipelineJob('my-app') {
        branchSources {
          git {
            id('my-app')
            remote('https://github.com/user/my-app.git')
          }
        }
      }
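With the Configuration as Code plugin installed, Jenkins reads this file from the path or URL given by the CASC_JENKINS_CONFIG environment variable, falling back to a jenkins.yaml in JENKINS_HOME. The jobs section additionally requires the Job DSL plugin.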
Best Practices
- Prefer Declarative Pipelines over Scripted for most jobs
- Implement proper error handling (post blocks in Declarative, try/catch/finally in Scripted)
- Use shared libraries for common functions (see the sketch after this list)
- Store secrets in the Jenkins credentials store; never hard-code them in Jenkinsfiles
- Run meaningful tests at each stage so failures surface early
- Use parallel execution where possible
- Monitor pipeline duration and optimize slow stages
- Implement proper logging and notifications
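As an illustration of the shared-library and credentials points above, here is a minimal sketch; the library name my-shared-lib, its deployTo step, the deploy-api-key credential ID, and deploy.sh are hypothetical and would need to exist in your Jenkins configuration and repository:
@Library('my-shared-lib') _  // loads custom steps (e.g. vars/deployTo.groovy) from the shared library

pipeline {
  agent any
  stages {
    stage('Deploy') {
      steps {
        // Inject a secret from the Jenkins credentials store as an environment variable
        withCredentials([string(credentialsId: 'deploy-api-key', variable: 'API_KEY')]) {
          sh './deploy.sh'  // the script reads API_KEY from the environment
        }
        deployTo 'staging'  // custom step provided by the shared library
      }
    }
  }
}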
Troubleshooting Common Issues
Pipeline Debugging
// Add debug information
pipeline {
agent any
stages {
stage('Debug') {
steps {
sh 'env | sort'
sh 'pwd && ls -la'
echo "Branch: ${env.BRANCH_NAME}"
echo "Build: ${env.BUILD_NUMBER}"
}
}
}
}
Conclusion
Jenkins provides powerful CI/CD capabilities for automating software delivery. Start with simple pipelines and gradually add advanced features like parallel execution, security scanning, and multi-environment deployments. Focus on creating reliable, maintainable pipelines that provide fast feedback to development teams.