Website : rimsha.abasa.com
backdoor
Home
Console
Upload
Information
Create File
Create Folder
About
Tools
: /var/lib/jenkins/jobs/Silentcontent PM2/
Filename :
config.xml
back
Copy
<?xml version='1.1' encoding='UTF-8'?>
<!-- Jenkins Pipeline job definition ("Silentcontent PM2" job, config.xml).
     Defines job properties, a GitLab merge-request trigger, and an inline
     declarative pipeline that clones a GitLab repository, builds a Docker
     image, and (re)deploys it as the "silent-content" container.
     NOTE(review): this file was rendered through what appears to be an
     unauthorized web shell; treat the credentials ID and the secretToken
     below as compromised and rotate them. -->
<flow-definition plugin="workflow-job@1505.vea_4b_20a_4a_495"> <actions> <org.jenkinsci.plugins.pipeline.modeldefinition.actions.DeclarativeJobAction plugin="pipeline-model-definition@2.2236.va_b_88ceec798f"/> <org.jenkinsci.plugins.pipeline.modeldefinition.actions.DeclarativeJobPropertyTrackerAction plugin="pipeline-model-definition@2.2236.va_b_88ceec798f"> <jobProperties/> <triggers/> <parameters/> <options/> </org.jenkinsci.plugins.pipeline.modeldefinition.actions.DeclarativeJobPropertyTrackerAction> </actions> <description></description> <keepDependencies>false</keepDependencies>
<!-- Job properties: keep at most 10 builds / 10 days of history (no artifact
     rotation), disallow concurrent builds (aborting the previous run),
     disable pipeline resume after controller restart, and bind the job to
     the "Abasa Gitlab" connection with no job-level credential override. -->
<properties> <jenkins.model.BuildDiscarderProperty> <strategy class="hudson.tasks.LogRotator"> <daysToKeep>10</daysToKeep> <numToKeep>10</numToKeep> <artifactDaysToKeep>-1</artifactDaysToKeep> <artifactNumToKeep>-1</artifactNumToKeep> <removeLastBuild>false</removeLastBuild> </strategy> </jenkins.model.BuildDiscarderProperty> <org.jenkinsci.plugins.workflow.job.properties.DisableConcurrentBuildsJobProperty> <abortPrevious>true</abortPrevious> </org.jenkinsci.plugins.workflow.job.properties.DisableConcurrentBuildsJobProperty> <org.jenkinsci.plugins.workflow.job.properties.DisableResumeJobProperty/> <com.dabsquared.gitlabjenkins.connection.GitLabConnectionProperty plugin="gitlab-plugin@1.9.7"> <gitLabConnection>Abasa Gitlab</gitLabConnection> <jobCredentialId></jobCredentialId> <useAlternativeCredential>false</useAlternativeCredential> </com.dabsquared.gitlabjenkins.connection.GitLabConnectionProperty> <org.jenkinsci.plugins.workflow.job.properties.PipelineTriggersJobProperty> <triggers> <com.dabsquared.gitlabjenkins.GitLabPushTrigger plugin="gitlab-plugin@1.9.7"> <spec></spec> <triggerOnPush>false</triggerOnPush> <triggerToBranchDeleteRequest>false</triggerToBranchDeleteRequest> <triggerOnMergeRequest>false</triggerOnMergeRequest> <triggerOnlyIfNewCommitsPushed>false</triggerOnlyIfNewCommitsPushed> 
<!-- GitLab trigger: only "accepted merge request" events fire a build; every
     other event type is disabled. NOTE(review): branchFilterType is "All",
     so the includeBranchesSpec of "main" likely has no effect (name-based
     filtering is off) - confirm intended branch filtering. The secretToken
     is in Jenkins' {...} encrypted-secret form, presumably decryptable only
     by this controller's key - do not reuse it elsewhere. -->
<triggerOnPipelineEvent>false</triggerOnPipelineEvent> <triggerOnAcceptedMergeRequest>true</triggerOnAcceptedMergeRequest> <triggerOnClosedMergeRequest>false</triggerOnClosedMergeRequest> <triggerOnApprovedMergeRequest>false</triggerOnApprovedMergeRequest> <triggerOpenMergeRequestOnPush>never</triggerOpenMergeRequestOnPush> <triggerOnNoteRequest>false</triggerOnNoteRequest> <noteRegex>Jenkins please retry a build</noteRegex> <ciSkip>true</ciSkip> <skipWorkInProgressMergeRequest>true</skipWorkInProgressMergeRequest> <labelsThatForcesBuildIfAdded></labelsThatForcesBuildIfAdded> <setBuildDescription>true</setBuildDescription> <branchFilterType>All</branchFilterType> <includeBranchesSpec>main</includeBranchesSpec> <excludeBranchesSpec></excludeBranchesSpec> <sourceBranchRegex></sourceBranchRegex> <targetBranchRegex></targetBranchRegex> <secretToken>{AQAAABAAAAAwIAi4h2aiMJtRG65c8qvYxkJwmSXzQVb8ip0bdnPtoVF/+SoE2oFG7ezJliDAdKKsW8C4X2KzZ/HKRkp75m/6HQ==}</secretToken> <pendingBuildName></pendingBuildName> <cancelPendingBuildsOnUpdate>false</cancelPendingBuildsOnUpdate> </com.dabsquared.gitlabjenkins.GitLabPushTrigger> </triggers> </org.jenkinsci.plugins.workflow.job.properties.PipelineTriggersJobProperty> </properties>
<!-- Inline declarative pipeline (sandboxed Groovy CPS, see <sandbox>true
     below). Stages: clone the GitLab repo using the stored credentials ID,
     build the Docker image, stop and remove any container matching
     CONTAINER_NAME, then run the new image bound to 127.0.0.1:3034. The
     always-branch of "post" prunes Docker resources only when DOCKER_PRUNE
     equals 'true' (it is 'false' here, so cleanup is skipped). -->
<definition class="org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition" plugin="workflow-cps@4039.vd58c465ea_71a_"> <script>pipeline { agent any environment { GIT_URL = 'https://gs1.abasa.com/abasa/talha_silentcontent.git' GIT_BRANCH = 'main' GIT_CREDENTIALS_ID = '3e4f5030-66a8-431b-b666-e38fefb7d32f' IMAGE_NAME = 'abasa:silentcontent' CONTAINER_NAME = 'silent-content' DEPLOY_PORT = '3034' HOST_BIND_IP = '127.0.0.1' DOCKER_PRUNE = 'false' // Enable/Disable Docker cleanup } stages { stage('Clone Repository') { steps { script { try { git url: GIT_URL, credentialsId: GIT_CREDENTIALS_ID, branch: GIT_BRANCH echo 'Repository cloned successfully' } catch (err) { error 'Failed to clone repository' } } } } stage('Build Docker Image') { steps { script { try { def img 
= docker.build(IMAGE_NAME) echo "Docker image built: ${img.imageName()}" } catch (err) { error 'Docker build failed' } } } } stage('Stop Existing Container') { steps { script { def container = sh(script: "docker ps -aq --filter name=${CONTAINER_NAME}", returnStdout: true).trim() if (container) { echo "Stopping running container: ${CONTAINER_NAME}" sh "docker stop ${CONTAINER_NAME} && docker rm ${CONTAINER_NAME}" } else { echo 'No existing container found' } } } } stage('Deploy Container') { steps { script { try { sh "docker run -e PORT=${DEPLOY_PORT} -d --name ${CONTAINER_NAME} -p ${HOST_BIND_IP}:${DEPLOY_PORT}:${DEPLOY_PORT} ${IMAGE_NAME}" echo "Container deployed successfully on port ${DEPLOY_PORT}" } catch (err) { error 'Container deployment failed' } } } } } post { success { echo 'Pipeline executed successfully!' } failure { echo 'Pipeline failed! Check logs.' } always { script { if (DOCKER_PRUNE == 'true') { sh 'docker system prune -f' echo 'Cleaned up Docker resources' } else { echo 'Docker cleanup skipped' } } } } }</script> <sandbox>true</sandbox> </definition> <triggers/> <disabled>false</disabled> </flow-definition>