Jenkinsfile: yet another try..

Project: http://git-wip-us.apache.org/repos/asf/logging-log4net/repo
Commit: http://git-wip-us.apache.org/repos/asf/logging-log4net/commit/1e38d083
Tree: http://git-wip-us.apache.org/repos/asf/logging-log4net/tree/1e38d083
Diff: http://git-wip-us.apache.org/repos/asf/logging-log4net/diff/1e38d083

Branch: refs/heads/develop
Commit: 1e38d08350d2f293af14f36d2427358314a149e8
Parents: ed50566
Author: Dominik Psenner <dpsen...@apache.org>
Authored: Tue Jul 18 17:34:12 2017 +0200
Committer: Dominik Psenner <dpsen...@apache.org>
Committed: Tue Jul 18 17:34:12 2017 +0200

----------------------------------------------------------------------
 Jenkinsfile | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/logging-log4net/blob/1e38d083/Jenkinsfile
----------------------------------------------------------------------
diff --git a/Jenkinsfile b/Jenkinsfile
index 20cd33c..ff65f52 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -26,6 +26,8 @@ pipeline {
        agent {
                label 'ubuntu'
        }
+       def JENKINS_UID
+       def JENKINS_GID
        stages {
                // prepare node for builds
                stage('checkout') {
@@ -33,8 +35,8 @@ pipeline {
                                deleteDir()
                                checkout scm
                                script {
-                                       def JENKINS_UID = sh(returnStdout: true, script: 'stat -c "%u" .').trim()
-                                       def JENKINS_GID = sh(returnStdout: true, script: 'stat -c "%g" .').trim()
+                                       JENKINS_UID = sh(returnStdout: true, script: 'stat -c "%u" .').trim()
+                                       JENKINS_GID = sh(returnStdout: true, script: 'stat -c "%g" .').trim()
                                        echo $JENKINS_UID
                                        echo $JENKINS_GID
                                }

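For reference, the intent of the change is to assign the UID/GID values (rather than re-declare them with def inside the script block) so that later steps can read them. A minimal sketch of one common way to express this, assuming the variables are declared in Groovy scope above the declarative pipeline block and that the echo formatting is purely illustrative (this is not the committed file):

// Illustrative Jenkinsfile sketch; declarations outside pipeline {} and the
// echo string are assumptions, not the committed change.
def JENKINS_UID
def JENKINS_GID

pipeline {
	agent {
		label 'ubuntu'
	}
	stages {
		stage('checkout') {
			steps {
				deleteDir()
				checkout scm
				script {
					// sh(returnStdout: true) captures the command's stdout as a String
					JENKINS_UID = sh(returnStdout: true, script: 'stat -c "%u" .').trim()
					JENKINS_GID = sh(returnStdout: true, script: 'stat -c "%g" .').trim()
					// echo expects a String, so interpolate the captured values explicitly
					echo "uid=${JENKINS_UID} gid=${JENKINS_GID}"
				}
			}
		}
	}
}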