diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 6fe8aae3..64080350 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,33 +1,29 @@ version: 2 updates: - - package-ecosystem: maven + # Maven Dependencies for Jenkins Plugin - Target `tms_079` + - package-ecosystem: "maven" directory: "/" schedule: - interval: daily + interval: "daily" + target-branch: "tms_079" open-pull-requests-limit: 10 commit-message: prefix: "" ignore: - - dependency-name: org.eclipse.collections:eclipse-collections + - dependency-name: "org.eclipse.collections:eclipse-collections" versions: - ">= 10.a" - - dependency-name: org.eclipse.collections:eclipse-collections-api + - dependency-name: "org.eclipse.collections:eclipse-collections-api" versions: - ">= 10.a" - - dependency-name: net.javacrumbs.json-unit:json-unit-assertj + - dependency-name: "net.javacrumbs.json-unit:json-unit-assertj" versions: - ">= 3.0.0" + # GitHub Actions - Still on master - package-ecosystem: "github-actions" directory: "/" - commit-message: - prefix: "" schedule: interval: "daily" - - - package-ecosystem: "npm" - directory: "/" commit-message: prefix: "" - schedule: - interval: "daily" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e5b0894f..40bc4580 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: strategy: matrix: platform: [ubuntu-latest, windows-latest] - jdk: [11, 17, 21] + jdk: [21] runs-on: ${{ matrix.platform }} name: on ${{ matrix.platform }} with JDK ${{ matrix.jdk }} diff --git a/.github/workflows/jenkins-security-scan.yml b/.github/workflows/jenkins-security-scan.yml index 79662bde..d30dbf38 100644 --- a/.github/workflows/jenkins-security-scan.yml +++ b/.github/workflows/jenkins-security-scan.yml @@ -18,5 +18,5 @@ jobs: uses: jenkins-infra/jenkins-security-scan/.github/workflows/jenkins-security-scan.yaml@v2 with: java-cache: 'maven' # Optionally enable use of a build dependency cache. Specify 'maven' or 'gradle' as appropriate. - java-version: 11 # Optionally specify what version of Java to set up for the build, or remove to use a recent default. + java-version: 21 # Optionally specify what version of Java to set up for the build, or remove to use a recent default. 
diff --git a/.gitignore b/.gitignore index f2f8e449..cd44e73f 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,4 @@ work/** /src/main/resources/scripts/*.pyc /src/main/resources/scripts/vpython-addons/bs4/*.pyc +.idea diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 00000000..2e3746d1 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,2 @@ +// file: Jenkinsfile +buildPlugin(platforms: ['linux','windows'], jdkVersions: [21]) diff --git a/README.md b/README.md index 4f471064..bf014edb 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -[![CodeCov](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/coverage.yml/badge.svg?branch=tms_078)](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/coverage.yml) -[![CodeQL](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/codeql.yml/badge.svg?branch=tms_078)](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/codeql.yml) -[![GitHub CI](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/ci.yml/badge.svg?branch=tms_078)](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/ci.yml) +[![CodeCov](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/coverage.yml/badge.svg)](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/coverage.yml) +[![CodeQL](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/codeql.yml/badge.svg)](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/codeql.yml) +[![GitHub CI](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/ci.yml/badge.svg)](https://github.com/jenkinsci/vectorcast-execution-plugin/actions/workflows/ci.yml) # Summary @@ -8,6 +8,11 @@ This plugin allows the user to create Single and Pipeline Jobs to build and exec - [Jenkins Coverage Plugin](https://plugins.jenkins.io/coverage) - [Legacy VectorCAST Coverage Plugin](https://wiki.jenkins.io/display/JENKINS/VectorCAST+Coverage+Plugin). +### Jenkins Version Information: +- Development completed on Jenkins LTS 2.492.3 and Java 21 +- Validated against Jenkins LTS 2.504.2 +- Validated against Jenkins 2.513 + # Table of Contents @@ -35,7 +40,6 @@ This plugin allows the user to create Single and Pipeline Jobs to build and exec * [Additional Tools](#additional-tools) * [PC-lint Plus](#pc-lint-plus) * [Squore](#squore) - * [TESTinsights](#testinsights) * [Controlling Where Jobs Run](#controlling-where-jobs-run) * [Build Summary](#build-summary) * [Test Results](#test-results) @@ -49,6 +53,7 @@ This plugin allows the user to create Single and Pipeline Jobs to build and exec * [Using Change Based Testing Imported Results with QA Project](#using-change-based-testing-imported-results-with-qa-project) * [Disabled environments may add coverage metrics](#disabled-environments-may-add-coverage-metrics) * [Change Log](#change-log) + * [Version 0.79 (4 Dec 2025)](#version-079-4-dec-2025) * [Version 0.78 (14 Jun 2025)](#version-078-14-jun-2025) * [Version 0.77 (21 Aug 2024)](#version-077-21-aug-2024) * [Version 0.76 (19 Jan 2023)](#version-076-19-jan-2023) @@ -135,6 +140,10 @@ This plugin adds a new top-level menu item to the Jenkins sidebar. 
Select the ** ![](docs/images/vc_menu_in_sidebar.png) +On newer versions of Jenkins, the **VectorCAST** job creation pages are now located in the upper right: + +![](docs/images/new_vc_location.png) + ## Job Types @@ -284,7 +293,6 @@ When using imported results and the Use External Result File option, the job wil Other Vector tool integrations are supported by this plugin. - PC-lint Plus - Squore -- TESTinsights ![](docs/images/additional.png) @@ -297,14 +305,7 @@ For [PC-lint Plus](https://pclintplus.com/), the user must provide the command o -hs1 // The height of a message should be 1 ``` #### Squore -For [Squore](https://www.vector.com/int/en/products/products-a-z/software/squore/) analysis, the user must provide the Squore command found on the last page of the Squore project's configuration/build page. - -#### TESTinsights -For [TESTinsights](https://www.vector.com/int/en/products/products-a-z/software/vectorcast/vectorcast-testinsights), if the user is using Git or Subversion for SCM, the plugin will attempt to have the SCM version linked to the TESTinsights project for team access and distributed change-based testing. The user must provide the following: - - TESTinsights URL - The URL to TESTinsights server and project (Use Copy Team Area URL). - - TESTinsights Project - The Project Name in TESTinsights to push the results (leave blank to use the Jenkins Job Name). - - TESTinsights Credential ID - The Credential ID from Jenkins for TESTinsights. - - The proxy to push to TESTinsights server in the format **proxy.address:port** (optional) +For [Squore](https://www.vector.com/squore/) analysis, the user must provide the Squore command found on the last page of the Squore project's configuration/build page. ### Controlling Where Jobs Run @@ -347,7 +348,34 @@ By selecting individual cases, you can view the execution reports, providing ins ## Known Issues -### VectorCAST Reports and Jenkins Content Security +### 🔥 Jenkins 2.535 “Form is larger than max length 200000” + +**Cause:** +Jenkins 2.535 upgraded to Jetty 12, which limits web form submissions to **200 KB** by default. Large Pipeline job configs can exceed this limit after HTML encoding. + +**Fix:** +- Move the pipeline script to SCM - instead of keeping the Groovy text inline in the job config, use Pipeline script from SCM. +- Increase Jetty’s form size limit in your startup command: + + ```bash + -Dorg.eclipse.jetty.server.Request.maxFormContentSize=5242880 \ + -Dorg.eclipse.jetty.server.Request.maxFormKeys=10000 + ``` + +**Example** +``` +java -Dorg.eclipse.jetty.server.Request.maxFormContentSize=5242880 \ + -Dorg.eclipse.jetty.server.Request.maxFormKeys=10000 \ + -jar jenkins.war --httpPort=9090 +``` + +**Notes** +The old Jenkins flag `hudson.util.MultipartFormDataParser.MAX_FORM_SIZE` no longer works in 2.535+. + +### ⚠️ Imported Results with Cobertura and LCOV output +New output formats were added: extended Cobertura output for use with the Jenkins Coverage Plugin, and LCOV output. These reporting scripts do not currently support generating coverage metrics from imported results. + +### ⚠️ VectorCAST Reports and Jenkins Content Security VectorCAST HTML reports for metrics were updated to use cascading style sheets (CSS) in the 2019 release and 2020 for top level project metrics. This was done to offer users greater flexibility in displaying metrics. To maintain single file HTML format, VectorCAST Reports used inline CSS. Inline CSS was disallowed under Jenkins' more restrictive CSP.
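As a point of reference, Jenkins controls the policy applied to archived HTML through the `hudson.model.DirectoryBrowserSupport.CSP` system property. The Script Console sketch below is only an illustration (the directive set shown is an assumption, not a recommendation); consult the articles linked below before relaxing the policy on a production controller, since loosening CSP has security implications.

```groovy
// Illustrative sketch: run from Manage Jenkins -> Script Console.
// Relaxes the CSP header used for archived HTML so inline CSS in VectorCAST
// reports can render. Not persisted across restarts; the directive set here
// is an example only and should be adapted per the referenced articles.
System.setProperty("hudson.model.DirectoryBrowserSupport.CSP",
    "sandbox allow-scripts; default-src 'none'; img-src 'self' data:; style-src 'self' 'unsafe-inline';")
println System.getProperty("hudson.model.DirectoryBrowserSupport.CSP")
```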
@@ -366,29 +394,43 @@ For more information on the Jenkins CSP, please see [Configuring Content Securit For more information on VectorCAST Reports and Jenkins Content Security Policy, please see the article [VectorCAST Reports and Jenkins Content Security Policy](https://support.vector.com/kb?sys_kb_id=e54af267db6b6c904896115e68961902&id=kb_article_view&sysparm_rank=8&sysparm_tsqueryId=ba9d8f558707b858b9f233770cbb3543) -### JUnit publisher failing environment with no test cases +### ⚠️ JUnit publisher failing environment with no test cases For non-pipeline jobs, the JUnit publisher will fail any environment published without test results. If you have an environment with no test results, you will need to manually check the box "Do not fail the build on empty test results" in the Publish JUnit test result report configuration. -### Potential loss of requirements information +### ⚠️ Potential loss of requirements information For customers using VectorCAST's requirements gateway, there is a potential for loss of requirements data when running test environments in parallel while using a shared requirements database. -### Test and code coverage reporting with Imported Results +### ⚠️ Test and code coverage reporting with Imported Results For environments that use imported results with versions of VectorCAST before 2020, reporting of test results and code coverage will not generate properly because the required build information is missing. -### Using Change Based Testing Imported Results with QA Project +### ⚠️ Using Change Based Testing Imported Results with QA Project VectorCAST/QA projects cannot use imported results for change-based testing. -### Disabled environments may add coverage metrics +### ⚠️ Disabled environments may add coverage metrics In rare cases, VectorCAST projects will have disabled environments with results stored from before they were disabled. In cases where the disabled environments share source files with enabled environments, this may lead to additional coverage metrics. It is recommended to clean the environment before disabling. This takes into account environments that are directly disabled or disabled at the Compiler or TestSuite Nodes.
To avoid this, please clean environments before disabling them ## Change Log +### Version 0.79 (4 Dec 2025) +- Moved to minimum Jenkins LTS 2.492.3 and Java 21 + - Validated against Jenkins LTS 2.504.2 + - Validated against Jenkins 2.513 +- Update pom.xml to get a build of the plugin and to Java21 +- Update NewSingleJob to catch new exception thrown from SecureGroovyScript +- Removing support for TESTinsights +- Updating parallel_full_reports.py NVLM support to support 9.0.22 +- Update jenkinsfile pipeline script to match up with the latest groovy interpreter + - Missing def before globals + - Pipeline can access VC_ global vars, but not functions + - Need to pass all required VC_ global vars to functions +- Fixed encoding issues + ### Version 0.78 (14 Jun 2025) - Moved to minimum Jenkins version: 2.452.1 and Java 11 - Changed default coverage plugin on new job creation diff --git a/docs/images/additional.png b/docs/images/additional.png index 70c46cf3..854cf963 100644 Binary files a/docs/images/additional.png and b/docs/images/additional.png differ diff --git a/docs/images/new_vc_location.png b/docs/images/new_vc_location.png new file mode 100644 index 00000000..f4edf617 Binary files /dev/null and b/docs/images/new_vc_location.png differ diff --git a/pom.xml b/pom.xml index 2ae28d39..24cf0821 100644 --- a/pom.xml +++ b/pom.xml @@ -4,8 +4,8 @@ org.jenkins-ci.plugins plugin - 4.88 - + 5.9 + VectorCAST Execution Run VectorCAST manage jobs from Jenkins @@ -25,7 +25,7 @@ UTF-8 UTF-8 false - 2.462.3 + 2.492.3 Vector Informatik, GmbH @@ -65,121 +65,119 @@ io.jenkins.tools.bom - bom-2.462.x - 3387.v0f2773fa_3200 + bom-2.492.x + + 5473.vb_9533d9e5d88 + import pom + + javax.annotation + javax.annotation-api + 1.3.2 + + + org.jenkins-ci.main + maven-plugin + 3.25 + + + org.jenkins-ci.plugins + vectorcast-coverage + 0.22 + + io.jenkins.plugins warnings-ng - 11.10.0 io.jenkins.plugins coverage - 1.16.1 + 2.7.1 - + org.jenkins-ci.plugins vectorcast-coverage - 0.22 org.jvnet.hudson.plugins groovy-postbuild - 264.vf6e02a_77d5b_c org.jenkins-ci.plugins copyartifact - 749.vfb_dca_a_9b_6549 - - org.jenkins-ci.plugins - ws-cleanup - 0.46 - com.github.spotbugs spotbugs-annotations - 4.8.6 + 4.9.3 + org.mockito mockito-core - 5.14.1 test + org.jenkins-ci.plugins credentials-binding - 681.vf91669a_32e45 - - org.jenkins-ci.plugins - junit - 1300.v03d9d8a_cf1fb_ + ws-cleanup - - - - org.jenkins-ci.plugins - display-url-api - 2.204.vf6fddd8a_8b_e9 - - + - org.jenkins-ci.main - maven-plugin - 3.23 + org.jenkins-ci.plugins.workflow + workflow-api + compile - - io.jenkins.plugins - commons-text-api - 1.12.0-129.v99a_50df237f7 + org.jenkins-ci.plugins.workflow + workflow-job + compile - - org.jenkins-ci.plugins - trilead-api - 2.147.vb_73cc728a_32e + org.jenkins-ci.plugins.workflow + workflow-step-api + compile - - - io.jenkins.plugins - caffeine-api - 3.1.8-133.v17b_1ff2e0599 + org.jenkins-ci.plugins.workflow + workflow-basic-steps + compile - - - io.jenkins.plugins - commons-lang3-api - 3.17.0-84.vb_b_938040b_078 - - org.jenkins-ci.plugins.workflow - workflow-support - 930.vf51d22b_ce488 + workflow-durable-task-step + compile - - + org.jenkins-ci.plugins.workflow workflow-cps - 3969.vdc9d3a_efcc6a_ + compile + + + org.jenkins-ci.plugins.workflow + workflow-aggregator + 608.v67378e9d3db_1 + compile + + + org.jenkinsci.plugins + pipeline-model-definition + compile @@ -222,9 +220,45 @@ org.apache.maven.plugins - maven-release-plugin - 3.0.0-M1 + maven-compiler-plugin + 3.14.0 + + 21 + 21 + + ${lintCompilerArgs} + + + + 
+ + org.apache.maven.plugins + maven-surefire-plugin + 3.2.5 + + 1C + true + classes + true + true + + + + + org.jenkins-ci.tools + maven-hpi-plugin + 3.9 + true + + 21 + + + + + org.apache.maven.plugins + maven-release-plugin + @@ -235,18 +269,6 @@ connection - - org.apache.maven.plugins - maven-compiler-plugin - 3.13.0 - - 1.8 - 1.8 - - ${lintCompilerArgs} - - - org.codehaus.mojo build-helper-maven-plugin @@ -265,29 +287,14 @@ - - org.jenkins-ci.tools - maven-hpi-plugin - 3.55 - true - com.github.spotbugs spotbugs-maven-plugin - 4.8.5.0 + 4.9.3.0 true false - - - run-spotbugs - verify - - check - - - org.apache.maven.plugins diff --git a/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTCommand.java b/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTCommand.java index 69ba77b9..04c658bf 100644 --- a/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTCommand.java +++ b/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTCommand.java @@ -72,15 +72,15 @@ public final String getUnixCommand() { /** * Create a VectorCAST command. - * @param winCommand the windows variant of the command - * @param unixCommand the unix variant of the command + * @param inWinCmd the windows variant of the command + * @param inLinuxCmd the unix variant of the command */ @DataBoundConstructor @SuppressWarnings("checkstyle:HiddenField") - public VectorCASTCommand(final String winCommand, - final String unixCommand) { - this.winCommand = winCommand; - this.unixCommand = unixCommand; + public VectorCASTCommand(final String inWinCmd, + final String inLinuxCmd) { + this.winCommand = inWinCmd; + this.unixCommand = inLinuxCmd; } /** diff --git a/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTJobPipeline.java b/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTJobPipeline.java index 2f429516..714c7f27 100644 --- a/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTJobPipeline.java +++ b/src/main/java/com/vectorcast/plugins/vectorcastexecution/VectorCASTJobPipeline.java @@ -130,7 +130,7 @@ public HttpResponse doCreate(final StaplerRequest request, job.create(); projectName = job.getProjectName(); Logger.getLogger(VectorCASTJobPipeline.class.getName()) - .log(Level.SEVERE, "Pipeline Project Name: " + projectName, + .log(Level.INFO, "Pipeline Project Name: " + projectName, "Pipeline Project Name: " + projectName); return new HttpRedirect("created"); } catch (ScmConflictException ex) { diff --git a/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/BaseJob.java b/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/BaseJob.java index 20de9595..d8fe2878 100644 --- a/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/BaseJob.java +++ b/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/BaseJob.java @@ -54,11 +54,7 @@ import io.jenkins.plugins.util.QualityGate.QualityGateCriticality; import edu.hm.hafner.coverage.Metric; import io.jenkins.plugins.forensics.reference.SimpleReferenceRecorder; -import org.jenkinsci.plugins.credentialsbinding.impl.SecretBuildWrapper; -import org.jenkinsci.plugins.credentialsbinding.impl.UsernamePasswordMultiBinding; -import org.jenkinsci.plugins.credentialsbinding.MultiBinding; import java.util.List; -import java.util.Collections; import java.util.ArrayList; import java.net.URL; @@ -175,19 +171,6 @@ public abstract class BaseJob { /** Squore execution command. */ private String squoreCommand; - /** TESTinsights URL information. 
*/ - private String testInsightsUrl; - /** TESTinsights Project information. */ - private String testInsightsProject; - /** TESTinsights credentials information. */ - private String testInsightsCredentialsId; - /** TESTinsights Proxy information. */ - private String testInsightsProxy; - /** TESTinsights SCM information. */ - private String testInsightsScmUrl; - /** TESTinsights SCM Tech information. */ - private String testInsightsScmTech; - /** * Constructor. * @param req request object @@ -316,14 +299,6 @@ protected BaseJob(final StaplerRequest req, pclpCommand = json.optString("pclpCommand", "").replace('\\', '/'); pclpResultsPattern = json.optString("pclpResultsPattern", ""); squoreCommand = json.optString("squoreCommand", "").replace('\\', '/'); - testInsightsUrl = json.optString("TESTinsights_URL", ""); - testInsightsProject = json.optString("TESTinsights_project", ""); - if (testInsightsProject.length() == 0) { - testInsightsProject = "env.JOB_BASE_NAME"; - } - testInsightsCredentialsId = - json.optString("TESTinsights_credentials_id", ""); - testInsightsProxy = json.optString("TESTinsights_proxy", ""); } @@ -589,69 +564,7 @@ protected String getPclpResultsPattern() { protected String getSquoreCommand() { return squoreCommand; } - /** - * Get URL for TESTinsights. - * @return TESTinsights URL - */ - protected String getTestInsightsUrl() { - return testInsightsUrl; - } - /** - * Get Project for TESTinsights. - * @return TESTinsights Project - */ - protected String getTestInsightsProject() { - return testInsightsProject; - } - /** - * set Project for TESTinsights. - * @param project TESTinsights Project - */ - protected void setTestInsightsProject(final String project) { - this.testInsightsProject = project; - } - /** - * Get Proxy for TESTinsights. - * @return proxy TESTinsights proxy - */ - protected String getTestInsightsProxy() { - return testInsightsProxy; - } - /** - * Get Credentials for TESTinsights. - * @return TESTinsights Credentials - */ - protected String getTestInsightsCredentialsId() { - return testInsightsCredentialsId; - } - /** - * Set SCM URL for TESTinsights. - * @param url TESTinsights SCM URL - */ - protected void setTestInsightsScmUrl(final String url) { - this.testInsightsScmUrl = url; - } - /** - * Get SCM URL for TESTinsights. - * @return TESTinsights SCM URL - */ - protected String getTestInsightsScmUrl() { - return testInsightsScmUrl; - } - /** - * Get SCM Technology TESTinsights. - * @return TESTinsights SCM Technology - */ - protected String getTestInsightsScmTech() { - return testInsightsScmTech; - } - /** - * Set SCM Technology TESTinsights. - * @param tech TESTinsights SCM Technology - */ - protected void setTestInsightsScmTech(final String tech) { - this.testInsightsScmTech = tech; - } + /** * Get request. 
* @return request @@ -745,16 +658,6 @@ public void create() throws usingScm = false; } else { usingScm = true; - - // for TESTinsights SCM connector - String scmName = scm.getDescriptor().getDisplayName(); - if (scmName.equals("Git")) { - testInsightsScmTech = "git"; - } else if (scmName.equals("Subversion")) { - testInsightsScmTech = "svn"; - } else { - testInsightsScmTech = ""; - } } topProject.setScm(scm); @@ -818,9 +721,6 @@ protected void addArchiveArtifacts(final Project project) { if (pclpCommand.length() != 0) { pclpArchive = ", " + pclpResultsPattern; } - if (testInsightsUrl.length() != 0) { - tiArchive = ", TESTinsights_Push.log"; - } String addToolsArchive = pclpArchive + tiArchive; String defaultArchive = "**/*.html, xml_data/**/*.xml," + "unit_test_*.txt, **/*.png, **/*.css," @@ -951,16 +851,6 @@ protected void addJenkinsCoverage(final Project project) { project.getPublishersList().add(publisher); } - /** - * Add credentials for coverage reporting step. - * @param project project to add step to - */ - protected void addCredentialID(final Project project) { - project.getBuildWrappersList().add( - new SecretBuildWrapper(Collections.>singletonList( - new UsernamePasswordMultiBinding("VC_TI_USR", "VC_TI_PWS", - testInsightsCredentialsId)))); - } /** * Call to get baseline windows single job file. diff --git a/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewPipelineJob.java b/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewPipelineJob.java index 349c65de..63cfdef9 100644 --- a/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewPipelineJob.java +++ b/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewPipelineJob.java @@ -61,18 +61,13 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServletResponse; -import java.util.ArrayList; -import java.util.List; - import java.io.UncheckedIOException; -import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.nio.file.attribute.PosixFilePermission; -import java.nio.file.attribute.PosixFilePermissions; +import java.nio.file.Files; + import java.nio.file.StandardCopyOption; -import java.util.EnumSet; import org.kohsuke.stapler.verb.POST; import hudson.model.Item; import hudson.security.AccessDeniedException3; @@ -122,47 +117,11 @@ public NewPipelineJob( BadOptionComboException { super(request, response); - final int indexNotFound = -1; - JSONObject json = request.getSubmittedForm(); sharedArtifactDirectory = json.optString("sharedArtifactDir", ""); pipelineSCM = json.optString("scmSnippet", "").trim(); - String[] lines = pipelineSCM.split("\n"); - - List scmList = new ArrayList(); - scmList.add("git"); - scmList.add("svn"); - - String url = ""; - String scmTechnology = ""; - - for (String line : lines) { - - for (String scm : scmList) { - if (line.startsWith(scm)) { - scmTechnology = scm; - - if (line.indexOf("url:") == indexNotFound) { - String[] elements = line.split(","); - for (String ele : elements) { - - if (ele.startsWith("url:")) { - String[] eleList = ele.split(" "); - url = eleList[eleList.length - 1]; - } - } - } else { - String[] urlEle = line.split(" "); - url = urlEle[urlEle.length - 1]; - } - } - } - } - setTestInsightsScmUrl(url.replace("'", "")); - setTestInsightsScmTech(scmTechnology); - singleCheckout = json.optBoolean("singleCheckout", false); // remove the win/linux options since there's no platform any more @@ -201,10 +160,6 @@ public NewPipelineJob( if (pipelineSCM.length() != 0 && absPath) { throw new 
ScmConflictException(pipelineSCM, mpName); } - - if (getTestInsightsProject().equals("env.JOB_BASE_NAME")) { - setTestInsightsProject("${JOB_BASE_NAME}"); - } } /** @@ -357,47 +312,28 @@ public void create() throws doCreate(); } - private static Path createTempFile(final Path tempDirChild) - throws UncheckedIOException { + /** + * Creates a named temp file. + * + * @return File - temporary file + * @throws UncheckedIOException + */ + private static Path createNamedTempFile() throws UncheckedIOException { try { - if (tempDirChild.getFileSystem().supportedFileAttributeViews(). - contains("posix")) { - // Explicit permissions setting is only required - // on unix-like systems because - // the temporary directory is shared between all users. - // This is not necessary on Windows, - // each user has their own temp directory - final EnumSet posixFilePermissions = - EnumSet.of( - PosixFilePermission.OWNER_READ, - PosixFilePermission.OWNER_WRITE - ); - if (!Files.exists(tempDirChild)) { - Files.createFile( - tempDirChild, - PosixFilePermissions - .asFileAttribute(posixFilePermissions) - ); - } else { - Files.setPosixFilePermissions( - tempDirChild, - posixFilePermissions - ); // GOOD: Good has permissions `-rw-------`, - //or will throw an exception if this fails - } - } else if (!Files.exists(tempDirChild)) { - // On Windows, we still need to create the directory, - // when it doesn't already exist. - Files.createDirectory(tempDirChild); + Path tempDir = Paths.get(System.getProperty("java.io.tmpdir")); + Path tempFile = tempDir.resolve("config_temp.xml"); + + if (!Files.exists(tempFile)) { + Files.createFile(tempFile); } - return tempDirChild.toAbsolutePath(); - } catch (IOException exception) { - throw new UncheckedIOException("Failed to create temp file", - exception); + return tempFile.toAbsolutePath(); + } catch (IOException e) { + throw new UncheckedIOException("Named temp file create failed", e); } } + /** * Retrieves config.xml from the jar and writes it to the systems temp. * directory. 
@@ -416,7 +352,7 @@ private File writeConfigFileWithFiles() throws IOException { in = getPipelineConfigXML().openStream(); } - configFile = createTempFile(Paths.get("config_temp.xml")); + configFile = createNamedTempFile(); try { Files.copy(in, configFile, StandardCopyOption.REPLACE_EXISTING); @@ -568,55 +504,42 @@ private String generateJenkinsfile() throws IOException { + "//\n" + "// ===========================================================\n" + "\n" - + "VC_Manage_Project = \'" + getManageProjectName() + "\'\n" - + "VC_EnvSetup = '''" + setup + "'''\n" - + "VC_Build_Preamble = \"" + preamble + "\"\n" - + "VC_EnvTeardown = '''" + teardown + "'''\n" + + "def VC_Manage_Project = \'" + getManageProjectName() + "\'\n" + + "def VC_EnvSetup = '''" + setup + "'''\n" + + "def VC_Build_Preamble = \"" + preamble + "\"\n" + + "def VC_EnvTeardown = '''" + teardown + "'''\n" + "def scmStep () { " + pipelineSCM + " }\n" - + "VC_usingSCM = " + + "def VC_usingSCM = " + String.valueOf(pipelineSCM.length() != 0) + "\n" - + "VC_postScmStepsCmds = '''" + postCheckoutCmds + "'''\n" - + "VC_sharedArtifactDirectory = '''" + + "def VC_postScmStepsCmds = '''" + postCheckoutCmds + "'''\n" + + "def VC_sharedArtifactDirectory = '''" + sharedArtifactDirectory + "'''\n" - + "VC_Agent_Label = '" + getNodeLabel() + "'\n" - + "VC_waitTime = '" + getWaitTime() + "'\n" - + "VC_waitLoops = '" + getWaitLoops() + "'\n" - + "VC_maxParallel = " + getMaxParallel().toString() + "\n" - + "VC_useOneCheckoutDir = " + singleCheckout + "\n" - + "VC_UseCILicense = " + vcUseCi + "\n" - + "VC_useCBT = " + incremental + "\n" - + "VC_useCoveragePlugin = " + getUseCoveragePlugin() + "\n" - + "VC_createdWithVersion = '" + + "def VC_Agent_Label = '" + getNodeLabel() + "'\n" + + "def VC_waitTime = '" + getWaitTime() + "'\n" + + "def VC_waitLoops = '" + getWaitLoops() + "'\n" + + "def VC_maxParallel = " + getMaxParallel().toString() + "\n" + + "def VC_useOneCheckoutDir = " + singleCheckout + "\n" + + "def VC_useCILicense = " + vcUseCi + "\n" + + "def VC_useCBT = " + incremental + "\n" + + "def VC_useCoveragePlugin = " + getUseCoveragePlugin() + "\n" + + "def VC_createdWithVersion = '" + VcastUtils.getVersion().orElse("Unknown") + "'\n" - + "VC_usePCLintPlus = " + + "def VC_usePCLintPlus = " + String.valueOf(getPclpCommand().length() != 0) + "\n" - + "VC_pclpCommand = '" + getPclpCommand() + "'\n" - + "VC_pclpResultsPattern = '" + getPclpResultsPattern() + "'\n" - + "VC_useSquore = " + + "def VC_pclpCommand = '" + getPclpCommand() + "'\n" + + "def VC_pclpResultsPattern = '" + getPclpResultsPattern() + "'\n" + + "def VC_useSquore = " + String.valueOf(getSquoreCommand().length() != 0) + "\n" - + "VC_squoreCommand = '''" + getSquoreCommand() + "'''\n" - + "VC_useTESTinsights = " - + String.valueOf(getTestInsightsUrl().length() != 0) + "\n" - + "VC_TESTinsights_URL = '" + getTestInsightsUrl() + "'\n" - + "VC_TESTinsights_Project = \"" - + getTestInsightsProject() + "\"\n" - + "VC_TESTinsights_Proxy = '" + getTestInsightsProxy() + "'\n" - + "VC_TESTinsights_Credential_ID = '" - + getTestInsightsCredentialsId() + "'\n" - + "VC_TESTinsightsScmUrl = '" - + getTestInsightsScmUrl() + "'\n" - + "VC_TESTinsights_SCM_Tech = '" - + getTestInsightsScmTech() + "'\n" - + "VC_TESTinsights_Revision = \"\"\n" - + "VC_useCoverageHistory = " + getUseCoverageHistory() + "\n" - + "VC_useStrictImport = " + getUseStrictTestcaseImport() + "\n" - + "VC_useRGW3 = " + getUseRGW3() + "\n" - + "VC_useImportedResults = " + getUseImportedResults() + "\n" - + 
"VC_useLocalImportedResults = " + + "def VC_squoreCommand = '''" + getSquoreCommand() + "'''\n" + + "def VC_useCoverageHistory = " + getUseCoverageHistory() + "\n" + + "def VC_useStrictImport = " + getUseStrictTestcaseImport() + "\n" + + "def VC_useRGW3 = " + getUseRGW3() + "\n" + + "def VC_useImportedResults = " + getUseImportedResults() + "\n" + + "def VC_useLocalImportedResults = " + getUseLocalImportedResults() + "\n" - + "VC_useExternalImportedResults = " + + "def VC_useExternalImportedResults = " + getUseExternalImportedResults() + "\n" - + "VC_externalResultsFilename = \"" + + "def VC_externalResultsFilename = \"" + getExternalResultsFilename() + "\"\n" + "\n"; diff --git a/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewSingleJob.java b/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewSingleJob.java index 83010809..3c4a9590 100644 --- a/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewSingleJob.java +++ b/src/main/java/com/vectorcast/plugins/vectorcastexecution/job/NewSingleJob.java @@ -36,6 +36,7 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServletResponse; import org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.SecureGroovyScript; +import hudson.model.Descriptor.FormException; import org.jvnet.hudson.plugins.groovypostbuild.GroovyPostbuildRecorder; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; @@ -69,7 +70,6 @@ public NewSingleJob(final StaplerRequest request, * @param pluginVersion plugin version of the running plugin while create * @param rptFmt Report Format (HTML/TXT] * @param htmlOrText html or text version of the reports - * @param tiCommandStringWin TESTinsights command string * @param noGenExecReport don't generate execution report * @return String of configuration for Windows */ @@ -77,7 +77,6 @@ private String getWindowsConfig( final String pluginVersion, final String rptFmt, final String htmlOrText, - final String tiCommandStringWin, final String noGenExecReport) throws IOException { @@ -116,8 +115,7 @@ private String getWindowsConfig( win += getEnvironmentTeardownWin() + "\n" + getPclpCommand() + "\n" - + getSquoreCommand() + "\n" - + tiCommandStringWin; + + getSquoreCommand() + "\n"; return win; } @@ -127,7 +125,6 @@ private String getWindowsConfig( * @param pluginVersion plugin version of the running plugin while create * @param rptFmt Report Format (HTML/TXT] * @param htmlOrText html or text version of the reports - * @param tiCommandStringUnix TESTinsights command string * @param noGenExecReport don't generate execution report * @return String of configuration for unix */ @@ -135,7 +132,6 @@ private String getUnixConfig( final String pluginVersion, final String rptFmt, final String htmlOrText, - final String tiCommandStringUnix, final String noGenExecReport) throws IOException { @@ -174,8 +170,7 @@ private String getUnixConfig( unix += getEnvironmentTeardownUnix() + getPclpCommand() + "\n" - + getSquoreCommand() + "\n" - + tiCommandStringUnix + "\n"; + + getSquoreCommand() + "\n"; return unix; } @@ -275,120 +270,6 @@ private String getAdditonalEnvVarsWindows() { return addEnvVars; } - /** - * get the test insights settings. 
- * @return Stringp[] [windows,linux] settings - */ - private String[] getTestInsightsSettings() { - String tiScmConnectWin = ""; - String tiScmConnectUnix = ""; - String tiCommandStringWin = ""; - String tiCommandStringUnix = ""; - String scmInfoCommandWin = ""; - String scmInfoCmdUnix = ""; - String tiProxy = ""; - - if (getTestInsightsUrl().length() != 0) { - boolean setupConnect = false; - if (isUsingScm()) { - if (getTestInsightsScmTech().equals("git")) { - scmInfoCommandWin = "git config remote.origin.url > scm_url.tmp\n" - + "set /p SCM_URL= < scm_url.tmp\n" - + "git rev-parse HEAD > scm_rev.tmp\n" - + "set /p SCM_REV= < scm_rev.tmp\n"; - scmInfoCmdUnix = "SCM_URL=`git config remote.origin.url`\n" - + "SCM_REV=`git rev-parse HEAD`\n"; - setupConnect = true; - } - if (getTestInsightsScmTech().equals("svn")) { - scmInfoCommandWin = " " - + "svn info --show-item=url --no-newline > scm_url.tmp\n" - + "set /p SCM_URL= < scm_url.tmp\n" - + "git svn info --show-item revision > scm_rev.tmp\n" - + "set /p SCM_REV= < scm_rev.tmp\n"; - scmInfoCmdUnix = "SCM_URL=`svn info --show-item=url --no-newline`\n" - + "SCM_REV=`svn info --show-item revision`\n"; - setupConnect = true; - } - if (setupConnect) { - tiScmConnectWin = " " - + "--vc-project-local-path=%WORKSPACE%/%VCAST_PROJECT_NAME% " - + "--vc-project-scm-path=%SCM_URL%/%VCAST_PROJECT_NAME% " - + "--src-local-path=%WORKSPACE% " - + " --src-scm-path=%SCM_URL%/ " - + "--vc-project-scm-technology=" - + getTestInsightsScmTech() + " " - + "--src-scm-technology=" - + getTestInsightsScmTech() + " " - + "--vc-project-scm-revision=%SCM_REV% " - + "--src-scm-revision %SCM_REV% " - + "--versioned\n"; - tiScmConnectUnix = " " - + "--vc-project-local-path=$WORKSPACE/$VCAST_PROJECT_NAME " - + "--vc-project-scm-path=$SCM_URL/$VCAST_PROJECT_NAME " - + "--src-local-path=$WORKSPACE " - + "--src-scm-path=$SCM_URL/ " - + "--vc-project-scm-technology=" - + getTestInsightsScmTech() + " " - + " --src-scm-technology=" - + getTestInsightsScmTech() + " " - + "--vc-project-scm-revision=$SCM_REV " - + "--src-scm-revision $SCM_REV " - + "--versioned\n"; - } - } - if (setupConnect) { - tiCommandStringWin = scmInfoCommandWin; - tiCommandStringUnix = scmInfoCmdUnix; - } - if (getTestInsightsProxy().length() > 0) { - tiProxy = "--proxy " + getTestInsightsProxy(); - } - String tiProjectWin = ""; - String tiProjectUnix = ""; - - if (getTestInsightsProject().equals("env.JOB_BASE_NAME")) { - tiProjectWin = "%JOB_BASE_NAME%"; - tiProjectUnix = "$JOB_BASE_NAME"; - } else { - tiProjectWin = getTestInsightsProject(); - tiProjectUnix = getTestInsightsProject(); - } - - tiCommandStringWin += "testinsights_connector --api " - + getTestInsightsUrl() - + " --user %VC_TI_USR% " - + "--pass %VC_TI_PWS% " - + "--action PUSH " - + "--project " - + tiProjectWin + " " - + " --test-object %BUILD_NUMBER% " - + "--vc-project %VCAST_PROJECT_NAME% " - + tiProxy + " " - + "--log TESTinsights_Push.log " - + tiScmConnectWin; - tiCommandStringUnix += "testinsights_connector " - + "--api " - + getTestInsightsUrl() + " " - + "--user $VC_TI_USR " - + "--pass $VC_TI_PWS " - + "--action PUSH " - + "--project " - + tiProjectUnix + " " - + "--test-object $BUILD_NUMBER " - + "--vc-project $VCAST_PROJECT_NAME " - + tiProxy + " " - + "--log TESTinsights_Push.log " - + tiScmConnectUnix; - } - - String[] settings = new String[2]; - settings[0] = tiCommandStringWin; - settings[1] = tiCommandStringUnix; - - return settings; - } - /** * Add build commands step to job. 
*/ @@ -407,22 +288,19 @@ private void addCommandSingleJob() throws IOException { rptFmt = "TEXT"; } - String[] settings = getTestInsightsSettings(); - String tiCommandStringWin = settings[0]; - String tiCommandStringUnix = settings[1]; String pluginVersion = VcastUtils.getVersion().orElse("Unknown"); /* * Windows config portion */ String win = getWindowsConfig(pluginVersion, rptFmt, - htmlOrText, tiCommandStringWin, noGenExecReport); + htmlOrText, noGenExecReport); /* * Unix config portion */ String unix = getUnixConfig(pluginVersion, rptFmt, - htmlOrText, tiCommandStringUnix, noGenExecReport); + htmlOrText, noGenExecReport); VectorCASTCommand command = new VectorCASTCommand(win, unix); if (!getTopProject().getBuildersList().add(command)) { @@ -455,12 +333,19 @@ private void addGroovyScriptSingleJob() throws IOException { script = script.replace("@PROJECT_BASE@", getBaseName()); - SecureGroovyScript secureScript = - new SecureGroovyScript( - script, - false, /*sandbox*/ - null /*classpath*/ - ); + SecureGroovyScript secureScript = null; + + try { + secureScript = + new SecureGroovyScript( + script, + false, /*sandbox*/ + null /*classpath*/ + ); + } catch (FormException ex) { + Logger.getLogger(NewSingleJob.class.getName()). + log(Level.INFO, null, ex); + } GroovyPostbuildRecorder groovy = new GroovyPostbuildRecorder( secureScript, @@ -537,9 +422,6 @@ public void doCreate() } else { addVCCoverage(getTopProject()); } - if (getTestInsightsUrl().length() != 0) { - addCredentialID(getTopProject()); - } } addGroovyScriptSingleJob(); diff --git a/src/main/resources/lib/VectorCAST/additionalTools.jelly b/src/main/resources/lib/VectorCAST/additionalTools.jelly index c24fe501..047a9d9d 100644 --- a/src/main/resources/lib/VectorCAST/additionalTools.jelly +++ b/src/main/resources/lib/VectorCAST/additionalTools.jelly @@ -46,23 +46,6 @@ - - - - - - - - - - - - - - - - - diff --git a/src/main/resources/lib/VectorCAST/additionalTools.properties b/src/main/resources/lib/VectorCAST/additionalTools.properties index c7af05ae..6bf95539 100644 --- a/src/main/resources/lib/VectorCAST/additionalTools.properties +++ b/src/main/resources/lib/VectorCAST/additionalTools.properties @@ -33,15 +33,3 @@ VCJob.AdditionalTools.pclp.result_pattern.description = Syntax specifying the f VCJob.AdditionalTools.squore.command.title = Command to push data to Squore VCJob.AdditionalTools.squore.command.description = Command to push data to Squore. If using a .bat file, you will want to use "call [Squore batch file]". 
-VCJob.AdditionalTools.TESTinsights.URL.title = TESTinsights URL -VCJob.AdditionalTools.TESTinsights.URL.description = URL to TESTinsights server and project (Use Copy Team Area Url) - -VCJob.AdditionalTools.TESTinsights.project.title = TESTinsights Project -VCJob.AdditionalTools.TESTinsights.project.description = Project Name in TESTinsights to push results - Leave blank to use Job Name - -VCJob.AdditionalTools.TESTinsights.credentials_id.title = TESTinsights Credential ID -VCJob.AdditionalTools.TESTinsights.credentials_id.description = Credential ID from Jenkins for TESTinsights - -VCJob.AdditionalTools.TESTinsights.proxy.title = Proxy to push to TESTinsights server -VCJob.AdditionalTools.TESTinsights.proxy.description = If needed, use proxy.address:port - diff --git a/src/main/resources/scripts/__init__.py b/src/main/resources/scripts/__init__.py index e69de29b..de798d87 100644 --- a/src/main/resources/scripts/__init__.py +++ b/src/main/resources/scripts/__init__.py @@ -0,0 +1,24 @@ +# +# The MIT License +# +# Copyright 2020 Vector Informatik, GmbH. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# + diff --git a/src/main/resources/scripts/archive_extract_reports.py b/src/main/resources/scripts/archive_extract_reports.py index e262d2c9..9877defe 100644 --- a/src/main/resources/scripts/archive_extract_reports.py +++ b/src/main/resources/scripts/archive_extract_reports.py @@ -18,7 +18,16 @@ def extract(verbose = False): if verbose: print("extracting old report " + f.name) tf.extract(f) - + + # Set timestamp explicitly (Windows fix) + if f.mtime is not None: + full_path = os.path.join(os.getcwd(), f.name) + try: + os.utime(full_path, (f.mtime, f.mtime)) + except Exception as e: + if verbose: + print("Could not set time for {}: {}".format(f.name, e)) + def archive(verbose = False): if os.path.exists(archive_name): os.remove(archive_name) diff --git a/src/main/resources/scripts/baseJenkinsfile.groovy b/src/main/resources/scripts/baseJenkinsfile.groovy index b8fcb38d..462d4e39 100644 --- a/src/main/resources/scripts/baseJenkinsfile.groovy +++ b/src/main/resources/scripts/baseJenkinsfile.groovy @@ -1,10 +1,9 @@ // Code Coverage threshold numbers // Basis path coverage is no longer support after VectorCAST 2019SP1 -VC_Healthy_Target = [ maxStatement: 100, maxBranch: 100, maxFunctionCall: 100, maxFunction: 100, maxMCDC: 100, - minStatement: 20, minBranch: 20, minFunctionCall: 20, minFunction: 20, minMCDC: 20] +def VC_Healthy_Target = [ maxStatement: 100, maxBranch: 100, maxFunctionCall: 100, maxFunction: 100, maxMCDC: 100, + minStatement: 20, minBranch: 20, minFunctionCall: 20, minFunction: 20, minMCDC: 20] - -VC_Use_Threshold = true +def VC_Use_Threshold = true // =============================================================== // @@ -12,44 +11,56 @@ VC_Use_Threshold = true // // =============================================================== +// =============================================================== +// +// Function : getFailureUnstablePhrases +// Inputs : N/A +// Action : Builds two lists and returns them +// Returns : Returns a list of failure/unstable phrases +// Notes : +// +// =============================================================== + +def getFailureUnstablePhrases() { + + def FailurePhrases = ["No valid edition(s) available", + "py did not execute correctly", + "Traceback (most recent call last)", + "Failed to acquire lock on environment", + "Environment Creation Failed", + "Error with Test Case Management Report", + "FLEXlm Error", + "Unable to obtain license", + "INCR_BUILD_FAILED", + "Environment was not successfully built", + "NOT_LINKED", + "Preprocess Failed", + "Abnormal Termination on Environment", + "not recognized as an internal or external command", + "Another Workspace with this path already exists", + "Destination directory or database is not writable", + "Could not acquire a read lock on the project's vcm file", + "No environments found in ", + ".vcm is invalid", + "Invalid Workspace. Please ensure the directory and database contain write permission", + "The environment is invalid because", + "Please ensure that the project has the proper permissions and that the environment is not being accessed by another process.", + "Error: That command is not permitted in continuous integration mode", + "has been opened by a newer version of VectorCAST. 
Please upgrade this version of VectorCAST to open the project" + ] + + def UnstablePhrases = ["Dropping probe point", + "Value Line Error - Command Ignored", + "INFO: Problem parsing test results file", + "INFO: File System Error ", + "ERROR: Error accessing DataAPI", + "ERROR: Undefined Error", + "Unapplied Test Data" + ] + + return [FailurePhrases, UnstablePhrases] +} -// Failure phrases for checkLogsForErrors - -VC_FailurePhrases = ["No valid edition(s) available", - "py did not execute correctly", - "Traceback (most recent call last)", - "Failed to acquire lock on environment", - "Environment Creation Failed", - "Error with Test Case Management Report", - "FLEXlm Error", - "Unable to obtain license", - "INCR_BUILD_FAILED", - "Environment was not successfully built", - "NOT_LINKED", - "Preprocess Failed", - "Abnormal Termination on Environment", - "not recognized as an internal or external command", - "Another Workspace with this path already exists", - "Destination directory or database is not writable", - "Could not acquire a read lock on the project's vcm file", - "No environments found in ", - ".vcm is invalid", - "Invalid Workspace. Please ensure the directory and database contain write permission", - "The environment is invalid because", - "Please ensure that the project has the proper permissions and that the environment is not being accessed by another process.", - "Error: That command is not permitted in continuous integration mode", - "has been opened by a newer version of VectorCAST. Please upgrade this version of VectorCAST to open the project" - ] - -// Unstable phrases for checkLogsForErrors -VC_UnstablePhrases = ["Dropping probe point", - "Value Line Error - Command Ignored", - "INFO: Problem parsing test results file", - "INFO: File System Error ", - "ERROR: Error accessing DataAPI", - "ERROR: Undefined Error", - "Unapplied Test Data" - ] // =============================================================== // @@ -67,9 +78,14 @@ def checkLogsForErrors(log) { def boolean unstable_flag = false; def foundKeywords = "" + def FailurePhrases = "" + def UnstablePhrases = "" + + (FailurePhrases, UnstablePhrases) = getFailureUnstablePhrases() + // Check for unstable first // Loop over all the unstable keywords above - VC_UnstablePhrases.each { + UnstablePhrases.each { if (log.contains(it)) { // found a phrase considered unstable, mark the build accordingly foundKeywords = foundKeywords + it + ", " @@ -79,7 +95,7 @@ def checkLogsForErrors(log) { // The check for failure keywords first // Loop over all the failure keywords above - VC_FailurePhrases.each { + FailurePhrases.each { if (log.contains(it)) { // found a phrase considered failure, mark the build accordingly foundKeywords = foundKeywords + it + ", " @@ -116,6 +132,7 @@ def pluginCreateSummary(inIcon, inText) { } } + // =============================================================== // // Function : checkBuildLogForErrors @@ -133,8 +150,13 @@ def checkBuildLogForErrors(logFile) { def foundKeywords = "" def output = "" def status = 0 + def FailurePhrases = "" + def UnstablePhrases = "" + def cmd = "" - writeFile file: "phrases.txt", text: VC_UnstablePhrases.join("\n") + "\n" + VC_FailurePhrases.join("\n") + (FailurePhrases, UnstablePhrases) = getFailureUnstablePhrases() + + writeFile file: "phrases.txt", text: UnstablePhrases.join("\n") + "\n" + FailurePhrases.join("\n") if (isUnix()) { cmd = "grep -f phrases.txt " + logFile + " > search_results.txt" @@ -162,36 +184,6 @@ def checkBuildLogForErrors(logFile) { // // 
*************************************************************** -// =============================================================== -// -// Function : get_SCM_rev -// Inputs : None -// Action : Returns SCM revision from git or svn -// Notes : Used for TESTinsight Command -// -// =============================================================== - -def get_SCM_rev() { - def scm_rev = "" - def cmd = "" - - if (VC_TESTinsights_SCM_Tech=='git') { - cmd = "git rev-parse HEAD" - } else { - cmd = "svn info --show-item revision" - } - - if (isUnix()) { - scm_rev = sh returnStdout: true, script: cmd - } else { - cmd = "@echo off \n " + cmd - scm_rev = bat returnStdout: true, script: cmd - } - - println "Git Rev Reply " + scm_rev.trim() + "***" - return scm_rev.trim() -} - // *************************************************************** // // File/Pathing Utilities @@ -207,9 +199,9 @@ def get_SCM_rev() { // // =============================================================== -def getMPname() { +def getMPname(manageProject) { // get the manage projects full name and base name - def mpFullName = VC_Manage_Project.split("/")[-1] + def mpFullName = manageProject.split("/")[-1] def mpName = "" if (mpFullName.toLowerCase().endsWith(".vcm")) { mpName = mpFullName.take(mpFullName.lastIndexOf('.')) @@ -246,9 +238,9 @@ def stripLeadingWhitespace(str) { // Notes : Used for accessing the build directory // // =============================================================== -def getMPpath() { +def getMPpath(manageProject) { // get the manage projects full name and base name - def mpFullName = VC_Manage_Project + def mpFullName = manageProject def mpPath = "" if (mpFullName.toLowerCase().endsWith(".vcm")) { mpPath = mpFullName.take(mpFullName.lastIndexOf('.')) @@ -300,7 +292,7 @@ def fixUpName(name) { // // =============================================================== -def concatenateBuildLogs(logFileNames, outputFileName) { +def concatenateBuildLogs(logFileNames, outputFileName, envSetup, useCILicense) { def cmd = "" if (isUnix()) { @@ -311,7 +303,7 @@ def concatenateBuildLogs(logFileNames, outputFileName) { cmd += logFileNames + " > " + outputFileName - runCommands(cmd) + runCommands(cmd, envSetup, useCILicense) } @@ -335,7 +327,7 @@ def concatenateBuildLogs(logFileNames, outputFileName) { // Notes : Used widely // // =============================================================== -def runCommands(cmds) { +def runCommands(cmds, envSetup, useCILicense) { def boolean failure = false; def boolean unstable_flag = false; def foundKeywords = "" @@ -350,15 +342,14 @@ def runCommands(cmds) { // add extra env vars to make debugging of commands useful // add extra env for reports localCmds = """ - ${VC_EnvSetup} + ${envSetup} export VCAST_RPTS_PRETTY_PRINT_HTML=FALSE export VCAST_NO_FILE_TRUNCATION=1 export VCAST_RPTS_SELF_CONTAINED=FALSE - """ // if using CI licenses add in both CI license env vars - if (VC_UseCILicense.length() != 0) { + if (useCILicense.length() != 0) { localCmds += """ export VCAST_USING_HEADLESS_MODE=1 export VCAST_USE_CI_LICENSES=1 @@ -366,7 +357,7 @@ def runCommands(cmds) { } cmds = localCmds + cmds cmds = stripLeadingWhitespace(cmds.replaceAll("_VECTORCAST_DIR","\\\$VECTORCAST_DIR").replaceAll("_RM","rm -rf ").replaceAll("_COPY","cp -p ").replaceAll("_IF_EXIST","if [[ -f ").replaceAll("_IF_THEN"," ]] ; then ").replaceAll("_ENDIF","; fi") ) - println "Running commands: " + cmds + println "Running Linux Command: " + cmds // run command in shell sh label: 'Running VectorCAST Commands', returnStdout: 
false, script: cmds @@ -377,7 +368,7 @@ def runCommands(cmds) { // add extra env for reports localCmds = """ @echo off - ${VC_EnvSetup} + ${envSetup} set VCAST_RPTS_PRETTY_PRINT_HTML=FALSE set VCAST_NO_FILE_TRUNCATION=1 set VCAST_RPTS_SELF_CONTAINED=FALSE @@ -385,7 +376,7 @@ def runCommands(cmds) { """ // if using CI licenses add in both CI license env vars - if (VC_UseCILicense.length() != 0) { + if (useCILicense.length() != 0) { localCmds += """ set VCAST_USING_HEADLESS_MODE=1 set VCAST_USE_CI_LICENSES=1 @@ -393,16 +384,21 @@ def runCommands(cmds) { } cmds = localCmds + cmds cmds = stripLeadingWhitespace(cmds.replaceAll("_VECTORCAST_DIR","%VECTORCAST_DIR%").replaceAll("_RM","DEL /Q ").replaceAll("_COPY","copy /y /b").replaceAll("_IF_EXIST","if exist ").replaceAll("_IF_THEN"," ( ").replaceAll("_ENDIF"," )")) - println "Running commands: " + cmds + println "Running Windows Command: " + cmds // run command in bat bat label: 'Running VectorCAST Commands', returnStdout: false, script: cmds } // read back the command.log - this is specific to - log = readFile "command.log" - - println "Commands Output: " + log + def log = "" + + if (fileExists("command.log")) { + log = readFile "command.log" + println "Commands Output: " + log + } else { + println "Error getting command.log from these commands: " + cmds + } return log } @@ -418,51 +414,55 @@ def runCommands(cmds) { // // =============================================================== -def setupManageProject() { - def mpName = getMPname() +def setupManageProject(waitTime, waitLoops, manageProject, useCILicense, + sharedArtifactDirectory, envSetup, useExternalImportedResults, + useImportedResults, useLocalImportedResults, externalResultsFilename, + useStrictImport, useOneCheckoutDir) { + + def mpName = getMPname(manageProject) def cmds = """""" - if (VC_sharedArtifactDirectory.length() > 0) { + if (sharedArtifactDirectory.length() > 0) { cmds += """ - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} ${VC_sharedArtifactDirectory} --status" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} ${sharedArtifactDirectory} --status" """ } - if (VC_useStrictImport) { + if (useStrictImport) { cmds += """ - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --config=VCAST_STRICT_TEST_CASE_IMPORT=TRUE" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --config=VCAST_STRICT_TEST_CASE_IMPORT=TRUE" """ } cmds += """ _RM *_rebuild.html - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --status" - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --force --release-locks" - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" 
${VC_UseCILicense} --config VCAST_CUSTOM_REPORT_FORMAT=HTML" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --status" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --force --release-locks" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --config VCAST_CUSTOM_REPORT_FORMAT=HTML" """ - if (VC_useOneCheckoutDir) { - cmds += """_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --config VCAST_DEPENDENCY_CACHE_DIR=./vcqik" """ + if (useOneCheckoutDir) { + cmds += """_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --config VCAST_DEPENDENCY_CACHE_DIR=./vcqik" """ } - if (VC_useImportedResults) { - if (VC_useLocalImportedResults) { + if (useImportedResults) { + if (useLocalImportedResults) { try { copyArtifacts filter: "${mpName}_results.vcr", fingerprintArtifacts: true, optional: true, projectName: "${env.JOB_NAME}", selector: lastSuccessful() cmds += """ - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --force --import-result=${mpName}_results.vcr" - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --status" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --force --import-result=${mpName}_results.vcr" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --status" _IF_EXIST ${mpName}_results.vcr _IF_THEN _COPY ${mpName}_results.vcr ${mpName}_results_orig.vcr _ENDIF """ } catch(Exception e) { print "No result artifact to use" } - } else if (VC_useExternalImportedResults) { - if (VC_externalResultsFilename.length() != 0) { + } else if (useExternalImportedResults) { + if (externalResultsFilename.length() != 0) { cmds += """ - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --force --import-result=${VC_externalResultsFilename}" - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --status" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --force --import-result=${externalResultsFilename}" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} 
--wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --status" """ } else { error ("External result specified, but external result file is blank") @@ -471,7 +471,7 @@ def setupManageProject() { } - runCommands(cmds) + runCommands(cmds, envSetup, useCILicense) } // =============================================================== @@ -484,14 +484,19 @@ def setupManageProject() { // // =============================================================== -def transformIntoStep(inputString) { - +def transformIntoStep(inputString, useOneCheckoutDir, usingSCM, envSetup, + useRGW3, waitTime, waitLoops, manageProject, useCILicense, + envTeardown, useCBT, sharedArtifactDirectory, + useExternalImportedResults, buildPreamble, useImportedResults, + useStrictImport, useLocalImportedResults, externalResultsFilename) { + def compiler = "" def test_suite = "" - def environment = "" - def source = "" - def machine = "" + def environment = "" + def source = "" + def machine = "" def level = "" + def trimmedLine = inputString.trim() def wordCount = trimmedLine.split(/\s+/).length if (wordCount == 3) { @@ -511,6 +516,8 @@ def transformIntoStep(inputString) { return { catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { + def cmds = "" + // Try to use VCAST_FORCE_NODE_EXEC_NAME parameter. // If 0 length or not present, use the compiler name as a node label def nodeID = "default" @@ -534,7 +541,7 @@ def transformIntoStep(inputString) { println "Starting Build-Execute Stage for ${compiler}/${test_suite}/${environment}" // if we are not using a single checkout directory - if (!VC_useOneCheckoutDir) { + if (!useOneCheckoutDir) { // call the scmStep for each job scmStep() @@ -544,26 +551,29 @@ def transformIntoStep(inputString) { step([$class: 'VectorCASTSetup']) // if we are not using a single checkout directory and using SCM step - if (VC_usingSCM && !VC_useOneCheckoutDir) { + if (usingSCM && !useOneCheckoutDir) { // set options for each manage project pulled out out of SCM - setupManageProject() + setupManageProject(waitTime, waitLoops, manageProject, useCILicense, + sharedArtifactDirectory, envSetup, useExternalImportedResults, + useImportedResults, useLocalImportedResults, externalResultsFilename, + useStrictImport, useOneCheckoutDir) } // setup the commands for building, executing, and transferring information - if (VC_useRGW3) { + if (useRGW3) { cmds = """ - ${VC_EnvSetup} - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/patch_rgw_directory.py "${VC_Manage_Project}" - ${VC_Build_Preamble} _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --level ${level} -e ${environment} --build-execute ${VC_useCBT} --output ${compiler}_${test_suite}_${environment}_rebuild.html" - ${VC_EnvTeardown} + ${envSetup} + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/patch_rgw_directory.py "${manageProject}" + ${buildPreamble} _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --level ${level} -e ${environment} --build-execute ${useCBT} --output ${compiler}_${test_suite}_${environment}_rebuild.html" + ${envTeardown} """ } else { cmds = """ - ${VC_EnvSetup} - ${VC_Build_Preamble} _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line 
"--project "${VC_Manage_Project}" ${VC_UseCILicense} --level ${level} -e ${environment} --build-execute ${VC_useCBT} --output ${compiler}_${test_suite}_${environment}_rebuild.html" - ${VC_EnvTeardown} + ${envSetup} + ${buildPreamble} _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${waitTime} --wait_loops ${waitLoops} --command_line "--project "${manageProject}" ${useCILicense} --level ${level} -e ${environment} --build-execute ${useCBT} --output ${compiler}_${test_suite}_${environment}_rebuild.html" + ${envTeardown} """ } @@ -571,7 +581,7 @@ def transformIntoStep(inputString) { def buildLogText = "" // run the build-execute step and save the results - buildLogText = runCommands(cmds) + buildLogText = runCommands(cmds, envSetup, useCILicense) def foundKeywords = "" def boolean failure = false @@ -581,12 +591,12 @@ def transformIntoStep(inputString) { (foundKeywords, failure, unstable_flag) = checkLogsForErrors(buildLogText) // if we didn't fail and don't have a shared artifact directory - we may have to copy back build directory artifacts... - if (!failure && VC_sharedArtifactDirectory.length() == 0) { + if (!failure && sharedArtifactDirectory.length() == 0) { // if we are using an SCM checkout and we aren't using a single checkout directory, we need to copy back build artifacts - if (VC_usingSCM && !VC_useOneCheckoutDir) { + if (usingSCM && !useOneCheckoutDir) { def fixedJobName = fixUpName("${env.JOB_NAME}") - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/copy_build_dir.py ${VC_Manage_Project} --level ${level} --basename ${fixedJobName}_${compiler}_${test_suite}_${environment} --environment ${environment}""" ) + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/copy_build_dir.py ${manageProject} --level ${level} --basename ${fixedJobName}_${compiler}_${test_suite}_${environment} --environment ${environment}""", envSetup, useCILicense) } } @@ -609,7 +619,6 @@ def transformIntoStep(inputString) { // if something failed, raise an error if (failure) { error ("Error in Commands: " + foundKeywords) - // else if something made the job unstable, mark as unsable } else if (unstable_flag) { unstable("Triggering stage unstable because keywords found: " + foundKeywords) @@ -628,20 +637,29 @@ def transformIntoStep(inputString) { // Notes : Use to get a list of system and unit tests jobs // // =============================================================== -def stepsForJobList(localEnvList) { - jobList = [:] +def stepsForJobList(localEnvList, useOneCheckoutDir, usingSCM, envSetup, + useRGW3, waitTime, waitLoops, manageProject, useCILicense, + envTeardown, useCBT, sharedArtifactDirectory, + useExternalImportedResults, buildPreamble, useImportedResults, + useStrictImport, useLocalImportedResults, externalResultsFilename) { + + def jobList = [:] localEnvList.each { - jobList[it] = transformIntoStep(it) + jobList[it] = transformIntoStep(it, useOneCheckoutDir, usingSCM, envSetup, + useRGW3, waitTime, waitLoops, manageProject, useCILicense, + envTeardown, useCBT, sharedArtifactDirectory, + useExternalImportedResults, buildPreamble, useImportedResults, + useStrictImport, useLocalImportedResults, externalResultsFilename) } return jobList } // global environment list used to create pipeline jobs -EnvList = [] -UtEnvList = [] -StEnvList = [] -origManageProject = VC_Manage_Project +def EnvList = [] +def UtEnvList = [] +def StEnvList = [] +def origManageProject = VC_Manage_Project // 
*************************************************************** @@ -693,11 +711,11 @@ pipeline { if (VC_useOneCheckoutDir && !usingExternalRepo) { // we need to convert all the future job's workspaces to point to the original checkout - VC_OriginalWorkspace = "${env.WORKSPACE}" - println "scmStep executed here: " + VC_OriginalWorkspace + def originalWorkspace = "${env.WORKSPACE}" + println "scmStep executed here: " + originalWorkspace scmStep() - print "Updating " + VC_Manage_Project + " to: " + VC_OriginalWorkspace + "/" + VC_Manage_Project - VC_Manage_Project = VC_OriginalWorkspace + "/" + VC_Manage_Project + print "Updating " + VC_Manage_Project + " to: " + originalWorkspace + "/" + VC_Manage_Project + VC_Manage_Project = originalWorkspace + "/" + VC_Manage_Project def origSetup = VC_EnvSetup def origTeardown = VC_EnvTeardown @@ -705,30 +723,30 @@ pipeline { def orig_VC_postScmStepsCmds = VC_postScmStepsCmds if (isUnix()) { - VC_EnvSetup = VC_EnvSetup.replace("\$WORKSPACE" ,VC_OriginalWorkspace) - VC_EnvTeardown = VC_EnvTeardown.replace("\$WORKSPACE" ,VC_OriginalWorkspace) - VC_sharedArtifactDirectory = VC_sharedArtifactDirectory.replace("\$WORKSPACE" ,VC_OriginalWorkspace) - VC_postScmStepsCmds = VC_postScmStepsCmds.replace("\$WORKSPACE" ,VC_OriginalWorkspace) + VC_EnvSetup = VC_EnvSetup.replace("\$WORKSPACE" ,originalWorkspace) + VC_EnvTeardown = VC_EnvTeardown.replace("\$WORKSPACE" ,originalWorkspace) + VC_sharedArtifactDirectory = VC_sharedArtifactDirectory.replace("\$WORKSPACE" ,originalWorkspace) + VC_postScmStepsCmds = VC_postScmStepsCmds.replace("\$WORKSPACE" ,originalWorkspace) } else { - VC_OriginalWorkspace = VC_OriginalWorkspace.replace('\\','/') + originalWorkspace = originalWorkspace.replace('\\','/') def tmpInfo = "" // replace case insensitive workspace with WORKSPACE tmpInfo = VC_EnvSetup.replaceAll("(?i)%WORKSPACE%","%WORKSPACE%") - VC_EnvSetup = tmpInfo.replace("%WORKSPACE%",VC_OriginalWorkspace) + VC_EnvSetup = tmpInfo.replace("%WORKSPACE%",originalWorkspace) // replace case insensitive workspace with WORKSPACE tmpInfo = VC_EnvTeardown.replaceAll("(?i)%WORKSPACE%","%WORKSPACE%") - VC_EnvTeardown = tmpInfo.replace("%WORKSPACE%",VC_OriginalWorkspace) + VC_EnvTeardown = tmpInfo.replace("%WORKSPACE%",originalWorkspace) // replace case insensitive workspace with WORKSPACE tmpInfo = VC_sharedArtifactDirectory.replaceAll("(?i)%WORKSPACE%","%WORKSPACE%") - VC_sharedArtifactDirectory = tmpInfo.replace("%WORKSPACE%" ,VC_OriginalWorkspace) + VC_sharedArtifactDirectory = tmpInfo.replace("%WORKSPACE%" ,originalWorkspace) // replace case insensitive workspace with WORKSPACE tmpInfo = VC_postScmStepsCmds.replaceAll("(?i)%WORKSPACE%","%WORKSPACE%") - VC_postScmStepsCmds = tmpInfo.replace("%WORKSPACE%" ,VC_OriginalWorkspace) + VC_postScmStepsCmds = tmpInfo.replace("%WORKSPACE%" ,originalWorkspace) } print "Updating setup script " + origSetup + " \nto: " + VC_EnvSetup print "Updating teardown script " + origTeardown + " \nto: " + origTeardown @@ -737,7 +755,7 @@ pipeline { // If there are post SCM checkout steps, do them now if (VC_postScmStepsCmds.length() > 0) { - runCommands(VC_postScmStepsCmds) + runCommands(VC_postScmStepsCmds, VC_EnvSetup, VC_useCILicense) } } else { if (usingExternalRepo) { @@ -766,7 +784,7 @@ pipeline { // If there are post SCM checkout steps, do them now if (VC_postScmStepsCmds.length() > 0) { - runCommands(VC_postScmStepsCmds) + runCommands(VC_postScmStepsCmds, VC_EnvSetup, VC_useCILicense) } } @@ -775,14 +793,14 @@ pipeline { // Run the setup step to copy 
over the scripts step([$class: 'VectorCASTSetup']) - runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/archive_extract_reports.py --archive""") + runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/archive_extract_reports.py --archive""", VC_EnvSetup, VC_useCILicense) // ------------------------------------------------------------------------------------------- - // this part could be done with Manage_Project.getJobs() but it doesn't seem to be working VVV + // this part could be done with manageProject.getJobs() but it doesn't seem to be working VVV def EnvData = "" // Run a script to determine the compiler test_suite and environment - EnvData = runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/getjobs.py ${VC_Manage_Project} --type""") + EnvData = runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/getjobs.py ${VC_Manage_Project} --type""", VC_EnvSetup, VC_useCILicense) // for a groovy list that is stored in a global variable EnvList to be use later in multiple places def lines = EnvData.split('\n') @@ -800,7 +818,7 @@ pipeline { def compiler = "" def test_suite = "" def environment = "" - + if (wordCount == 4) { (testType, compiler, test_suite, environment) = trimmedString.split() } else if (wordCount == 6) { @@ -808,7 +826,7 @@ pipeline { } else { print(trimmedString + " isn't splitting into 4/6 elements " + wordCount) } - + if (testType == "ST:") { trimmedString = compiler + " " + test_suite + " " + environment + " " + source + " " + machine // print("ST:" + trimmedString) @@ -839,10 +857,17 @@ pipeline { stage('System Test Build-Execute Stage') { steps { script { - setupManageProject() + setupManageProject(VC_waitTime, VC_waitLoops, VC_Manage_Project, VC_useCILicense, + VC_sharedArtifactDirectory, VC_EnvSetup, VC_useExternalImportedResults, + VC_useImportedResults, VC_useLocalImportedResults, VC_externalResultsFilename, + VC_useStrictImport, VC_useOneCheckoutDir) // Get the job list from the system test environment listed - jobs = stepsForJobList(StEnvList) + def jobs = stepsForJobList(StEnvList, VC_useOneCheckoutDir, VC_usingSCM, VC_EnvSetup, + VC_useRGW3, VC_waitTime, VC_waitLoops, VC_Manage_Project, VC_useCILicense, + VC_EnvTeardown, VC_useCBT, VC_sharedArtifactDirectory, + VC_useExternalImportedResults, VC_Build_Preamble, VC_useImportedResults, + VC_useStrictImport, VC_useLocalImportedResults, VC_externalResultsFilename) // run each of those jobs in serial jobs.each { name, job -> @@ -857,10 +882,17 @@ pipeline { stage('Unit Test Build-Execute Stage') { steps { script { - setupManageProject() + setupManageProject(VC_waitTime, VC_waitLoops, VC_Manage_Project, VC_useCILicense, + VC_sharedArtifactDirectory, VC_EnvSetup, VC_useExternalImportedResults, + VC_useImportedResults, VC_useLocalImportedResults, VC_externalResultsFilename, + VC_useStrictImport, VC_useOneCheckoutDir) // Get the job list from the unit test environment listed - jobs = stepsForJobList(UtEnvList) + def jobs = stepsForJobList(UtEnvList, VC_useOneCheckoutDir, VC_usingSCM, VC_EnvSetup, + VC_useRGW3, VC_waitTime, VC_waitLoops, VC_Manage_Project, VC_useCILicense, + VC_EnvTeardown, VC_useCBT, VC_sharedArtifactDirectory, + VC_useExternalImportedResults, VC_Build_Preamble, VC_useImportedResults, + VC_useStrictImport, VC_useLocalImportedResults, VC_externalResultsFilename) if (VC_maxParallel > 0) { def runningJobs = [:] @@ -896,7 +928,18 @@ pipeline { script { def buildLogText = "" def buildFileNames = "" + def compiler = "" + def test_suite = "" + def 
environment = "" + def source = "" + def machine = "" def level = "" + def cmds = "" + def boolean failure = false; + def boolean unstable_flag = false; + def foundKeywords = "" + + // Loop over all environnment and unstash each of the files // These files will be logs and build artifacts EnvList.each { @@ -921,7 +964,7 @@ pipeline { } if (VC_sharedArtifactDirectory.length() > 0) { def fixedJobName = fixUpName("${env.JOB_NAME}") - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/copy_build_dir.py ${VC_Manage_Project} --level ${level} --basename ${fixedJobName}_${compiler}_${test_suite}_${environment} --environment ${environment} --notar""") + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/copy_build_dir.py ${VC_Manage_Project} --level ${level} --basename ${fixedJobName}_${compiler}_${test_suite}_${environment} --environment ${environment} --notar""", VC_EnvSetup, VC_useCILicense) } } @@ -939,69 +982,69 @@ pipeline { cmds = """ _RM ${coverDBpath} _RM ${coverSfpDBpath} - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --refresh" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_useCILicense} --refresh" """ - buildLogText += runCommands(cmds) + buildLogText += runCommands(cmds, VC_EnvSetup, VC_useCILicense) } - concatenateBuildLogs(buildFileNames, "unstashed_build.log") + concatenateBuildLogs(buildFileNames, "unstashed_build.log", VC_EnvSetup, VC_useCILicense) // get the manage project's base name for use in rebuild naming - def mpName = getMPname() + def mpName = getMPname(VC_Manage_Project) // if we are using SCM and not using a shared artifact directory... 
if (VC_usingSCM && !VC_useOneCheckoutDir && VC_sharedArtifactDirectory.length() == 0) { // run a script to extract stashed files and process data into xml reports - def mpPath = getMPpath() + def mpPath = getMPpath(VC_Manage_Project) def coverDBpath = formatPath(mpPath + "/build/vcast_data/cover.db") def coverSfpDBpath = formatPath(mpPath + "/build/vcast_data/vcprj.db") cmds = """ _RM ${coverDBpath} _RM ${coverSfpDBpath} _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/extract_build_dir.py --leave_files - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --refresh" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_useCILicense} --refresh" """ - buildLogText += runCommands(cmds) + buildLogText += runCommands(cmds, VC_EnvSetup, VC_useCILicense) } // run the metrics at the end - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/generate-results.py ${VC_Manage_Project} --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --junit --buildlog unstashed_build.log""") - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/parallel_full_reports.py ${VC_Manage_Project} --jobs max""") + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/generate-results.py ${VC_Manage_Project} --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --junit --buildlog unstashed_build.log""", VC_EnvSetup, VC_useCILicense) + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/parallel_full_reports.py ${VC_Manage_Project} --jobs max""", VC_EnvSetup, VC_useCILicense) if (VC_useRGW3) { - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/patch_rgw_directory.py ${VC_Manage_Project}""") - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --clicast-args rgw export" """) + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/patch_rgw_directory.py ${VC_Manage_Project}""", VC_EnvSetup, VC_useCILicense) + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_useCILicense} --clicast-args rgw export" """, VC_EnvSetup, VC_useCILicense) } if (VC_useCoveragePlugin) { - buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/cobertura.py ${VC_Manage_Project}""") + buildLogText += runCommands("""_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/cobertura.py --extended ${VC_Manage_Project}""", VC_EnvSetup, VC_useCILicense) } cmds = """ _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/incremental_build_report_aggregator.py ${mpName} --rptfmt HTML _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/full_report_no_toc.py "${VC_Manage_Project}" - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --create-report=aggregate --output=${mpName}_aggregate_report.html" + 
_VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_useCILicense} --create-report=aggregate --output=${mpName}_aggregate_report.html" """ if (VC_useImportedResults) { if (VC_useLocalImportedResults) { cmds += """ - _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_UseCILicense} --export-result=${mpName}_results.vcr" + _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/managewait.py --wait_time ${VC_waitTime} --wait_loops ${VC_waitLoops} --command_line "--project "${VC_Manage_Project}" ${VC_useCILicense} --export-result=${mpName}_results.vcr" _VECTORCAST_DIR/vpython "${env.WORKSPACE}"/vc_scripts/merge_vcr.py --new ${mpName}_results.vcr --orig ${mpName}_results_orig.vcr """ } } - buildLogText += runCommands(cmds) + buildLogText += runCommands(cmds, VC_EnvSetup, VC_useCILicense) writeFile file: "metrics_build.log", text: buildLogText buildFileNames += "metrics_build.log " - concatenateBuildLogs(buildFileNames, "complete_build.log") + concatenateBuildLogs(buildFileNames, "complete_build.log", VC_EnvSetup, VC_useCILicense) (foundKeywords, failure, unstable_flag) = checkBuildLogForErrors("complete_build.log") @@ -1024,7 +1067,7 @@ pipeline { currResult = currentBuild.result } - // Send reports to the VectorCAST Soverage Plugin + // Send reports to the VectorCAST Coverage Plugin step([$class: 'VectorCASTPublisher', includes: 'xml_data/coverage_results*.xml', useThreshold: VC_Use_Threshold, @@ -1059,7 +1102,7 @@ pipeline { catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { script { - def mpName = getMPname() + def mpName = getMPname(VC_Manage_Project) def foundKeywords = "" def boolean failure = false @@ -1084,7 +1127,7 @@ pipeline { summaryText += "
" summaryText += readFile('coverage_diffs.html_tmp') - } else { + } else if (VC_useCoverageHistory && !VC_useCoveragePlugin) { print "coverage_diffs.html_tmp missing" } @@ -1147,7 +1190,7 @@ pipeline { _RM ${mpName}_metrics_report.html_tmp """ - runCommands(cmds) + runCommands(cmds, VC_EnvSetup, VC_useCILicense) // use unit_test_fail_count.txt to see if there were any failed test cases // if any failed test cases, Junit will mark as at least unstable. @@ -1168,7 +1211,7 @@ pipeline { } // Stage for additional tools from Vector - // Currently supporting PC Lint Plus, Squore, and TESTInsights + // Currently supporting PC Lint Plus and Squore stage('Additional Tools') { steps { catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { @@ -1177,7 +1220,7 @@ pipeline { // If there's a PC Lint Plus command... if (VC_usePCLintPlus) { // run the PC Lint Plus command - runCommands(VC_pclpCommand) + runCommands(VC_pclpCommand, VC_EnvSetup, VC_useCILicense) // record the results with Warnings-NG plugin recordIssues(tools: [pcLint(pattern: VC_pclpResultsPattern, reportEncoding: 'UTF-8')]) // Archive the PC Lint Results @@ -1187,47 +1230,12 @@ pipeline { // If we are using Squore... if (VC_useSquore) { // Generate the results from Squore and run the squore command which should publish the information to Squore Server - cmd = "${VC_squoreCommand}" - runCommands(cmd) + def cmd = "${VC_squoreCommand}" + runCommands(cmd, VC_EnvSetup, VC_useCILicense) // Archive the Squore results archiveArtifacts allowEmptyArchive: true, artifacts: 'xml_data/squore_results*.xml' } - - // If we using TESTInsights... - if (VC_useTESTinsights){ - - // using the credentials passed in when creating the job... - withCredentials([usernamePassword(credentialsId: VC_TESTinsights_Credential_ID, usernameVariable : "VC_TI_USR", passwordVariable : "VC_TI_PWS")]){ - TI_proxy = "" - - // if we are using a proxy to communicate with TESTInsights, set the proxy from data input during job creation - if (VC_TESTinsights_Proxy.length() != 0) { - TI_proxy = "--proxy ${VC_TESTinsights_Proxy}" - } - - // Build the base TESTInsights command - TESTinsight_Command = "testinsights_connector --api ${VC_TESTinsights_URL} --user " + VC_TI_USR + " --pass " + VC_TI_PWS + " --action PUSH --project ${VC_TESTinsights_Project} --test-object ${BUILD_NUMBER} --vc-project ${VC_Manage_Project} " + TI_proxy + " --log TESTinsights_Push.log" - - // If we are using an SCM, attempt to link the SCM info into TESTInsights - if (VC_usingSCM) { - - // Get the TESTinsights Revision - VC_TESTinsights_Revision = get_SCM_rev() - - println "Git Rev: ${VC_TESTinsights_Revision}" - - // Update the TESTInsights command - TESTinsight_Command += " --vc-project-local-path=${origManageProject} --vc-project-scm-path=${VC_TESTinsights_SCM_URL}/${origManageProject} --src-local-path=${env.WORKSPACE} --src-scm-path=${VC_TESTinsights_SCM_URL}/ --vc-project-scm-technology=${VC_TESTinsights_SCM_Tech} --src-scm-technology=${VC_TESTinsights_SCM_Tech} --vc-project-scm-revision=${VC_TESTinsights_Revision} --src-scm-revision ${VC_TESTinsights_Revision} --versioned" - - } - // Run the command to push data to TESTInsights - runCommands(TESTinsight_Command) - - // Archive the push log - archiveArtifacts allowEmptyArchive: true, artifacts: 'TESTinsights_Push.log' - } - } } } } diff --git a/src/main/resources/scripts/baselinePostBuild.groovy b/src/main/resources/scripts/baselinePostBuild.groovy index 522bc766..3c6afc5a 100644 --- a/src/main/resources/scripts/baselinePostBuild.groovy +++ 
b/src/main/resources/scripts/baselinePostBuild.groovy @@ -1,4 +1,59 @@ import hudson.FilePath +import java.nio.charset.* +import java.nio.ByteBuffer +import java.nio.CharBuffer + +import java.io.StringWriter +import java.io.PrintWriter +import hudson.model.Result + +out = manager.listener.logger // same as console log +out.println "Post Build Groovy: Starting" + + +// Returns only the decoded text; logs the charset used. +def readWithFallback(FilePath fp) { + byte[] bytes + fp.read().withStream { is -> bytes = is.readAllBytes() } + + // Quick BOM hint + String bomHint = null + if (bytes.length >= 3 && bytes[0]==(byte)0xEF && bytes[1]==(byte)0xBB && bytes[2]==(byte)0xBF) bomHint = 'UTF-8' + else if (bytes.length >= 2 && bytes[0]==(byte)0xFF && bytes[1]==(byte)0xFE) bomHint = 'UTF-16LE' + else if (bytes.length >= 2 && bytes[0]==(byte)0xFE && bytes[1]==(byte)0xFF) bomHint = 'UTF-16BE' + + List charsets = [ + 'UTF-8', 'UTF-16LE', 'UTF-16BE', + 'GB18030', 'GBK', + 'windows-31j', 'Shift_JIS', 'EUC-JP', 'ISO-2022-JP', + 'MS949', 'x-windows-949', 'EUC-KR', 'ISO-2022-KR', + 'windows-1252', 'ISO-8859-1' + ].unique() + + if (bomHint && charsets.contains(bomHint)) { + charsets = [bomHint] + (charsets - bomHint) + } + + for (String name : charsets) { + try { + Charset cs = Charset.forName(name) + CharsetDecoder dec = cs.newDecoder() + .onMalformedInput(CodingErrorAction.REPORT) + .onUnmappableCharacter(CodingErrorAction.REPORT) + CharBuffer cb = dec.decode(ByteBuffer.wrap(bytes)) + out.println "Post Build Groovy: Decoded ${fp.getName()} with charset: ${name}" + return cb.toString() + } catch (CharacterCodingException ignore) { + // try next + } catch (Exception ignore) { + // try next + } + } + + // Fallback: ISO-8859-1 + out.println "Post Build Groovy: Fall Back Decode ${fp.getName()} with charset: ISO-8859-1 (fallback)" + return new String(bytes, Charset.forName('ISO-8859-1')) +} Boolean buildFailed = false Boolean buildUnstable = false @@ -84,55 +139,71 @@ if(manager.logContains(".*Abnormal Termination on Environment.*")) { manager.addBadge("icon-error icon-xlg", "Abnormal Termination of at least one Environment") } -def debugInfo = "" -def summaryStr = "" +try { + def summaryStr = "" -FilePath fp_cd = new FilePath(manager.build.getWorkspace(),'coverage_diffs.html_tmp') -FilePath fp_i = new FilePath(manager.build.getWorkspace(),'@PROJECT_BASE@_rebuild.html_tmp') -FilePath fp_f = new FilePath(manager.build.getWorkspace(),'@PROJECT_BASE@_full_report.html_tmp') -FilePath fp_m = new FilePath(manager.build.getWorkspace(),'@PROJECT_BASE@_metrics_report.html_tmp') + FilePath fp_cd = new FilePath(manager.build.getWorkspace(),'coverage_diffs.html_tmp') + FilePath fp_i = new FilePath(manager.build.getWorkspace(),'@PROJECT_BASE@_rebuild.html_tmp') + FilePath fp_f = new FilePath(manager.build.getWorkspace(),'@PROJECT_BASE@_full_report.html_tmp') + FilePath fp_m = new FilePath(manager.build.getWorkspace(),'@PROJECT_BASE@_metrics_report.html_tmp') -if (fp_cd.exists()) { - summaryStr = "
" - summaryStr += fp_cd.readToString() -} + if (fp_cd.exists()) { + summaryStr = "
" + summaryStr += readWithFallback(fp_cd) -if (fp_i.exists()) { - summaryStr += "
" - summaryStr += fp_i.readToString() -} + } -if (fp_f.exists()) { - summaryStr += "
" - summaryStr += fp_f.readToString() -} + if (fp_i.exists()) { + summaryStr += "
" + summaryStr += readWithFallback(fp_i) + } -if (fp_m.exists()) -{ - summaryStr += "
" - summaryStr += fp_m.readToString() + if (fp_f.exists()) { + summaryStr += "
" + summaryStr += readWithFallback(fp_f) + } -} + if (fp_m.exists()) + { + summaryStr += "
" + summaryStr += readWithFallback(fp_m) + } -manager.createSummary("icon-orange-square icon-xlg").appendText(summaryStr, false) + manager.createSummary("icon-orange-square icon-xlg").appendText(summaryStr, false) -if (!fp_f.exists() && !fp_m.exists()) -{ - manager.createSummary("icon-error icon-xlg").appendText("General Failure", false, false, false, "red") - manager.build.description = "General Failure, Incremental Build Report or Full Report Not Present. Please see the console for more information" - manager.addBadge("icon-error icon-xlg", "General Error") + if (!fp_f.exists() && !fp_m.exists()) + { + manager.createSummary("icon-error icon-xlg").appendText("General Failure", false, false, false, "red") + manager.build.description = "General Failure, Incremental Build Report or Full Report Not Present. Please see the console for more information" + manager.addBadge("icon-error icon-xlg", "General Error") - if (!buildFailed) { - buildUnstable = true + if (!buildFailed) { + buildUnstable = true + } } -} -if (buildFailed) -{ - manager.buildFailure() -} -if (buildUnstable) -{ - manager.buildUnstable() -} + if (buildFailed) + { + manager.buildFailure() + } + if (buildUnstable) + { + manager.buildUnstable() + } + +} catch (Throwable t) { + out.println "Post Build Groovy: Failed reading the reports" + + def sw = new StringWriter() + t.printStackTrace(new PrintWriter(sw)) + out.println "Post Build Groovy: ERROR: ${t.class.name}: ${t.message}" + out.println sw.toString() // full stack with file/line numbers when available + + // Mark build as failed (or UNSTABLE if you prefer) + manager.build.setResult(Result.FAILURE) + + // Re-throw if you want the Post Build Groovy: step itself to error out: + throw t +} +out.println "Post Build Groovy: Complete - No errors" diff --git a/src/main/resources/scripts/baselineSingleJobLinux.txt b/src/main/resources/scripts/baselineSingleJobLinux.txt index 205ebb5b..1a367610 100644 --- a/src/main/resources/scripts/baselineSingleJobLinux.txt +++ b/src/main/resources/scripts/baselineSingleJobLinux.txt @@ -1,70 +1,71 @@ -# Baseline Single Job Windows Batch file +# Baseline Single Job Linux Batch file +# Environment setup macros (kept as-is) $VCAST_ENVIRONMENT_SETUP_LINUX $VCAST_USE_CI_LICENSES_LINUX + export VCAST_RPTS_PRETTY_PRINT_HTML=FALSE export VCAST_NO_FILE_TRUNCATION=1 export VCAST_RPTS_SELF_CONTAINED=FALSE -# Use strict testcase import -if [[ $VCAST_USE_STRICT_IMPORT -eq 1 ]]; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --config=VCAST_STRICT_TEST_CASE_IMPORT=TRUE" +# --- Use strict testcase import --- +if [ "${VCAST_USE_STRICT_IMPORT}" -eq 1 ]; then + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --config=VCAST_STRICT_TEST_CASE_IMPORT=TRUE" fi -# Default Setup -$VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --full-status " -$VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --force --release-locks " -$VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project 
"$VCAST_PROJECT_NAME" --config VCAST_CUSTOM_REPORT_FORMAT=$VCAST_REPORT_FORMAT" - +# --- Default Setup --- +"$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --full-status" +"$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --force --release-locks" +"$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --config VCAST_CUSTOM_REPORT_FORMAT=${VCAST_REPORT_FORMAT}" -# Use Imported Results -if [[ $VCAST_USE_IMPORTED_RESULTS -eq 1 ]]; then - if [[ $VCAST_USE_LOCAL_IMPORTED_RESULTS -eq 1 ]] && [[ -f ${VCAST_PROJECT_BASENAME}_results.vcr ]] ; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --import-result=${VCAST_PROJECT_BASENAME}_results.vcr" - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --full-status " - if [[ -f ${VCAST_PROJECT_BASENAME}_results.vcr ]] ; then - cp -p ${VCAST_PROJECT_BASENAME}_results.vcr ${VCAST_PROJECT_BASENAME}_results_orig.vcr - fi - fi - if [[ $VCAST_USE_EXTERNAL_IMPORTED_RESULTS -eq 1 ]] && [[ -f "$VCAST_USE_EXTERNAL_FILENAME" ]] ; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --import-result="$VCAST_USE_EXTERNAL_FILENAME" " - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --full-status " +# --- Use Imported Results --- +if [ "${VCAST_USE_IMPORTED_RESULTS}" -eq 1 ]; then + if [ "${VCAST_USE_LOCAL_IMPORTED_RESULTS}" -eq 1 ] && [ -f "${VCAST_PROJECT_BASENAME}_results.vcr" ]; then + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --import-result=${VCAST_PROJECT_BASENAME}_results.vcr" + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --full-status" + if [ -f "${VCAST_PROJECT_BASENAME}_results.vcr" ]; then + cp -p "${VCAST_PROJECT_BASENAME}_results.vcr" "${VCAST_PROJECT_BASENAME}_results_orig.vcr" fi + fi + + if [ "${VCAST_USE_EXTERNAL_IMPORTED_RESULTS}" -eq 1 ] && [ -f "${VCAST_USE_EXTERNAL_FILENAME}" ]; then + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --import-result=\"${VCAST_USE_EXTERNAL_FILENAME}\"" + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --full-status" + fi fi -# Clean up old files -rm -f command.log -rm -f *_rebuild.html -rm -f coverage_diffs.html_tmp -rm -f combined_incr_rebuild.tmp -rm -f 
${VCAST_PROJECT_BASENAME}_full_report.html_tmp -rm -f ${VCAST_PROJECT_BASENAME}_metrics_report.html +# --- Clean up old files --- +rm -f command.log *_rebuild.html coverage_diffs.html_tmp combined_incr_rebuild.tmp "${VCAST_PROJECT_BASENAME}_full_report.html_tmp" "${VCAST_PROJECT_BASENAME}_metrics_report.html" -$VCAST_EXECUTE_PREAMBLE_LINUX $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --build-execute $VCAST_USE_CBT --output ${VCAST_PROJECT_BASENAME}_rebuild.html " +# --- Build & Execute --- +$VCAST_EXECUTE_PREAMBLE_LINUX "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --build-execute ${VCAST_USE_CBT} --output ${VCAST_PROJECT_BASENAME}_rebuild.html" cp -p command.log complete_build.log -if [ -f {VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT} ]; then - cp -p ${VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT} ${VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT}_tmp -fi -if [[ $VCAST_OPTION_USE_REPORTING -eq 1 ]]; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --config VCAST_CUSTOM_REPORT_FORMAT=HTML" - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/generate-results.py" --junit --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS "$VCAST_PROJECT_NAME" --buildlog complete_build.log - if [[ $VCAST_USE_RGW3 -eq 1 ]] ; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --clicast-args rgw export" - fi - if [[ $VCAST_USE_COVERAGE_PLUGIN -eq 1 ]] ; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/cobertura.py" "$VCAST_PROJECT_NAME" - fi - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/full_report_no_toc.py" "$VCAST_PROJECT_NAME" - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project $VCAST_PROJECT_NAME --full-status=${VCAST_PROJECT_BASENAME}_full_report.html" - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project $VCAST_PROJECT_NAME --create-report=aggregate --output=${VCAST_PROJECT_BASENAME}_aggregate_report.html" - - # Use Imported Results - if [[ $VCAST_USE_IMPORTED_RESULTS -eq 1 ]] && [[ $VCAST_USE_LOCAL_IMPORTED_RESULTS -eq 1 ]] ; then - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/managewait.py" --wait_time $VCAST_WAIT_TIME --wait_loops $VCAST_WAIT_LOOPS --command_line "--project "$VCAST_PROJECT_NAME" --export-result=${VCAST_PROJECT_BASENAME}_results.vcr " - $VECTORCAST_DIR/vpython "$WORKSPACE/vc_scripts/merge_vcr.py" --orig ${VCAST_PROJECT_BASENAME}_results_orig.vcr --new ${VCAST_PROJECT_BASENAME}_results.vcr - fi +if [ -f "${VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT}" ]; then + cp -p "${VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT}" "${VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT}_tmp" + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/fixup_reports.py" "${VCAST_PROJECT_BASENAME}_rebuild.${VCAST_HTML_OR_TEXT}_tmp" fi +# --- Reporting / Plugins --- +if [ "${VCAST_OPTION_USE_REPORTING}" -eq 1 ]; then + "$VECTORCAST_DIR/vpython" 
"${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --config VCAST_CUSTOM_REPORT_FORMAT=HTML" + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/generate-results.py" --junit --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" "${VCAST_PROJECT_NAME}" --buildlog complete_build.log + + if [ "${VCAST_USE_RGW3}" -eq 1 ]; then + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --clicast-args rgw export" + fi + + if [ "${VCAST_USE_COVERAGE_PLUGIN}" -eq 1 ]; then + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/cobertura.py" --extended "${VCAST_PROJECT_NAME}" + fi + + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/full_report_no_toc.py" "${VCAST_PROJECT_NAME}" + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --create-report=aggregate --output=${VCAST_PROJECT_BASENAME}_aggregate_report.html" + + if [ "${VCAST_USE_IMPORTED_RESULTS}" -eq 1 ] && [ "${VCAST_USE_LOCAL_IMPORTED_RESULTS}" -eq 1 ]; then + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/managewait.py" --wait_time "${VCAST_WAIT_TIME}" --wait_loops "${VCAST_WAIT_LOOPS}" --command_line="--project \"${VCAST_PROJECT_NAME}\" --export-result=${VCAST_PROJECT_BASENAME}_results.vcr" + "$VECTORCAST_DIR/vpython" "${WORKSPACE}/vc_scripts/merge_vcr.py" --orig "${VCAST_PROJECT_BASENAME}_results_orig.vcr" --new "${VCAST_PROJECT_BASENAME}_results.vcr" + fi +fi diff --git a/src/main/resources/scripts/baselineSingleJobWindows.txt b/src/main/resources/scripts/baselineSingleJobWindows.txt index f48fe089..73ee90b1 100644 --- a/src/main/resources/scripts/baselineSingleJobWindows.txt +++ b/src/main/resources/scripts/baselineSingleJobWindows.txt @@ -20,12 +20,12 @@ if "%VCAST_USE_STRICT_IMPORT%"=="TRUE" ( :: Use Imported Results if "%VCAST_USE_IMPORTED_RESULTS%"=="TRUE" ( if "%VCAST_USE_LOCAL_IMPORTED_RESULTS%"=="TRUE" if exist "%VCAST_PROJECT_BASENAME%_results.vcr" ( - %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --import-result="%VCAST_PROJECT_BASENAME%_results.vcr"" + %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --import-result=""%VCAST_PROJECT_BASENAME%_results.vcr""" %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --full-status"" if exist %VCAST_PROJECT_BASENAME%_results.vcr ( copy %VCAST_PROJECT_BASENAME%_results.vcr %VCAST_PROJECT_BASENAME%_results_orig.vcr ) ) if "%VCAST_USE_EXTERNAL_IMPORTED_RESULTS%"=="TRUE" if exist "%VCAST_USE_EXTERNAL_FILENAME%" ( - %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --import-result="%VCAST_USE_EXTERNAL_FILENAME%" + %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% 
--import-result=""%VCAST_USE_EXTERNAL_FILENAME%""" %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --full-status" ) ) @@ -38,11 +38,14 @@ del combined_incr_rebuild.tmp > nul 2>&1 del %VCAST_PROJECT_BASENAME%_full_report.html_tmp > nul 2>&1 del %VCAST_PROJECT_BASENAME%_metrics_report.html_tmp > nul 2>&1 -%VCAST_EXECUTE_PREAMBLE_WIN% %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --build-execute %VCAST_USE_CBT% --output %VCAST_PROJECT_BASENAME%_rebuild.%VCAST_HTML_OR_TEXT% +%VCAST_EXECUTE_PREAMBLE_WIN% %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --build-execute %VCAST_USE_CBT% --output %VCAST_PROJECT_BASENAME%_rebuild.%VCAST_HTML_OR_TEXT%" copy command.log complete_build.log if exist "%VCAST_PROJECT_BASENAME%_rebuild.%VCAST_HTML_OR_TEXT%" ( copy %VCAST_PROJECT_BASENAME%_rebuild.%VCAST_HTML_OR_TEXT% %VCAST_PROJECT_BASENAME%_rebuild.%VCAST_HTML_OR_TEXT%_tmp + + :: strip off the ToC, etc + %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\fixup_reports.py" %VCAST_PROJECT_BASENAME%_rebuild.%VCAST_HTML_OR_TEXT%_tmp ) if "%VCAST_OPTION_USE_REPORTING%" == "TRUE" ( @@ -52,11 +55,10 @@ if "%VCAST_OPTION_USE_REPORTING%" == "TRUE" ( %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --clicast-args rgw export" ) if "%VCAST_USE_COVERAGE_PLUGIN%"=="TRUE" ( - %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\cobertura.py" %VCAST_PROJECT_NAME% + %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\cobertura.py" --extended %VCAST_PROJECT_NAME% ) %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\full_report_no_toc.py" "%VCAST_PROJECT_NAME%" - %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --full-status=\"%VCAST_PROJECT_BASENAME%_full_report.html" - %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --create-report=aggregate --output=\"%VCAST_PROJECT_BASENAME%_aggregate_report.html" + %VECTORCAST_DIR%\vpython "%WORKSPACE%\vc_scripts\managewait.py" --wait_time %VCAST_WAIT_TIME% --wait_loops %VCAST_WAIT_LOOPS% --command_line "--project %VCAST_PROJECT_NAME% --create-report=aggregate --output=""%VCAST_PROJECT_BASENAME%_aggregate_report.html""" :: Use Imported Results if "%VCAST_USE_IMPORTED_RESULTS%"=="TRUE" if "%VCAST_USE_LOCAL_IMPORTED_RESULTS%"=="TRUE" ( diff --git a/src/main/resources/scripts/cobertura.py b/src/main/resources/scripts/cobertura.py index f506d8fc..bfa71983 100644 --- a/src/main/resources/scripts/cobertura.py +++ b/src/main/resources/scripts/cobertura.py @@ -23,12 +23,20 @@ # from lxml import etree + +from vcast_utils import dump, checkVectorCASTVersion, getVectorCASTEncoding + try: from vector.apps.DataAPI.vcproject_api import VCProjectApi from vector.apps.DataAPI.vcproject_models import VCProject except: pass -from vector.apps.DataAPI.cover_api import CoverApi + +try: + from vector.apps.DataAPI.cover_api import CoverApi +except: + pass + try: from 
vector.apps.DataAPI.unit_test_api import UnitTestApi except: @@ -36,10 +44,15 @@ import sys, os from collections import defaultdict from pprint import pprint +import argparse +try: + from safe_open import open +except: + pass fileList = [] -from vcast_utils import dump, checkVectorCASTVersion +encFmt = getVectorCASTEncoding() def write_xml(x, name, verbose = False): @@ -51,7 +64,8 @@ def write_xml(x, name, verbose = False): xml_str += etree.tostring(x,pretty_print=True).decode() - open(name + ".xml", "w").write(xml_str) + with open(name + ".xml", "wb") as fd: + fd.write(xml_str.encode(encFmt,"replace")) def getCoveredFunctionCount(source): if len(source.functions) == 0: @@ -85,11 +99,20 @@ def getFileXML(testXml, coverAPI, verbose = False, extended = False, source_root fname = coverAPI.display_name - fpath = os.path.relpath(coverAPI.display_path,prj_dir).replace("\\","/") + fpath = coverAPI.display_path + try: + fpath = os.path.relpath(fpath,prj_dir).replace("\\","/") + except: + fpath = fpath.replace("\\","/") + pass branch_totals = float(coverAPI.metrics.branches + coverAPI.metrics.mcdc_branches) - branch_covered = float(coverAPI.metrics.max_covered_branches + coverAPI.metrics.max_covered_mcdc_branches) - + branch_covered = float( + coverAPI.metrics.max_covered_branches + + coverAPI.metrics.max_covered_mcdc_branches + + coverAPI.metrics.max_annotations_branches + + coverAPI.metrics.max_annotations_mcdc_branches + ) if branch_totals > 0: branch_pct = branch_covered / branch_totals else: @@ -103,8 +126,8 @@ def getFileXML(testXml, coverAPI, verbose = False, extended = False, source_root file = None if verbose: - print (" fname = ", fname) - print (" fpath = ", fpath) + print (" fname = " + fname) + print (" fpath = " + fpath) for element in testXml.iter(): if element.tag == "class" and element.attrib['filename'] == fpath: @@ -130,16 +153,27 @@ def getFileXML(testXml, coverAPI, verbose = False, extended = False, source_root if coverAPI.metrics.branches or coverAPI.metrics.mcdc_branches: file.attrib['branch-rate'] = str(branch_pct) if coverAPI.metrics.function_calls > 0: - funcCallPercentStr = "{:.2f}".format(coverAPI.metrics.max_covered_function_calls_pct) + "% (" + str(coverAPI.metrics.max_covered_function_calls) + "/" + str(coverAPI.metrics.function_calls) + ")" + funcCallPercentStr = "{:.2f}% ( {} / {} )".format( + coverAPI.metrics.max_covered_function_calls_pct, + coverAPI.metrics.max_covered_function_calls + coverAPI.metrics.max_annotations_function_calls, + coverAPI.metrics.function_calls + ) + method.attrib['functioncall-coverage'] = funcCallPercentStr file.attrib['functioncall-coverage'] = funcCallPercentStr if coverAPI.metrics.mcdc_pairs > 0: - mcdcPairPercentStr = "{:.2f}".format(coverAPI.metrics.max_covered_mcdc_pairs_pct) + "% (" + str(coverAPI.metrics.max_covered_mcdc_pairs) + "/" + str(coverAPI.metrics.mcdc_pairs) + ")" + mcdcPairPercentStr = "{:.2f}% ( {} / {} )".format( + coverAPI.metrics.max_covered_mcdc_pairs_pct, + coverAPI.metrics.max_covered_mcdc_pairs + coverAPI.metrics.max_annotations_mcdc_pairs, + coverAPI.metrics.mcdc_pairs + ) file.attrib['mcdcpair-coverage'] = mcdcPairPercentStr funcCovTotal, funcTotal = getCoveredFunctionCount(coverAPI) if funcTotal > 0: - file.attrib['function-coverage'] = "{:.2f}".format(100.0 *funcCovTotal/funcTotal) + "% (" + str(funcCovTotal) + "/" + str(funcTotal) + ")" + file.attrib['function-coverage'] = "{:.2f}% ( {} / {} )".format( + 100.0 *funcCovTotal/funcTotal, funcCovTotal ,funcTotal + ) else: file.attrib['function-coverage'] = 
"0.00% (0/0)" @@ -229,18 +263,18 @@ def has_any_coverage(line): def has_anything_covered(line): - return (line.metrics.max_covered_statements + + return (line.metrics.max_covered_statements + line.metrics.max_covered_branches + line.metrics.max_covered_mcdc_branches + line.metrics.max_covered_mcdc_pairs + line.metrics.max_covered_functions + line.metrics.max_covered_function_calls + - line.metrics.max_covered_statements + - line.metrics.max_covered_branches + - line.metrics.max_covered_mcdc_branches + - line.metrics.max_covered_mcdc_pairs + - line.metrics.max_covered_functions + - line.metrics.max_covered_function_calls) + line.metrics.max_annotations_statements + + line.metrics.max_annotations_branches + + line.metrics.max_annotations_mcdc_branches + + line.metrics.max_annotations_mcdc_pairs + + line.metrics.max_annotations_functions + + line.metrics.max_annotations_function_calls) def processStatementBranchMCDC(fileApi, lines, extended = False): @@ -334,19 +368,24 @@ def procesCoverage(coverXML, coverApi, extended = False, source_root = ""): if extended: for func in coverApi.functions: - + + if isinstance(func.instrumented_functions[0].parameterized_name, bool): + continue + method = etree.SubElement(methods, "method") - method.attrib['name'] = func.name - method.attrib['signature'] = func.instrumented_functions[0].parameterized_name.replace(func.name,"",1) + method.attrib['signature'] = func.instrumented_functions[0].parameterized_name.replace(func.name,"",1) method.attrib['line-rate'] = str(func.metrics.max_covered_statements_pct/100.0) - statementPercentStr = "{:.2f}".format(func.metrics.max_covered_statements_pct) + "% (" + str(func.metrics.max_covered_statements) + "/" + str(func.metrics.statements) + ")" + statementPercentStr = "{:.2f}% ({} / {})".format( + func.metrics.max_covered_statements_pct, + func.metrics.max_covered_statements + func.metrics.max_annotations_statements, + func.metrics.statements) #method.attrib['statements'] = statementPercentStr func_total_br = func.metrics.branches + func.metrics.mcdc_branches func_cov_br = func.metrics.max_covered_branches + func.metrics.max_covered_mcdc_branches - + func_cov_br += func.metrics.max_annotations_branches + func.metrics.max_annotations_mcdc_branches func_branch_rate = 0.0 if func_total_br > 0: func_branch_rate = float(func_cov_br) / float(func_total_br) @@ -355,17 +394,25 @@ def procesCoverage(coverXML, coverApi, extended = False, source_root = ""): method.attrib['complexity'] = str(func.metrics.complexity) if func.metrics.function_calls > 0: - funcCallPercentStr = "{:.2f}".format(func.metrics.max_covered_function_calls_pct) + "% (" + str(func.metrics.max_covered_function_calls) + "/" + str(func.metrics.function_calls) + ")" + funcCallPercentStr = "{:.2f}% ( {} / {} )".format( + func.metrics.max_covered_function_calls_pct, + func.metrics.max_covered_function_calls + func.metrics.max_annotations_function_calls, + func.metrics.function_calls + ) method.attrib['functioncall-coverage'] = funcCallPercentStr if func.metrics.mcdc_pairs > 0: - mcdcPairPercentStr = "{:.2f}".format(func.metrics.max_covered_mcdc_pairs_pct) + "% (" + str(func.metrics.max_covered_mcdc_pairs) + "/" + str(func.metrics.mcdc_pairs) + ")" + mcdcPairPercentStr = "{:.2f}% ( {} / {} )".format( + func.metrics.max_covered_mcdc_pairs_pct, + func.metrics.max_covered_mcdc_pairs + func.metrics.max_annotations_mcdc_pairs, + func.metrics.mcdc_pairs + ) method.attrib['mcdcpair-coverage'] = mcdcPairPercentStr if (func.metrics.max_covered_functions_pct + 
func.metrics.max_covered_statements_pct + func.metrics.max_covered_branches_pct + func.metrics.max_covered_mcdc_branches_pct + - func.metrics.max_covered_mcdc_pairs + + func.metrics.max_covered_mcdc_pairs_pct + func.metrics.max_covered_function_calls_pct) > 0: method.attrib['function-coverage'] = "100% (1/1)" else: @@ -374,11 +421,13 @@ def procesCoverage(coverXML, coverApi, extended = False, source_root = ""): return processStatementBranchMCDC(coverApi, lines, extended) def runCoverageResultsMP(packages, mpFile, verbose = False, extended=False, source_root = ""): - + vcproj = VCProjectApi(mpFile) - api = vcproj.project.cover_api + api = vcproj.project.cover_api + results = runCoberturaResults(packages, api, verbose = False, extended = extended, source_root = source_root) + vcproj.close() - return runCoberturaResults(packages, api, verbose = False, extended = extended, source_root = source_root) + return results def runCoberturaResults(packages, api, verbose = False, extended = False, source_root = ""): @@ -436,8 +485,7 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source except: prj_dir = os.getcwd().replace("\\","/") + "/" - # get a sorted listed of all the files with the proj directory stripped off - + # get a sorted listed of all the files with the proj directory stripped off for file in api.SourceFile.all(): if file.display_name == "": continue @@ -446,7 +494,11 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source fname = file.display_name fpath = file.display_path.rsplit('.',1)[0] - fpath = os.path.relpath(fpath,prj_dir).replace("\\","/") + try: + fpath = os.path.relpath(fpath,prj_dir).replace("\\","/") + except: + fpath = fpath.replace("\\","/") + pass # print("*", file.name, file.display_name, fpath) @@ -514,16 +566,22 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source package.attrib['branch-rate'] = str(branch_rate) if file.has_mcdc_coverage: - mcdcPairPercentStr = "{:.2f}".format(MCDC_rate * 100.0) + "% (" + str(pkg_cov_mcdc) + "/" + str(pkg_total_mcdc) + ")" + mcdcPairPercentStr = "{:.2f}% ({} / {})".format( + MCDC_rate * 100.0, pkg_cov_mcdc, pkg_total_mcdc + ) package.attrib['mcdcpair-coverage'] = mcdcPairPercentStr if file.has_function_call_coverage: - funcCallPercentStr = "{:.2f}".format(FC_rate * 100.0) + "% (" + str(pkg_cov_fc) + "/" + str(pkg_total_fc) + ")" + funcCallPercentStr = "{:.2f}% ({} / {})".format( + FC_rate * 100.0, pkg_cov_fc, pkg_total_fc + ) package.attrib['functioncall-coverage'] = funcCallPercentStr funcCovTotal, funcTotal = getCoveredFunctionCount(file) if pkg_total_func > 0: func_rate = float(pkg_cov_func) / float(pkg_total_func) - funcPercentStr = "{:.2f}".format(func_rate * 100.0) + "% (" + str(pkg_cov_func) + "/" + str(pkg_total_func) + ")" + funcPercentStr = "{:.2f}% ({} / {})".format( + func_rate * 100.0, pkg_cov_func, pkg_total_func + ) package.attrib['function-coverage'] = funcPercentStr path_name = new_path @@ -556,14 +614,16 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source total_br += file.metrics.branches + file.metrics.mcdc_branches total_st += file.metrics.statements cov_br += file.metrics.max_covered_branches + file.metrics.max_covered_mcdc_branches - cov_st += file.metrics.max_covered_statements + cov_br += file.metrics.max_annotations_branches + file.metrics.max_annotations_mcdc_branches + cov_st += file.metrics.max_covered_statements + file.metrics.max_annotations_statements pkg_total_br += file.metrics.branches 
@@ -556,14 +614,16 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source
             total_br += file.metrics.branches + file.metrics.mcdc_branches
             total_st += file.metrics.statements
             cov_br += file.metrics.max_covered_branches + file.metrics.max_covered_mcdc_branches
-            cov_st += file.metrics.max_covered_statements
+            cov_br += file.metrics.max_annotations_branches + file.metrics.max_annotations_mcdc_branches
+            cov_st += file.metrics.max_covered_statements + file.metrics.max_annotations_statements
 
             pkg_total_br += file.metrics.branches + file.metrics.mcdc_branches
             pkg_total_st += file.metrics.statements
             pkg_cov_br += file.metrics.max_covered_branches + file.metrics.max_covered_mcdc_branches
-            pkg_cov_st += file.metrics.max_covered_statements
+            pkg_cov_br += file.metrics.max_annotations_branches + file.metrics.max_annotations_mcdc_branches
+            pkg_cov_st += file.metrics.max_covered_statements + file.metrics.max_annotations_statements
 
             vg += file.metrics.complexity
             pkg_vg += file.metrics.complexity
@@ -572,13 +632,13 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source
 
             total_fc += file.metrics.function_calls
             total_mcdc += file.metrics.mcdc_pairs
-            cov_fc += file.metrics.max_covered_function_calls
-            cov_mcdc += file.metrics.max_covered_mcdc_pairs
+            cov_fc += file.metrics.max_covered_function_calls + file.metrics.max_annotations_function_calls
+            cov_mcdc += file.metrics.max_covered_mcdc_pairs + file.metrics.max_annotations_mcdc_pairs
 
             pkg_total_fc += file.metrics.function_calls
             pkg_total_mcdc += file.metrics.mcdc_pairs
-            pkg_cov_fc += file.metrics.max_covered_function_calls
-            pkg_cov_mcdc += file.metrics.max_covered_mcdc_pairs
+            pkg_cov_fc += file.metrics.max_covered_function_calls + file.metrics.max_annotations_function_calls
+            pkg_cov_mcdc += file.metrics.max_covered_mcdc_pairs + file.metrics.max_annotations_mcdc_pairs
 
             funcCovTotal, funcTotal = getCoveredFunctionCount(file)
             pkg_total_func += funcTotal
@@ -637,14 +697,20 @@ def runCoberturaResults(packages, api, verbose = False, extended = False, source
         if file.has_branch_coverage or file.has_mcdc_coverage:
             package.attrib['branch-rate'] = str(branch_rate)
         if file.has_mcdc_coverage:
-            mcdcPairPercentStr = "{:.2f}".format(MCDC_rate * 100.0) + "% (" + str(pkg_cov_mcdc) + "/" + str(pkg_total_mcdc) + ")"
+            mcdcPairPercentStr = "{:.2f}% ({} / {})".format(
+                MCDC_rate * 100.0, pkg_cov_mcdc, pkg_total_mcdc
+            )
             package.attrib['mcdcpair-coverage'] = mcdcPairPercentStr
         if file.has_function_call_coverage:
-            funcCallPercentStr = "{:.2f}".format(FC_rate * 100.0) + "% (" + str(pkg_cov_fc) + "/" + str(pkg_total_fc) + ")"
+            funcCallPercentStr = "{:.2f}% ({} / {})".format(
+                FC_rate * 100.0, pkg_cov_fc, pkg_total_fc
+            )
             package.attrib['functioncall-coverage'] = funcCallPercentStr
 
         if pkg_total_func > 0:
-            funcPercentStr = "{:.2f}".format(func_rate * 100.0) + "% (" + str(pkg_cov_func) + "/" + str(pkg_total_func) + ")"
+            funcPercentStr = "{:.2f}% ({} / {})".format(
+                func_rate * 100.0, pkg_cov_func, pkg_total_func
+            )
             package.attrib['function-coverage'] = funcPercentStr
 
         package.attrib['complexity'] = str(pkg_vg)
@@ -716,8 +782,8 @@ def generateCoverageResults(inFile, azure = False, xml_data_dir = "xml_data", ve
     coverages.attrib['timestamp'] = str(datetime.now())
 
     tool_version = os.path.join(os.environ['VECTORCAST_DIR'], "DATA", "tool_version.txt")
-    with open(tool_version,"r") as fd:
-        ver = fd.read()
+    with open(tool_version,"rb") as fd:
+        ver = fd.read().decode(encFmt,"replace")
 
     coverages.attrib['version'] = "VectorCAST " + ver.rstrip()
@@ -735,7 +801,7 @@ def generateCoverageResults(inFile, azure = False, xml_data_dir = "xml_data", ve
         if MCDC_rate != -1.0: print ("mcdc pairs: {:.2f}% ({:d} out of {:d})".format(MCDC_rate*100.0, cov_mcdc, total_mcdc))
         if statement_rate != -1.0: print ("coverage: {:.2f}% of statements".format(statement_rate*100.0))
-        print ("complexity: {:d}".format(complexity))
+        if complexity != -1.0: print ("complexity: {:d}".format(complexity))
 
     source = etree.SubElement(sources, "source")
     source.text = "./"
@@ -748,22 +814,20 @@ def generateCoverageResults(inFile, azure = False, xml_data_dir = "xml_data", ve
 
 if __name__ == '__main__':
 
     if not checkVectorCASTVersion(21):
-        print("Cannot create Cobertura metrics. Please upgrade VectorCAST")
-        sys.exit()
+        print ("Cannot create Cobertura metrics. Please upgrade VectorCAST")
+        sys.exit(0)
 
-    extended = False
-    azure = False
-
-    inFile = sys.argv[1]
-    try:
-        if "--azure" == sys.argv[2]:
-            azure = True
-            print ("using azure mode")
-        elif "--extended" == sys.argv[2]:
-            extended = True
-    except Exception as e:
-        azure = False
-        extended = False
+    parser = argparse.ArgumentParser()
+    parser.add_argument('ManageProject', help='Manager Project Name')
+    parser.add_argument('-e', '--extended', help='Enable extended Cobertura format', action="store_true", default=False)
+    parser.add_argument('-a', '--azure', help='Generate results to target Azure', action="store_true", default=False)
+    parser.add_argument('--xml_data_dir', help='Set the base directory of the xml_data directory. Default is the workspace directory', default = 'xml_data')
+    args = parser.parse_args()
+    extended = args.extended
+    azure = args.azure
+
+    inFile = args.ManageProject
+    xml_data_dir = args.xml_data_dir
 
-    generateCoverageResults(inFile, azure, xml_data_dir = "xml_data", verbose = False, extended = extended)
+    generateCoverageResults(inFile, azure, xml_data_dir = xml_data_dir, verbose = False, extended = extended)
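The hand-rolled sys.argv handling above is replaced with an argparse interface: the Manage project becomes a positional argument and the Azure, extended, and xml_data_dir switches become options. Below is a hedged sketch of exercising that interface without running the whole script; the prog name is a placeholder, since this file's header appears earlier in the diff.

# Sketch only: mirrors the option names added in the hunk above.
import argparse

parser = argparse.ArgumentParser(prog="cobertura-script.py")   # placeholder name
parser.add_argument('ManageProject')
parser.add_argument('-e', '--extended', action="store_true", default=False)
parser.add_argument('-a', '--azure', action="store_true", default=False)
parser.add_argument('--xml_data_dir', default='xml_data')

# Roughly equivalent to:  vpython cobertura-script.py MyProject.vcm --extended --xml_data_dir build/xml_data
args = parser.parse_args(['MyProject.vcm', '--extended', '--xml_data_dir', 'build/xml_data'])
print(args.ManageProject, args.extended, args.azure, args.xml_data_dir)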
diff --git a/src/main/resources/scripts/copy_build_dir.py b/src/main/resources/scripts/copy_build_dir.py
index 8708d90f..537e7ff5 100644
--- a/src/main/resources/scripts/copy_build_dir.py
+++ b/src/main/resources/scripts/copy_build_dir.py
@@ -1,7 +1,7 @@
 #
 # The MIT License
 #
-# Copyright 2024 Vector Informatik, GmbH.
+# Copyright 2025 Vector Informatik, GmbH.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -30,6 +30,12 @@
 import shutil
 import argparse
 
+from vcast_utils import dump, checkVectorCASTVersion, getVectorCASTEncoding
+
+if not checkVectorCASTVersion(19):
+    print ("Copy build directory for pipelines not supported. Please upgrade VectorCAST")
+    sys.exit(0)
+
 try:
     ## This tests to see if 2018 is present.
     from vector.apps.ReportBuilder.custom_report import CustomReport
@@ -43,7 +49,7 @@ try:
     from vector.apps.DataAPI.vcproject_api import VCProjectApi
 except:
-    pass
+    raise ImportError("Full reports generated by previous call to generate-results.py")
 
 def make_relative(path, workspace, vCastProjectWorkspace, ManageProjectName):
 
@@ -125,7 +131,7 @@ def addConvertCoverFile(tf, file, workspace, build_dir, nocase, vCastProjectWork
     fullpath = build_dir + os.path.sep + file
     bakpath = fullpath + '.bk'
 
-    print("Updating cover.db: ", fullpath)
+    print("Updating cover.db: " + fullpath)
 
     if os.path.isfile(fullpath):
         conn = sqlite3.connect(fullpath)
@@ -237,18 +243,20 @@ def run(ManageProjectName, Level, BaseName, Env, workspace, vCastProjectWorkspac
 
 def getVcastProjectWorkspace(args):
-    vc_api = VCProjectApi(args.ManageProject)
-    vCastProjectWorkspace = vc_api.project.workspace
-    vc_api.close()
-
+    vcproj = VCProjectApi(args.ManageProject)
+    vCastProjectWorkspace = vcproj.project.workspace
+    vcproj.close()
     return vCastProjectWorkspace
 
 if __name__ == '__main__':
-
+    if not checkVectorCASTVersion(19):
+        print ("Copy build directory for pipelines not supported. Please upgrade VectorCAST")
+        sys.exit(0)
+
     parser = argparse.ArgumentParser()
     parser.add_argument('ManageProject', help='Manager Project Name')
-    parser.add_argument('-l', '--level', help='Environment Name if only doing single environment.  Should be in the form of level/env', default="NotProvided/NotProvided")
+    parser.add_argument('-l', '--level', help='Environment Name if only doing single environment. Should be in the form of level/env', default="NotProvided/NotProvided")
     parser.add_argument('-b', '--basename', help='Enable verbose output', default="")
     parser.add_argument('-e', '--environment', help='Enable verbose output', default="")
     parser.add_argument('--notar', help='Don\'t Product a tar file', default=False, action="store_true")
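getVcastProjectWorkspace() above keeps the open / query / close discipline around VCProjectApi. The sketch below reuses only the calls visible in this diff (VCProjectApi(...), .project.workspace, .close()); the project path is hypothetical, and running it requires a VectorCAST installation with the DataAPI on the Python path.

# Sketch only: same open/query/close pattern as getVcastProjectWorkspace() above.
from vector.apps.DataAPI.vcproject_api import VCProjectApi

def get_workspace(manage_project):
    vcproj = VCProjectApi(manage_project)   # open the Manage project
    try:
        return vcproj.project.workspace     # query what is needed
    finally:
        vcproj.close()                      # always release the project

if __name__ == '__main__':
    print(get_workspace("MyProject.vcm"))   # hypothetical project path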
diff --git a/src/main/resources/scripts/create_index_html.py b/src/main/resources/scripts/create_index_html.py
index 1d71ed22..0df6059c 100644
--- a/src/main/resources/scripts/create_index_html.py
+++ b/src/main/resources/scripts/create_index_html.py
@@ -1,8 +1,34 @@
+#
+# The MIT License
+#
+# Copyright 2025 Vector Informatik, GmbH.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
 import os
 import sys
 import argparse
 import glob
+from vcast_utils import dump, checkVectorCASTVersion, getVectorCASTEncoding
+
+encFmt = getVectorCASTEncoding()
 
 class cd:
     """Context manager for changing the current working directory"""
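The cd helper kept as context above is a context manager for changing the current working directory. Below is a self-contained sketch of the same idea, under the assumption that it simply saves and restores os.getcwd(); the names are illustrative, not the script's own.

# Hedged sketch: a context manager in the same spirit as the script's cd helper.
import os

class ChangeDir:
    """Context manager for changing the current working directory."""
    def __init__(self, new_path):
        self.new_path = os.path.expanduser(new_path)

    def __enter__(self):
        self.saved_path = os.getcwd()   # remember where we started
        os.chdir(self.new_path)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.saved_path)       # always change back, even on error

# Typical use: run report post-processing from inside a report directory.
with ChangeDir("."):
    print(os.getcwd())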
@@ -16,83 +42,113 @@ def __enter__(self):
         os.chdir(self.newPath)
 
     def __exit__(self, etype, value, traceback):
         os.chdir(self.savedPath)
 
+def searchKeyword(search_string, filename):
+    with open(filename, "rb") as fd:
+        for line_number, line in enumerate(fd, start=1):
+            line = line.decode(encFmt, "replace")
+            if search_string in line:
+                start_idx = line.find(search_string)
+                if start_idx != -1:
+                    start_idx += len(search_string)
+
+                    end_idx = line[start_idx:].find("<")
+
+                    if end_idx != -1:
+                        end_idx += start_idx
+
+                return line_number, start_idx, end_idx, line
+
+    return -1, -1, -1, line  # not found
+
+def getEnvName(search_string, filename):
+    report_name = None
+    line_number, start_idx, end_idx, line = searchKeyword(search_string, filename)
+
+    if line_number == -1:
+        env_name = None
+    else:
+        env_name = line[start_idx:end_idx]
+
+    return env_name
+
 def getReportName(filename):
 
     reportName = filename
     reportType = 0
 
-    if "aggregate" in filename:
-        manageProject = filename.split("_aggregate",1)[0]
-        reportName = "Aggregate Coverage Report"
+    if searchKeyword(">Aggregate Coverage Report<", filename)[0] != -1:
+        env_name = getEnvName("Environment Name",filename)
+        if env_name == None:
+            reportName = "Aggregate Coverage Report"
+        else:
+            reportName = "Aggregate Coverage Report {}".format(env_name)
+
         reportType = 1
 
-    elif "full_status" in filename:
-        manageProject = filename.split("_aggregate",1)[0]
+    elif searchKeyword(">Full Status Section<", filename)[0] != -1:
         reportName = "Full Status Report"
 
-    elif "environment" in filename:
-        manageProject = filename.split("_environment",1)[0]
-        reportName = "Environment Report"
-
-    elif "manage_incremental_rebuild_report" in filename:
-        manageProject = filename.split("_manage_incremental_rebuild_report",1)[0]
+    elif searchKeyword("Manage Incremental Rebuild Report", filename)[0] != -1:
         reportName = "Incremental Report Report"
 
-    elif "metrics" in filename:
-        manageProject = filename.split("_metrics",1)[0]
+    elif searchKeyword(">Metrics Report<", filename)[0] != -1:
         reportName = "Metrics Report"
 
-    elif "html_reports" in filename:
-        ## html_reports/VectorCAST_MinGW_C++_UnitTesting_ENV_LINKED_LIST.html
-        comp_ts_env = filename.replace("html_reports/","").replace(".html","")
-        reportName = comp_ts_env
+    elif searchKeyword(">Test Case Summary Report<", filename)[0] != -1:
+        reportName = "System Test Status Report"
+
+    elif searchKeyword(">PC-Lint Plus Results<", filename)[0] != -1:
+        reportName = "PC-Lint Plus Results"
+
+    elif searchKeyword(">Full Report<", filename)[0] != -1:
+        reportName = "Full Report "
+        reportName += getEnvName("Environment Name",filename)
+        reportType = 1
 
-    elif "management" in filename:
-        comp_ts_env = filename.replace("management/","").replace(".html","")
-        reportName = comp_ts_env
-        reportType = 3
-
     else:
         reportType = 2
 
     return reportName, reportType
 
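searchKeyword() above scans a report line by line, finds the marker text, and returns slice boundaries that run up to the next '<'; getReportName() then uses those hits to classify each HTML report by its title instead of by its file name. A hedged, string-only illustration of that slicing rule follows; the report fragment and the marker are made up, not the real report markup.

# Illustration of the "find marker, slice up to the next '<'" rule used above.
line = "<td>Environment Name</td><td>ENV_LINKED_LIST</td>"   # made-up report fragment
marker = "Environment Name</td><td>"                         # made-up marker

start_idx = line.find(marker)
if start_idx != -1:
    start_idx += len(marker)
    end_idx = line[start_idx:].find("<")
    if end_idx != -1:
        end_idx += start_idx
    print(line[start_idx:end_idx])   # -> ENV_LINKED_LIST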
 usingGitLabCI = False
+baseOutputDir = ""
 
-def create_index_html(mpName, isGitLab = False):
-    import pathlib
+def create_index_html(mpName, isGitLab = False, output_dir = ""):
     from vector.apps.DataAPI.vcproject_api import VCProjectApi
     from vector.apps.ReportBuilder.custom_report import CustomReport
 
     global usingGitLabCI
     usingGitLabCI = isGitLab
-    api = VCProjectApi(mpName)
+    global baseOutputDir
+    baseOutputDir = output_dir
+
+    vcproj = VCProjectApi(mpName)
+
     # Set custom report directory to the where this script was
     # found.  Must contain sections/index_section.py
-    rep_path = pathlib.Path(__file__).parent.resolve()
+    rep_path = os.path.abspath(os.path.dirname(__file__))
 
-    if usingGitLabCI:
-        output_file="html_reports/index.html"
-    else:
-        output_file="index.html"
+    output_file=os.path.join(baseOutputDir,"index.html")
 
     CustomReport.report_from_api(
-        api=api,
+        api=vcproj,
         title="HTML Reports",
         report_type="INDEX_FILE",
         formats=["HTML"],
         output_file=output_file,
         sections=['CUSTOM_HEADER', 'REPORT_TITLE', 'TABLE_OF_CONTENTS','INDEX_SECTION', 'CUSTOM_FOOTER'],
         customization_dir=rep_path)
-
-    api.close()
-
+    vcproj.close()
+
 def create_index_html_body ():
 
-    tempHtmlReportList = glob.glob("*.html")
-    tempHtmlReportList += glob.glob("html_reports/*.html")
-    tempHtmlReportList += glob.glob("management/*.html")
+    tempHtmlReportList = glob.glob(os.path.join(baseOutputDir,"*.html"))
+    tempHtmlReportList += glob.glob(os.path.join(baseOutputDir,"html_reports/*.html"))
+    tempHtmlReportList += glob.glob(os.path.join(baseOutputDir,"management/*.html"))
 
     htmlReportList = []
 
     try:
diff --git a/src/main/resources/scripts/fixup_reports.py b/src/main/resources/scripts/fixup_reports.py
index d0314802..dee40364 100644
--- a/src/main/resources/scripts/fixup_reports.py
+++ b/src/main/resources/scripts/fixup_reports.py
@@ -25,25 +25,22 @@ from __future__ import division
 from __future__ import print_function
 
+import sys, os, locale
+
 import sys, os
 
 # adding path
-workspace = os.getenv("WORKSPACE")
-if workspace is None:
-    workspace = os.getcwd()
-
-jenkinsScriptHome = os.path.join(workspace,"vc_scripts")
-python_path_updates = jenkinsScriptHome
-sys.path.append(python_path_updates)
-
-# needed because vc18 vpython does not have bs4 package
 if sys.version_info[0] < 3:
-    python_path_updates += os.sep + 'vpython-addons'
+    python_path_updates = os.path.join(os.path.dirname(os.path.abspath(__file__)),'vpython-addons')
     sys.path.append(python_path_updates)
 
 from bs4 import BeautifulSoup
-from safe_open import open
+try:
+    from safe_open import open
+except:
+    pass
 import tee_print
+from vcast_utils import getVectorCASTEncoding
 
 def fixup_2020_soup(main_soup):
@@ -76,6 +73,7 @@ def fixup_2020_soup(main_soup):
         'col_subprogram': 'word-break:break-all;width:30%;',
         'col_complexity': 'white-space:nowrap;',
         'col_metric': 'white-space:nowrap;',
+        'mcdc-all-pairs': 'white-space:nowrap;',
         'i0' : 'padding-left:0.25em;min-width:11em',
         'i1' : 'padding-left: 1.25em;min-width: 11em;',
         'i2' : 'padding-left: 2.25em;',
@@ -107,18 +105,42 @@ def fixup_2020_soup(main_soup):
     return main_soup
 
 def fixup_2020_reports(report_name):
-    with open(report_name,"r") as fd:
+
+    encFmt = getVectorCASTEncoding()
+
+    with open(report_name, "rb") as fd:
+        raw = fd.read().decode(encFmt, "replace")
+
+    try:
+        # First attempt: use lxml if available, else let BS pick
         try:
-            main_soup = BeautifulSoup(fd,features="lxml")
-        except:
-            main_soup = BeautifulSoup(fd)
+            import lxml  # noqa
+            parser = "lxml"
+        except ImportError:
+            parser = "html.parser"
+
+        main_soup = BeautifulSoup(raw, features=parser)
+
+    except Exception:
+        try:
+            # Fallback to UTF-8
+            main_soup = BeautifulSoup(
+                raw.encode("utf-8", "replace"),
+                features=parser
+            )
+        except Exception:
+            main_soup = BeautifulSoup(
+                raw.encode(encFmt, "replace")
+            )
 
     main_soup = fixup_2020_soup(main_soup)
 
-    with open(report_name,"w") as fd:
-        fd.write(main_soup.prettify(formatter="html"))
-
+    with open(report_name, "wb") as fd:
+        fd.write(main_soup.prettify(formatter="html").encode(encFmt, "replace"))
+
+
 if __name__ == '__main__':
+    report_name = sys.argv[1]
     fixup_2020_reports(report_name)
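fixup_2020_reports() now decodes the report bytes explicitly and only then hands the text to BeautifulSoup, preferring lxml when it is installed. Below is a minimal sketch of the same decode-then-pick-a-parser idea, with 'latin-1' standing in for the plugin's getVectorCASTEncoding() helper.

# Hedged sketch of the read/parse half of the change above.
from bs4 import BeautifulSoup

def load_report(path, enc="latin-1"):
    with open(path, "rb") as fd:
        raw = fd.read().decode(enc, "replace")   # never raise on odd bytes
    try:
        import lxml  # noqa: F401 -- prefer the faster parser when installed
        parser = "lxml"
    except ImportError:
        parser = "html.parser"                   # bundled fallback parser
    return BeautifulSoup(raw, features=parser)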
diff --git a/src/main/resources/scripts/full_report_no_toc.py b/src/main/resources/scripts/full_report_no_toc.py
index ff36124f..efd84a28 100644
--- a/src/main/resources/scripts/full_report_no_toc.py
+++ b/src/main/resources/scripts/full_report_no_toc.py
@@ -36,23 +36,6 @@ def generate_full_status(manageProject):
     mpName = os.path.splitext(os.path.basename(manageProject))[0]
     full_report_name = mpName + "_full_report.html"
     metrics_report_name = mpName + "_metrics_report.html"
-
-    # try:
-        # from vector.apps.DataAPI.vcproject_api import VCProjectApi
-        # api = VCProjectApi(manageProject)
-
-        # api.report(report_type="MANAGE_STATUS_FULL_REPORT", formats=["HTML"], output_file=full_report_name , environments=api.Environment.all(), levels = [])
-        # api.report(report_type="MANAGE_METRICS_REPORT" , formats=["HTML"], output_file=metrics_report_name, environments=api.Environment.all(), levels = [])
-
-        # shutil.copy(full_report_name,full_report_name + "_tmp")
-        # fixup_reports.fixup_2020_reports(full_report_name + "_tmp")
-
-        # shutil.copy(metrics_report_name,metrics_report_name + "_tmp")
-        # fixup_reports.fixup_2020_reports(metrics_report_name + "_tmp")
-
-        # api.close()
-
-    # except:
 
     from managewait import ManageWait
diff --git a/src/main/resources/scripts/generate-results.py b/src/main/resources/scripts/generate-results.py
index dc6871f9..785429a2 100644
--- a/src/main/resources/scripts/generate-results.py
+++ b/src/main/resources/scripts/generate-results.py
@@ -1,7 +1,7 @@
 #
 # The MIT License
 #
-# Copyright 2024 Vector Informatik, GmbH.
+# Copyright 2025 Vector Informatik, GmbH.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -27,6 +27,7 @@
 
 import os
 import sys
+
 import argparse
 import shutil
 import re
@@ -38,23 +39,8 @@
 import tee_print
 from safe_open import open
+from vcast_utils import getVectorCASTEncoding
 
-# adding path
-workspace = os.getenv("WORKSPACE")
-if workspace is None:
-    workspace = os.getcwd()
-
-jenkinsScriptHome = os.path.join(workspace,"vc_scripts")
-
-python_path_updates = jenkinsScriptHome
-sys.path.append(python_path_updates)
-
-if sys.version_info[0] < 3:
-    python_path_updates += os.sep + 'vpython-addons'
-    sys.path.append(python_path_updates)
-    using_27_python = True
-else:
-    using_27_python = False
 
 import tcmr2csv
 import vcastcsv2jenkins
@@ -81,7 +67,9 @@
     from vector.apps.DataAPI.vcproject_api import VCProjectApi
 except:
     pass
-
+
+encFmt = getVectorCASTEncoding()
+
 #global variables
 global verbose
 global print_exc
@@ -178,11 +166,14 @@ def readManageVersion(ManageFile):
     version = 14
     if os.path.isfile(ManageFile + ".vcm"):
         ManageFile = ManageFile + '.vcm'
-    with open(ManageFile, 'r') as projFile:
-        for line in projFile:
+
+    with open(ManageFile, 'rb') as projFile:
+        for raw_line in projFile:                       # iterates lazily, line by line
+            line = raw_line.decode(encFmt, "replace")   # decode each line
             if 'version' in line and 'project' in line:
                 version = int(re.findall(r'\d+', line)[0])
                 break
+
     if verbose:
         print("Version of VectorCAST project file = %d" % version)
         print("(Levels change in version 17 (*maybe) and above)")
@@ -308,8 +299,8 @@ def fixup_css(report_name):
     if not need_fixup:
         return
 
-    with open(report_name,"r") as fd:
-        data = fd.read()
+    with open(report_name,"rb") as fd:
+        data = fd.read().decode('utf-8','replace')
 
     #fix up inline CSS because of Content Security Policy violation
     newData = data[: data.index("