diff --git a/.gitignore b/.gitignore new file mode 100755 index 0000000..8a9b319 --- /dev/null +++ b/.gitignore @@ -0,0 +1,105 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.idea/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100755 index 0000000..a11e7e8 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,27 @@ +# Contributing + + +Fork, then clone the repo: +```shell + git clone https://github.com/dowjones/sast.git +``` +The easiest way to run test, lint, etc is via docker: + +https://docs.docker.com/install/ + +Make sure the tests pass: +```shell + docker exec -ti sast_open python -m unittest discover /tmp/sast_controller/tests/ +``` + + +### Testing +You will need to create fork of the library and cover you feature with tests. + +Push to your fork and [submit a pull request][pr]. + +We may suggest some changes, improvements, or alternatives. + +If the above requirements are met and communications are clear (good comments, dialog, and [commit messages][commit]) your PR will likely be accepted. + +[commit]: http://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project#Commit-Guidelines \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100755 index 0000000..446dccf --- /dev/null +++ b/Dockerfile @@ -0,0 +1,83 @@ +FROM python:3.6-slim + +LABEL maintainer="karen.florykian@dowjones.com,igor.bakalo@dowjones.com" + +ENV appDir /code +ENV GIT_SSL_NO_VERIFY 1 +ENV OPEN_SOURCE_CONFIGURATION_ORIGIN LocalPath +ENV CX_WSDL_VERSION v7 +ENV DEPENDENCY_CHECK_VERSION 3.3.0 +ENV NPM_REGISTRY https://registry.npmjs.org +ENV MASK_PERCENT 0.7 +ENV MAX_TOKEN_LEN 5 +ENV SBT_VERSION 0.13.15 + +RUN mkdir -p /usr/share/man/man1 && \ + apt-get -qq update && apt-get install -q -y --no-install-recommends \ + unzip \ + curl \ + wget \ + openjdk-11-jdk \ + apt-transport-https \ + rsync \ + git \ + python3-pip \ + python3-setuptools \ + zlib1g-dev \ + lsb-release \ + gnupg \ + dirmngr \ + maven && \ + apt-get clean && \ + apt-get autoremove --purge && \ + rm -rf /var/lib/apt/lists/* + +# Install NodeJS +## Adding the NodeSource signing key to your keyring... +RUN wget -qO- https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - +## Creating apt sources list file for the NodeSource Node.js 10.x repo... 
+RUN echo 'deb https://deb.nodesource.com/node_10.x stretch main' > /etc/apt/sources.list.d/nodesource.list +RUN echo 'deb-src https://deb.nodesource.com/node_10.x stretch main' >> /etc/apt/sources.list.d/nodesource.list +RUN apt-get update && apt-get install -y --no-install-recommends \ + nodejs && \ + apt-get clean && \ + apt-get autoremove --purge && \ + rm -rf /var/lib/apt/lists/* + +RUN npm -g i n && n 10 --test +RUN npm -g i npm@6.1 snyk@1.110.2 --test + +RUN echo "deb https://dl.bintray.com/sbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823 +RUN apt-get update && \ + apt-get install -y sbt --no-install-recommends && \ + apt-get clean && \ + apt-get autoremove --purge && \ + rm -rf /var/lib/apt/lists/* + +RUN mkdir -p ~/.sbt/0.13/plugins && mkdir -p ~/.sbt/1.0/plugins +RUN echo "addSbtPlugin(\"net.virtual-void\" % \"sbt-dependency-graph\" % \"0.9.2\")" > ~/.sbt/0.13/plugins/plugins.sbt +RUN echo "addSbtPlugin(\"net.virtual-void\" % \"sbt-dependency-graph\" % \"0.9.2\")" > ~/.sbt/1.0/plugins/plugins.sbt + +# Installing Python Dependencies + +RUN pip3 install --upgrade pip && \ + pip3 install bandit==1.5.1 junit-xml==1.8 requests==2.21.0 zeep==2.5.0 PyYAML==3.12 bs4==0.0.1 \ + slackclient==1.2.1 jira==1.0.15 configparser==3.5.0 PyJWT==1.6.4 cryptography==2.2.2 \ + xmltodict==0.11.0 junit2html==21 && \ + pip3 install https://github.com/reportportal/client-Python/archive/3.2.2.zip + +# Creating code directory + +RUN mkdir ${appDir} + +COPY bugbar /tmp/bugbar +COPY sast_controller /tmp/sast_controller +ADD ./setup.py /tmp/setup.py +RUN cd /tmp && python3 setup.py install + +WORKDIR ${appDir} +ADD ./entrypoint.sh /tmp/entrypoint.sh +RUN chmod +x /tmp/entrypoint.sh + +ENTRYPOINT ["/tmp/entrypoint.sh"] \ No newline at end of file diff --git a/README.md b/README.md new file mode 100755 index 0000000..1a9a485 --- /dev/null +++ b/README.md @@ -0,0 +1,28 @@ +![Reapsaw](docs/logo.png) +## Reapsaw + + +### Introduction + +Dow Jones Reapsaw is our way to embed security testing within an SDLC lifecycle. This helps to ensure that security testing is consistent and effective. + +Reapsaw is an orchestration platform for various security tools (static and run-time) which helps in identifying different types of security bugs during the process (open source vulnerabilities, static code security bugs, insecure functions, secrets in code, authentication bypass etc.). +It can be easily integrated into already-established process and tools in an organizations SDLC, bug trackers, source repositories and other testing tools. Being CI/CD friendly,once enabled within the CI pipeline, reapsaw will help in identifying very high fidelity and low false positive bugs into developers backlog. This will enable in quick feedback and provide developers friendly recommendations on fixes. + + +![Reapsaw](docs/execution-model.png) + +Static application security testing (SAST) is a set of technologies designed to analyze application source code for coding and design conditions that are indicative of security vulnerabilities. 
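+For a quick sense of what enabling Reapsaw looks like, a minimal local run is sketched below. All values are placeholders — substitute your own Checkmarx/Snyk details — and the full walkthrough (including report handling) lives in the Getting Started guide under `docs/`.
+
+```bash
+# Build the scanner image once, then point it at a checked-out project.
+docker build -t sast:latest .
+docker run --name sast --rm \
+  -v /path/to/your/project:/code \
+  -e TASKS="cx,snyk" \
+  -e CX_PROJECT="my_project" \
+  -e CX_URL="https://checkmarx.example.com" \
+  -e CX_USER="scan_user" \
+  -e CX_PASSWORD="scan_password" \
+  -e SNYK_TOKEN="my-snyk-api-token" \
+  sast:latest
+# Findings are written to the reports/ folder inside the mounted project.
+```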
+ +### Documentation +- [Wiki](https://github.com/dowjones/reapsaw/wiki) +- [Github Docs](https://github.com/dowjones/reapsaw/tree/develop/docs) + +### Tests + +- see also [CONTRIBUTING](https://github.com/dowjones/reapsaw/blob/develop/CONTRIBUTING.md) + +``` +tox -e install +tox -e test +``` \ No newline at end of file diff --git a/bugbar/bugbar.json b/bugbar/bugbar.json new file mode 100755 index 0000000..2062918 --- /dev/null +++ b/bugbar/bugbar.json @@ -0,0 +1,38 @@ +{ + "Cross-site Scripting (XSS)": { + "is_issue": "", + "risk_rating": "High", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "Cross-site scripting (reflected);Reflected_XSS_All_Clients;Stored_XSS;Reflected Cross-Site Scripting;Reflected_XSS;Client_DOM_XSS" + }, + "SQL Injection": { + "is_issue": "", + "risk_rating": "Critical", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "SQL Injection in Content Provider;SQL_Injection" + }, + "Sensitive Information Disclosure": { + "is_issue": "", + "risk_rating": "High", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "Sensitive Information Disclosure;Client_Password_In_Comment;Client_Password_Weak_Encryption;CPP_Insecure_Credential_Storage;HardcodedCredentials;Hardcoded_Connection_String;Hardcoded_Password;Hardcoded_password_in_Connection_String;Hardcoded_Password_In_Gradle;Hardcoded_Session_Secret_Token;Hard_Coded_Cryptography_Key;Insufficiently_Protected_Credentials;Kony_Hardcoded_EncryptionKey;Missing_Password_Field_Masking;Password_In_Comment;Password_in_Configuration_File;Password_misuse;Plaintext_Storage_of_a_Password;Storing_Passwords_in_a_Recoverable_Format;Use_of_Hardcoded_Cryptographic_Key;Use_Of_Hardcoded_Password;Use_of_Hardcoded_Password;Use_of_Hard_coded_Cryptographic_Key;Use_of_Hard_coded_Security_Constants" + }, + "Vulnerable Software": { + "is_issue": "", + "risk_rating": "High", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "Obsolete WordPress version;Client_Use_Of_JQuery_Outdated_Version;Client_JQuery_Deprecated_Symbols;Vulnerable Software Version" + } +} \ No newline at end of file diff --git a/docs/bug_bar.md b/docs/bug_bar.md new file mode 100755 index 0000000..56c2780 --- /dev/null +++ b/docs/bug_bar.md @@ -0,0 +1,65 @@ +BugBar is designed to normalize vulnerabilities across all the tools to the single format: +* `Vulnerabilities name`, +* `Risk Rating`, +* `Priority`, +* `Description`, +* `Recommendation` + +# Bug Bar structure +Please find BugBar sample by the link [Reapsaw](https://github.com/dowjones/sast/blob/develop/bugbar/bugbar.json) + +> Bug Bar is configurable + +
<summary>JSON report</summary>

+ +```json +{ + "Cross-Site Request Forgery (CSRF)": { + "is_issue": "", + "risk_rating": "Medium", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "XSRF" + }, + "SQL Injection": { + "is_issue": "", + "risk_rating": "Critical", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "SQL Injection in Content Provider;SQL_Injection" + }, + "Sensitive Information Disclosure": { + "is_issue": "", + "risk_rating": "High", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "Sensitive Information Disclosure;Client_Password_In_Comment;Client_Password_Weak_Encryption;CPP_Insecure_Credential_Storage;HardcodedCredentials;Hardcoded_Connection_String;Hardcoded_Password;Hardcoded_password_in_Connection_String;Hardcoded_Password_In_Gradle;Hardcoded_Session_Secret_Token;Hard_Coded_Cryptography_Key;Insufficiently_Protected_Credentials;Kony_Hardcoded_EncryptionKey;Missing_Password_Field_Masking;Password_In_Comment;Password_in_Configuration_File;Password_misuse;Plaintext_Storage_of_a_Password;Storing_Passwords_in_a_Recoverable_Format;Use_of_Hardcoded_Cryptographic_Key;Use_Of_Hardcoded_Password;Use_of_Hardcoded_Password;Use_of_Hard_coded_Cryptographic_Key;Use_of_Hard_coded_Security_Constants" + }, + "Vulnerable Software": { + "is_issue": "", + "risk_rating": "High", + "jira_priority": "Major", + "grouped": "", + "description": {}, + "recommendation": {}, + "cxsast": "Obsolete WordPress version;Client_Use_Of_JQuery_Outdated_Version;Client_JQuery_Deprecated_Symbols;Vulnerable Software Version" + } +} +``` +
</details>
+
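+To make the mapping concrete, here is a small illustrative sketch (not the converter code Reapsaw itself uses) of how a raw tool-specific finding name can be matched to a Bug Bar entry through the `cxsast` field:
+
+```python
+import json
+
+def find_bugbar_entry(bugbar_path, tool_issue_name):
+    """Return the Bug Bar title, risk rating and Jira priority for a raw tool finding name."""
+    with open(bugbar_path) as fd:
+        bugbar = json.load(fd)
+    for title, entry in bugbar.items():
+        # `cxsast` holds a semicolon-separated list of tool-specific query names
+        if tool_issue_name in entry.get('cxsast', '').split(';'):
+            return title, entry['risk_rating'], entry['jira_priority']
+    return None
+
+# A Checkmarx "SQL_Injection" result normalizes to the "SQL Injection" Bug Bar entry:
+print(find_bugbar_entry('bugbar/bugbar.json', 'SQL_Injection'))
+# ('SQL Injection', 'Critical', 'Major')
+```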
+ +## Features +* Grouping mechanism +* Mark issues as `No Defect` +* Set `Severity` for specific Vulnerability +* Set `Priority` for specific Vulnerability +* Set `Description` for specific Vulnerability and language +* Set `Recommendation` for specific Vulnerability and language diff --git a/docs/checkmarx_configuration.md b/docs/checkmarx_configuration.md new file mode 100755 index 0000000..14bee3e --- /dev/null +++ b/docs/checkmarx_configuration.md @@ -0,0 +1,38 @@ +Contact your administrator for getting Checkmarx access. + +>`Reapsaw` creates new project with “Default” preset if no existing + +## How to create new project +Before running the container we recommend to create new project and specify preset in Checkmarx. +1. Create new project and click next: +![cx1](https://github.com/dowjones/sast/blob/develop/docs/create_cx_prj.png) + +2. In "Location" select "Source Control": GIT https://github.com/OWASP/NodeGoat: +![cx2](https://github.com/dowjones/sast/blob/develop/docs/create_cx_prj_3.png) + +3. Click finish +![cx2](https://github.com/dowjones/sast/blob/develop/docs/create_cx_prj_1.png) + +4. Verify that project created: +![cx3](https://github.com/dowjones/sast/blob/develop/docs/create_cx_prj2.png) + +Details can be found in Checkmarx documentation by the links: +* [Create Project](https://checkmarx.atlassian.net/wiki/spaces/KC/pages/589955153/Creating+and+Configuring+a+CxSAST+Project+v8.8.0) +* [Preset Manager](https://checkmarx.atlassian.net/wiki/spaces/KC/pages/49250315/Preset+Manager) +* [Creating and Managing Projects](https://checkmarx.atlassian.net/wiki/spaces/KC/pages/28606543/Creating+and+Managing+Projects). + +## Reapsaw parameters: +```js + CX_URL # Checkmarx (e.g. https://checkmarx.com) + CX_USER # username + CX_PASSWORD # password + CX_PROJECT # project name +``` + +### [Optional parameters]: +Use next parameter to remove from scan unused folders or files. + +```js + cx_files # Coma separated list, e.g. cx_files="txt,png,xls" + cx_path # Coma separated list e.g. cx_path="docs/,tests/,some/other/code" +``` \ No newline at end of file diff --git a/docs/create_cx_prj.png b/docs/create_cx_prj.png new file mode 100755 index 0000000..c62eee3 Binary files /dev/null and b/docs/create_cx_prj.png differ diff --git a/docs/create_cx_prj2.png b/docs/create_cx_prj2.png new file mode 100755 index 0000000..5f8289a Binary files /dev/null and b/docs/create_cx_prj2.png differ diff --git a/docs/create_cx_prj_1.png b/docs/create_cx_prj_1.png new file mode 100755 index 0000000..f811720 Binary files /dev/null and b/docs/create_cx_prj_1.png differ diff --git a/docs/create_cx_prj_3.png b/docs/create_cx_prj_3.png new file mode 100755 index 0000000..2cb9592 Binary files /dev/null and b/docs/create_cx_prj_3.png differ diff --git a/docs/execution-model.png b/docs/execution-model.png new file mode 100755 index 0000000..f9d3916 Binary files /dev/null and b/docs/execution-model.png differ diff --git a/docs/getting_started.md b/docs/getting_started.md new file mode 100755 index 0000000..48c206d --- /dev/null +++ b/docs/getting_started.md @@ -0,0 +1,324 @@ +## Overview +Reapsaw automation solution was build to run scans and provide developers consolidated report with friendly recommendations on fixes. + +* Vulnerabilities reported to build pipeline, ReportPortal and Jira, +* Easy to integrate within build pipeline; +* BugBar functionality. 
+ +## Prerequisites: + +* Installed [Docker v18.09+](https://docs.docker.com/docker-for-mac/install/) +* You need to build and package the project for test it as for deploy +* Checkmarx or Snyk API token + +Instructions about how to setup Checkmarx and Snyk can be found on the right hand menu. + +Pull the code of the project for scanning from the CSV: +```bash +$ git clone +``` + +For the demo purposes we will use one of the [OWASP repositories](https://github.com/OWASP/NodeGoat): + +```bash +$ git clone https://github.com/OWASP/NodeGoat +$ cd NodeGoat +$ npm install # exit code 0 +``` +> Make sure you're got no errors after the build step command. + +## Install Reapsaw + +This will let you run `Reapsaw` on your local machine. + +> Ensure you have Checkmarx or Snyk token in place. + + +1. Clone [`Reapsaw`](https://github.com/dowjones/sast): +```bash +$ git clone https://github.com/dowjones/sast +``` +Now, look in your project folder and see that there is a newly created `sast` folder with `Reapsaw` source code. + +2. Build image: +```bash +$ cd sast +$ docker build -t sast:latest . +.... + ---> b995eb5d7d1a +Successfully built b995eb5d7d1a +Successfully tagged sast:latest +``` +Here `-t sast:latest` is the name and tag of image. + +The resulting image will be tagged as `sast:latest`: + +3. Run container: +```js +$ docker run --name sast --rm \ + -v :/code \ + -e TASKS= \ + -e CX_PROJECT= \ + -e CX_URL= \ + -e CX_USER= \ + -e CX_PASSWORD= \ + -e SNYK_TOKEN= \ + sast:latest +``` +There are a few different run options: +```js + -e TASKS # Available option: "cx","snyk","cx,snyk" + -e TASKS='cx' # e.g You want Checkmarx scan to happen: + -e CX_PROJECT # Project name in Checkmarx + -e CX_URL # Checkmarx (e.g. https://checkmarx.com) + -e CX_USER # username + -e CX_PASSWORD # password + -e SNYK_TOKEN # Snyk API token +``` + +> If you want to Checkmarx and Snyk to happen together: +```js +$ docker run --name sast --rm \ + -v /Users/demo/NodeGoat:/code \ + -e TASKS="cx,snyk" \ + -e CX_PROJECT="demo_sast" \ + -e CX_URL="https://mycheckmarx.com" \ + -e CX_USER="my_user_id" \ + -e CX_PASSWORD="my_user_pwd" \ + -e SNYK_TOKEN="my-snyk-api-token" \ + sast:latest +``` + +`Reapsaw` will start scanning source code and creates reports in `reports/` folder. +Container will be automatically deleted when run finished. + +Reports are available on your host machine in `reports` folder: + +```bash +$ cat /reports/json_report.json +``` + +> If you want only Snyk to happen +```js +$ docker run --name sast --rm \ + -e TASKS="snyk" \ + -v :/code \ + -e SNYK_TOKEN= \ + sast:latest +``` + + +## Reports: +* JSON report +```python +[ + { + "Issue Name": , # Vulnerability name e.g "SQL Injection" + "Issue Tool": , # Tool name e.g "Checkmarx" + "Issue Priority": , # "Major" by default + "Issue Severity": , # According to BugBar configuration + "Description": , # Used for Jira, e.g The software does not sufficiently validate ... + "Recommendations": , # Recommendation from tool or BugBar e.g Upgrade `marked` to version 0.6.2 or higher + "Instances": , # Location of finding/module e.g "File routes/login.js" + "Jira Name": , # Title of the corresponding ticket created in Jira. Get's from BugBar + "Links": , # Link to the "Checkmarx Viewer" + "Snippet": , # Code snippet if applicable: "var url_params = url.parse(req.url, true).query;" + "top_level_module": , # Name of the top level module to update e.g "marked" , + .... + } +] +``` + +
<summary>Example</summary>

+ +#### JSON report + +```json +[ + { + "Issue Name": "Cross-site Scripting (XSS).app/routes/contributions.js", + "Issue Tool": "Checkmarx", + "Steps To Reproduce": "", + "Issue Priority": "Major", + "Issue Severity": "High", + "Issue Confidence": "Certain", + "CWE": "[CWE-79|https://cwe.mitre.org/data/definitions/79]", + "CVE": "", + "Overview": "", + "Recommendations": "# Validate all input, regardless of source. Validation should be based on a whitelist: accept only data fitting a specified structure, rather than reject bad patterns. Check for:\n#* Data type\n#* Size\n#* Range\n#* Format\n#* Expected values\n# Fully encode all dynamic data before embedding it in output.\n# Encoding should be context-sensitive. For example:\n#* HTML encoding for HTML content\n#* HTML Attribute encoding for data output to attribute values\n#* JavaScript encoding for server-generated JavaScript.\n# Consider using either the ESAPI encoding library, or the built-in platform functions. For earlier versions of ASP.NET, consider using the AntiXSS library.\n# In the Content-Type HTTP response header, explicitly define character encoding (charset) for the entire page.\n# Set the httpOnly flag on the session cookie, to prevent XSS exploits from stealing the cookie.\n\n", + "References": "Line 34 in file [app/routes/contributions.js|https://checkmarx.com/CxWebClient/ViewerMain.aspx?scanid=1041725&projectid=3133&pathid=6]", + "Paths": "", + "URLs": "", + "error_string": "Cross-site Scripting (XSS) 79\napp/routes/contributions.js", + "Description": " The software does not sufficiently validate, filter, escape, and/or encode user-controllable input before it is placed in output that is used as a web page that is served to other users.\n GROUP: JavaScript_Server_Side_Vulnerabilities\n CATEGORY: A7-Cross-Site Scripting (XSS)\n *Code*:\n ``` var userId = req.session.userId; ```", + "Instances": "File app/routes/contributions.js", + "Attachments": [], + "Tags": [ + { + "TestType": "sast" + }, + { + "Provider": "Reapsaw" + }, + { + "Tool": "Checkmarx" + } + ], + "Jira Name": "Cross-site Scripting (XSS)", + "Repo": "", + "Links": "https://checkmarx.com/CxWebClient/ViewerMain.aspx?scanid=1041725&projectid=3133&pathid=6", + "Snippet": "var userId = req.session.userId;", + "Jira Description": "h3.*Instances:*\nFile app/routes/contributions.js\nh3.*Recommendations:*\n\n# Validate all input, regardless of source. Validation should be based on a whitelist: accept only data fitting a specified structure, rather than reject bad patterns. Check for:\n#* Data type\n#* Size\n#* Range\n#* Format\n#* Expected values\n# Fully encode all dynamic data before embedding it in output.\n# Encoding should be context-sensitive. For example:\n#* HTML encoding for HTML content\n#* HTML Attribute encoding for data output to attribute values\n#* JavaScript encoding for server-generated JavaScript.\n# Consider using either the ESAPI encoding library, or the built-in platform functions. 
For earlier versions of ASP.NET, consider using the AntiXSS library.\n# In the Content-Type HTTP response header, explicitly define character encoding (charset) for the entire page.\n# Set the httpOnly flag on the session cookie, to prevent XSS exploits from stealing the cookie.\n\n\nh3.*Overview:*\n{panel:title=Cross-site Scripting (XSS)}*Description*: \nThe software does not sufficiently validate, filter, escape, and/or encode user-controllable input before it is placed in output that is used as a web page that is served to other users.\n GROUP: JavaScript_Server_Side_Vulnerabilities\n CATEGORY: A7-Cross-Site Scripting (XSS)\n *Code*:\n ``` var userId = req.session.userId; ```\n\n*References*: \nLine 10 in file [app/routes/contributions.js|https://checkmarx.com/CxWebClient/ViewerMain.aspx?scanid=1041725&projectid=3133&pathid=5]\n\n{panel}\n{panel:title=Cross-site Scripting (XSS)}*Description*: \nThe software does not sufficiently validate, filter, escape, and/or encode user-controllable input before it is placed in output that is used as a web page that is served to other users.\n GROUP: JavaScript_Server_Side_Vulnerabilities\n CATEGORY: A7-Cross-Site Scripting (XSS)\n *Code*:\n ``` var userId = req.session.userId; ```\n\n*References*: \nLine 34 in file [app/routes/contributions.js|https://checkmarx.com/CxWebClient/ViewerMain.aspx?scanid=1041725&projectid=3133&pathid=6]\n\n{panel}\n" + }, + { + "Issue Name": "Regular Expression Denial of Service (ReDoS).marked", + "Issue Tool": "Snyk", + "Steps To Reproduce": "", + "Issue Priority": "Major", + "Issue Severity": "High", + "Issue Confidence": "Certain", + "CWE": "CWE-185", + "CVE": "", + "Overview": "", + "Recommendations": "Upgrade `marked` to version 0.6.2 or higher", + "References": "\r\n- [GitHub PR](https://github.com/markedjs/marked/pull/1083)\r\n- [GitHub Commit](https://github.com/markedjs/marked/pull/1083)", + "Paths": "owasp-nodejs-goat>marked@0.3.9", + "URLs": "", + "error_string": "marked", + "Description": "*Vulnerable Package:* marked\n*Current Version:* 0.3.9\n*Vulnerable Version(s):* <0.3.18\n \n *Remediation:*\r\nUpgrade marked to version 0.3.17 or higher\r\n\r\n\n Overview\r\n[`marked`](https://www.npmjs.com/package/marked) is a markdown parser built for speed\r\n\r\nAffected versions of this package are vulnerable to Regular Expression Denial of Service (ReDoS) attacks. This can cause an impact of about 10 seconds matching time for data 150 characters long.\r\n\r\n\n ", + "Instances": "marked", + "Attachments": [], + "Tags": [ + { + "TestType": "sast" + }, + { + "Provider": "Reapsaw" + }, + { + "Tool": "Snyk" + } + ], + "Jira Name": "Vulnerable Software", + "Repo": "", + "top_level_module": "marked", + "upgrades": [ + false, + "marked@0.3.18" + ], + "language": "js", + "RP Defect Type": "Product Bug" + } +] +``` +
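+Because the report is plain JSON, it can be sliced with standard tooling. For example, assuming `jq` is available on the host:
+
+```bash
+# List high-severity findings together with the tool that reported them
+jq -r '.[] | select(."Issue Severity" == "High") | [."Issue Tool", ."Issue Name"] | @tsv' \
+  reports/json_report.json
+```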
</details>
+
+ +
<summary>JUNIT report</summary>

+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Illustrative skeleton only: the real junit_report.xml is generated by the scan,
+     with one test case per finding, so names and counts will differ. -->
+<testsuites>
+  <testsuite name="SAST Scan" tests="2" failures="2">
+    <testcase classname="Checkmarx" name="Cross-site Scripting (XSS).app/routes/contributions.js">
+      <failure message="High: Cross-site Scripting (XSS)"/>
+    </testcase>
+    <testcase classname="Snyk" name="Regular Expression Denial of Service (ReDoS).marked">
+      <failure message="High: Regular Expression Denial of Service (ReDoS)"/>
+    </testcase>
+  </testsuite>
+</testsuites>
+```
</details>
+
+ +## Other options +1. Run multiple tools: +```js +$ docker exec -t sast scan +``` + * to run only Checkmarx: + +```js +$ docker exec -t -e TASKS=cx sast scan +``` + +2. Generating reports: + +```js +$ docker exec -it sast generate_reports -r false +``` +3. Convert Report into HTML: + +```js +$ docker exec -it sast junit2html /code/reports/junit_report.xml /code/reports/report.html +``` +4. HTML report in Browser: + +```js +$ open /reports/report.html + +``` + +
<summary>Example</summary>

+ +#### HTML report +![HTML](https://github.com/dowjones/sast/blob/develop/docs/html_report.png) +
</details>
+
+ + +5. Push reports to ReportPortal : + +> If want to send results in Report Portal you should add next environment variables : +```js + -e REPORT_PORTAL_URL # Report Portal (e.g. http://reportportal.io) + -e RP_TOKEN # You can get UUID user profile page in the Report Portal. + -e RP_PROJECT # Project name in Report Portal +``` +Find Report Portal configuration by the link: + +```js +$ docker exec -it \ + -e RP_PROJECT="demo_data" \ + -e REPORT_PORTAL_URL="https://reportportal.io" \ + -e RP_TOKEN="API-TOKEN" \ + sast generate_reports +``` + + +6. Push vulnerabilities to JIRA +> Make sure you're have a connection to Jira. + +* Set env variables with JIRA parameters: +```js + -e JIRA_HOST # URL to JIRA + -e JIRA_USR # user name + -e JIRA_PWD # user password +``` +```js +$ docker exec -it \ + -e JIRA_HOST='https://jira.example.net' \ + -e JIRA_PWD='password' \ + -e JIRA_USR='username' \ + sast push_to_jira --jira_project TEST --jira_assignee testuser +``` +There are different options : + + --jira_project # Required + --jira_assignee # Required + --report_file # Optional + --defect_type # Optional + +
<summary>JIRA Ticket Example</summary>

+![ticket_example](https://github.com/dowjones/sast/blob/develop/docs/ticket.png)
</details>
\ No newline at end of file diff --git a/docs/html_report.png b/docs/html_report.png new file mode 100755 index 0000000..258f607 Binary files /dev/null and b/docs/html_report.png differ diff --git a/docs/jenkins-freestyle.md b/docs/jenkins-freestyle.md new file mode 100755 index 0000000..12d2535 --- /dev/null +++ b/docs/jenkins-freestyle.md @@ -0,0 +1,108 @@ +## Jenkins execute shell example + +## Prerequisites: + +* Create freestyle Jenkins job +* Built `Reapsaw` container +* Update next parameters in script: +** CX_URL= +** CX_USER= +** CX_PASSWORD= +** RP_URL= +** RP_TOKEN= + +Information how to do it can be found by the links below: +* https://wiki.jenkins.io/display/JENKINS/Credentials+Binding+Plugin + + + +Please find below example how to get `Reapsaw` results against `https://github.com/appsecco/dvna` in Jenkins: +```shell +# Make sure you already build container `docker build -t sast .` +# The TOOLS is a list of scanning tools +# will be cx by default, Snyk can be used as a dependency scanner. In order to use it, please set SNYK_TOKEN +TOOLS="cx" + +# The CX_USER is a name of application user for automated scns +CX_URL="PLEASE_PASTE_CX_URL" +CX_USER="PLEASE_PASTE_CX_USERID" +CX_PASSWORD="PLEASE_PASTE_CX_PASSWORD" + +# The CX_PATH contain paths to be excluded from Checkmarx scan (Coma separated list e.g. code,some/other/code) +CX_PATH="docs,terraform,tests" + +RP_URL="PLEASE_PASTE_REPORT_PORTAL_URL" +# The RP_TOKEN used to send results in aggregation storage +RP_TOKEN="PLEASE_PASTE_RP_TOKEN" + +# PRJ is a Checkmarx project name +PRJ="demo_sast" + +# Clear Jenkins job WORKSPACE +rm -rf $WORKSPACE/* + +# Clone repository +mkdir code + +# clone code base in `code` folder +git clone https://github.com/appsecco/dvna $WORKSPACE/code -b master + +# create folder for reports +mkdir -p code/reports + +# Stop container if in run state +if docker stop $PRJ ; then + echo 'stopping sast container' +else + echo 'starting sast..' +fi + +# Remove container if in run state +if docker rm $PRJ ; then + echo 'removing sast container' +else + echo 'starting sast..' +fi + +docker run -d -t --entrypoint=cat \ + -e TASKS=$TOOLS \ + -e CX_URL=$CX_URL \ + -e CX_USER=$CX_USER \ + -e CX_PASSWORD=$CX_PASSWORD \ + -e CX_PROJECT=$PRJ \ + -e RP_PROJECT=$PRJ \ + -e cx_path="$CX_PATH" \ + --name $PRJ sast:latest + +# copy code base from workspace inside container +docker cp "$WORKSPACE/code/." 
$PRJ:/code + +# print working dir files for the scan inside container +docker exec -t demo_sast ls -la + +# start scan +docker exec -t demo_sast scan + +# generate reports locally - without sending in RP +docker exec -t demo_sast generate_reports -r=false + + +# review results +docker exec -t demo_sast ls reports + +# review results : json +docker exec -t demo_sast cat reports/json_report.json + +# generate html report +docker exec -i demo_sast junit2html /code/reports/junit_report.xml /code/reports/report.html + +# send results in ReportPortal +docker exec -t -e REPORT_PORTAL_URL=$RP_URL -e RP_TOKEN=$RP_TOKEN -e RP_PROJECT=$PRJ -e RP_LAUNCH_NAME=$PRJ demo_sast generate_reports + +# stop sast container +docker stop $PRJ + +# remove sast container +docker rm $PRJ + +``` \ No newline at end of file diff --git a/docs/jenkins-pipelines.md b/docs/jenkins-pipelines.md new file mode 100755 index 0000000..b002dad --- /dev/null +++ b/docs/jenkins-pipelines.md @@ -0,0 +1,57 @@ +## Jenkins pipelines + +You need to add next credentials and tokens in Jenkins: +* sastPassword: Used as Checkmarx user password +* rpToken: Used for writing to ReportPortal +* snykToken: Used as Snyk token + +Information how to do it can be found by the links below: + +* [Credentials Binding Plugin](https://jenkins.io/doc/pipeline/steps/credentials-binding/) + + +Please find below Jenkins pipeline example with SAST step : + +```bash + +node() { + stage('Checkout') { + deleteDir() + // scmUrl string param: link to GitHub repo + git branch: 'master', url: 'https://github.com/OWASP/NodeGoat' + } + + stage('SAST') { + + def appName = 'demo_sast' + def runParams = "" + def sast_params = [ + PROJECT : appName, + CX_URL : '', + CX_USER : '', + CX_PASSWORD: '', + TASKS : 'cx,snyk', + REPORT_PORTAL_URL: '', + RP_TOKEN : '', + SNYK_TOKEN : ''] + for (param in sast_params) { + runParams += " -e ${param.key}=\"${param.value}\"" + } + try { + sh "mkdir -p reports" + sh "docker run -td --entrypoint=cat --network host ${runParams} --name ${appName} sast:latest" + sh "docker cp ${WORKSPACE}/. ${appName}:/code" + sh "docker exec -t ${appName} scan" + sh "docker exec -t ${appName} generate_reports" + sh 'echo "Results:"' + sh "docker exec -t ${appName} cat reports/json_report.json" + } catch (e) { + sh 'echo "Error in SAST scan"' + sh "echo ${e.getMessage()}" + } finally{ + sh "docker stop ${appName}" + sh "docker rm ${appName}" + } + } +} +``` \ No newline at end of file diff --git a/docs/known-issues.md b/docs/known-issues.md new file mode 100755 index 0000000..e6f065e --- /dev/null +++ b/docs/known-issues.md @@ -0,0 +1,29 @@ +## Failed to establish connection: + +``` +[Errno -3] Temporary failure in name resolution': +``` + +If you have network connection on the host machine but no internet inside docker container this means you should change network interface for docker run command. + +### Resolution: +Add `--network host` in run docker command: + +```js +docker run --network host --name sast --rm -d \ + -e TASKS= \ + -v :/code \ + -e CX_PROJECT= \ + -e SNYK_TOKEN= \ + -e CX_USER= \ + -e CX_PASSWORD= sast:latest + +``` + +## Checkmarx error: + +2019-05-23 10:33:00,905 - sast_controller.drivers.cx.Checkmarx - CRITICAL - Invalid connection + +### Resolution: +1. Double check CX_USER and PASSWORD parameters in script +2. 
Verify that manually you are able to login in Checkmarx \ No newline at end of file diff --git a/docs/logo.png b/docs/logo.png new file mode 100755 index 0000000..7839afd Binary files /dev/null and b/docs/logo.png differ diff --git a/docs/report_portal.md b/docs/report_portal.md new file mode 100755 index 0000000..b9b77ea --- /dev/null +++ b/docs/report_portal.md @@ -0,0 +1,23 @@ +Report Portal used to aggregate results and analyze findings from different tools in one place. + +>If you do not create a ReportPortal project before scanning, `Reapsaw` do it automatically + +How to get : +* Login to RP +* Click on USER PROFILE +* Copy UUI (UNIVERSALLY UNIQUE IDENTIFIER) + +## Reapsaw parameters: +```js + REPORT_PORTAL_URL # RerportPortal (e.g. https://reportportal.io) + RP_PROJECT # project name + RP_LAUNCH_NAME # launch name + RP_TOKEN # UUI +``` +Details can be found by the link: +* [Report Portal](https://reportportal.io/docs/What-is-ReportPortal?). + + +## Finding example +![Finding Template](https://github.com/dowjones/sast/blob/develop/docs/rp_finding.png) + diff --git a/docs/rp_finding.png b/docs/rp_finding.png new file mode 100755 index 0000000..1c26d58 Binary files /dev/null and b/docs/rp_finding.png differ diff --git a/docs/snyk_configuration.md b/docs/snyk_configuration.md new file mode 100755 index 0000000..3f6cf63 --- /dev/null +++ b/docs/snyk_configuration.md @@ -0,0 +1,122 @@ +Snyk helps to find and fix known vulnerabilities in your dependencies, both ad hoc and as part of your CI (Build) system. +> We are using "Snyk CLI" you should `Snyk API token` in order to use it. + +## Prerequisites +You can use free version with `Reapsaw` for scanning open source repositories. +Using open source license you can run only `200` scans on private projects. + +> Details about Snyk licence can be found by the link [plans](https://snyk.io/plans/) + +* Snyk API token +* Internet connection with `snyk.io` + +`SNYK_TOKEN` env variable with your Snyk API token and add "snyk" in TASKS environment variable. + +`Reapsaw` languages support: +* NodeJs +* Java +* .Net +* Scala + +## How to build application code base + +### NodeJS Playbook +1. Before scan install application modules: +```shell +npm install +``` +2. Add `snyk` in `TASKS` environment variable: +```js +$ docker run --name sast --rm -d \ + -e TASKS="snyk" \ + -v :/code \ + -e SNYK_TOKEN= \ + sast:latest +``` + +3. Report example: + +> Note: For the findings from Snyk we are reporting top level dependencies + +
<summary>JSON report</summary>

+ +```js +[{ + "Issue Name": "Regular Expression Denial of Service (ReDoS).marked", + "Issue Tool": "Snyk", + "Steps To Reproduce": "", + "Issue Priority": "Major", + "Issue Severity": "High", + "Issue Confidence": "Certain", + "CWE": "CWE-185", + "CVE": "", + "Overview": "", + "Recommendations": "Upgrade `marked` to version 0.6.2 or higher", + "References": "\r\n- [GitHub PR](https://github.com/markedjs/marked/pull/1083)\r\n- [GitHub Commit](https://github.com/markedjs/marked/pull/1083)", + "Paths": "owasp-nodejs-goat>marked@0.3.9", + "URLs": "", + "error_string": "marked", + "Description": "*Vulnerable Package:* marked\n*Current Version:* 0.3.9\n*Vulnerable Version(s):* <0.3.18\n \n *Remediation:*\r\nUpgrade marked to version 0.3.17 or higher\r\n\r\n\n Overview\r\n[`marked`](https://www.npmjs.com/package/marked) is a markdown parser built for speed\r\n\r\nAffected versions of this package are vulnerable to Regular Expression Denial of Service (ReDoS) attacks. This can cause an impact of about 10 seconds matching time for data 150 characters long.\r\n\r\n\n ", + "Instances": "marked", + "Attachments": [], + "Tags": [ + { + "TestType": "sast" + }, + { + "Provider": "Reapsaw" + }, + { + "Tool": "Snyk" + } + ], + "Jira Name": "Vulnerable Software", + "Repo": "", + "top_level_module": "marked", + "upgrades": [ + false, + "marked@0.3.18" + ], + "language": "js", + "RP Defect Type": "Product Bug" + }] +``` +
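+Acting on a Snyk finding usually means bumping the reported `top_level_module` and re-running the scan. A sketch, using the upgrade suggested in the example above (the exact version always comes from the report, not from this guide):
+
+```bash
+# Upgrade the flagged top-level dependency, then re-scan
+npm install marked@0.6.2 --save
+docker run --name sast --rm \
+  -v "$(pwd)":/code \
+  -e TASKS="snyk" \
+  -e SNYK_TOKEN="<your-snyk-api-token>" \
+  sast:latest
+```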
</details>
+
+ +### .NET Playbook + +If you are using .Net you should add required params: +* `lang`='dotnet' +* `sln_file`= # e.g `src/Project.sln` + +1. Build application +```bash +$ dotnet restore +``` +```js +$ docker run --name sast --rm -d \ + -e TASKS="snyk" \ + -v :/code \ + -e lang='dotnet' \ + -e sln_file= \ + -e SNYK_TOKEN= \ + sast:latest +``` + + + +### Java Playbook +1. Build application +```bash +$ mvn build +``` +> It should work for Java other build tools if not reach out to us. + +2. Run `Reapsaw` +### Scala Playbook +1. Before scan install application modules: +```bash +$ sbt build +``` \ No newline at end of file diff --git a/docs/ticket.png b/docs/ticket.png new file mode 100755 index 0000000..b020da9 Binary files /dev/null and b/docs/ticket.png differ diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100755 index 0000000..21cc11c --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,63 @@ +#!/bin/bash + +create_reports() +{ + +# ---------------------------------------------------------------- +# Generate consolidated report and send results to Report Portal +# ---------------------------------------------------------------- + RP=false + if [[ "${REPORT_PORTAL_URL}" && "${RP_TOKEN}" ]]; then + if [[ "${RP_PROJECT}" || "${PROJECT}" ]]; then + RP=true + else + echo "Please specify RP_PROJECT as environment variable for sending in Report Portal" + exit 0 + fi + + fi + generate_reports -r $RP + exit 0 +} + +{ + if [[ "${TASKS}" ]]; then + mkdir -p /code/reports + if [[ "${TASKS}" == "snyk" ]]; then + echo "Snyk selected..." + if [[ "${SNYK_TOKEN}" ]]; then + scan + create_reports + else + echo "Please specify SNYK_TOKEN as environment variable." + fi + elif [[ "${TASKS}" == *"cx" ]] || [[ "${TASKS}" == *"cx_commit" ]] || [[ "${TASKS}" == "cx"* ]]; then + if [[ "${CX_USER}" ]] && [[ "${CX_PASSWORD}" ]] && [[ "${CX_URL}" ]]; then + if [[ "${TASKS}" == *"snyk"* ]]; then + if [[ "${SNYK_TOKEN}" ]]; then + echo "Checkmarx and Snyk tools selected..." + scan + create_reports + else + echo "Please specify SNYK_TOKEN as environment variable." + fi + else + echo "Checkmarx selected..." + scan + create_reports + fi + else + echo "Please specify next environment variables to run Checkmarx: 'CX_USER', 'CX_PASSWORD' and 'CX_URL'." + fi + else + echo "Unsupported TASKS value: ${TASKS}" + echo "Possible options: \"cx,snyk\", \"cx\",\"snyk\"" + fi + else + echo "Please set TASKS environment variable" + fi + +} || { + echo "Something went wrong. Please verify docker run command." 
+ exit 0 +} diff --git a/jobConfig.yaml b/jobConfig.yaml new file mode 100755 index 0000000..cff97d7 --- /dev/null +++ b/jobConfig.yaml @@ -0,0 +1,15 @@ +appName: barrage_slaves +authorizedUsers: rozumenkoa + +version: 1.0.0 + +scm: git +build: + type: tox + +staticAnalysis: + type: nope + +publish: + type: docker + version: 1.0.0 \ No newline at end of file diff --git a/licenses/licenses.csv b/licenses/licenses.csv new file mode 100755 index 0000000..8c9606e --- /dev/null +++ b/licenses/licenses.csv @@ -0,0 +1,21 @@ +package, license +bandit, Apache 2.0 +junit-xml, MIT +requests, Apache 2.0 +zeep, MIT +PyYAML, MIT +bs4, MIT +slackclient, MIT +jira, BSD +sonarqube-api, MIT +configparser, MIT +PyJWT, MIT +cryptography, BSD or Apache License Version 2.0 +xmltodict, MIT +junit2html, MIT +markdownify, BSD +snyk, Apache 2.0 +GitPython, BSD +reportportal-client, Apache 2.0 +repo-supervisor, MIT +maven, Apache 2.0 \ No newline at end of file diff --git a/reportportal-client-v.3.2.0.zip b/reportportal-client-v.3.2.0.zip new file mode 100755 index 0000000..4138128 Binary files /dev/null and b/reportportal-client-v.3.2.0.zip differ diff --git a/requirements.txt b/requirements.txt new file mode 100755 index 0000000..5db504c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,16 @@ +bandit==1.5.1 +junit-xml==1.8 +requests==2.21.0 +zeep==2.5.0 +PyYAML==3.12 +bs4==0.0.1 +slackclient==1.2.1 +jira==1.0.15 +sonarqube-api==1.3.1 +configparser==3.5.0 +PyJWT==1.6.4 +cryptography==2.2.2 +xmltodict==0.11.0 +junit2html==21 +markdownify==0.4.1 +node-semver==0.6.1 diff --git a/sast_controller/__init__.py b/sast_controller/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/bin/__init__.py b/sast_controller/bin/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/bin/config.py b/sast_controller/bin/config.py new file mode 100755 index 0000000..e66e10d --- /dev/null +++ b/sast_controller/bin/config.py @@ -0,0 +1,37 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
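+# Default scan configuration: exclusion lists, code location and report output paths.
+# Most values can be overridden through the environment variables referenced below.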
+ +import os + + +class Config(object): + """Class with default configuration parameters""" + EXCLUDED_TYPES = ["png", "zip", "css", "txt", "svg", "mp3", "wav", "less", "gif"] + EXCLUDED_PATH = ["node_modules", "config", "coverage", "dist_", "test", "report", "i18n"] + CODE_PATH = os.environ.get('CODE_PATH') or '/code' + + CX_PROJECT_NAME = os.environ.get('CX_PROJECT', os.environ.get('PROJECT', None)) + + CX_INCREMENTAL = os.environ.get('cx_incremental', 'true') + CX_FILES = os.environ.get('cx_files', None) + CX_PATH = os.environ.get("cx_path", None) + + TMP_FOLDER = '/tmp' + ZIP_NAME = 'cx.zip' + + # OUTPUT + CX_OUTPUT_PATH = os.environ.get('CX_OUTPUT_PATH') or '/code/reports/checkmarx-report.xml' + SNYK_OUTPUT_PATH = os.environ.get('SNYK_OUTPUT_PATH') or '/code/reports/snyk.json' + BUGBAR_ITEMS_OUTPUTPATH = os.environ.get('BUGBAR_ITEMS_OUTPUTPATH') or '/code/reports/bugbar_output.json' + JSON_OUTPUT_PATH = os.environ.get('JSON_OUTPUT_PATH') or '/code/reports/json_report.json' diff --git a/sast_controller/bin/generate_reports.py b/sast_controller/bin/generate_reports.py new file mode 100755 index 0000000..a9429d2 --- /dev/null +++ b/sast_controller/bin/generate_reports.py @@ -0,0 +1,200 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
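+# Builds the consolidated JUnit and JSON reports from Checkmarx and Snyk output and,
+# when enabled, pushes the findings to Report Portal.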
+import json +import os +import logging.config + +import re +import requests + +from junit_xml import TestSuite +from requests.exceptions import ConnectionError +from argparse import ArgumentParser, ArgumentTypeError + +from sast_controller.bin.config import Config + +from sast_controller.converters.CheckmarxReport import CheckmarxReport +from sast_controller.converters.SnykReport import SnykReport +from sast_controller.converters.Converter import Converter + +from sast_controller.drivers.rp.report_portal_writer import ReportPortalDataWriter +from sast_controller.drivers.rp.rp_portal_controller import ReportPortalService + +ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + +logger = logging.getLogger(__name__) + + +def str2bool(v): + """ + Convert from str to bool + :param v: + :return: + :raise: ArgumentTypeError if unable to convert to bool + """ + if v.lower() in ('yes', 'true', 't', 'y', '1'): + return True + elif v.lower() in ('no', 'false', 'f', 'n', '0'): + return False + else: + raise ArgumentTypeError('Boolean value expected.') + + +def parse_args(): + parser = ArgumentParser() + parser.add_argument('-r', '--reportportal', type=str2bool, default=True, help="Write to report portal data or not") + parser.add_argument('-o', '--output', type=str, default='/code/reports/junit_report.xml', help="output file") + parser.add_argument('--json_output', type=str, default=Config.JSON_OUTPUT_PATH, help="json output file") + parser.add_argument('--cm_input', type=str, default=Config.CX_OUTPUT_PATH, help="input file") + parser.add_argument('--sn_input', type=str, default=Config.SNYK_OUTPUT_PATH, help="input file") + return parser.parse_args() + + +def check_connectivity(endpoint, error_msg='ConnectionError'): + """ + Check connectivity to endpoint + :param error_msg: + :param endpoint: + :return: + """ + error = False + response = None + try: + response = requests.get(endpoint, verify=False) + except ConnectionError: + error = True + try: + ip = requests.get('https://api.ipify.org').text + if error: + raise ConnectionError('{}. Endpoint: {}. Public IP: {}'.format(error_msg, endpoint, ip)) + elif response and response.status_code != 200: + raise ConnectionError('{}. Invalid response code : {}. 
Public IP: {}'.format(error_msg, + response.status_code, ip)) + except ConnectionError: + raise ConnectionError('Internet Connection Error!') + + +def send_items_to_rp(items, url=None, project=None, launch_name=None, token=None, launch_tags=None): + """ + Send findings to RP + :param items: + :param launch_name: + :param url: + :param project: + :param token: + :param launch_tags: + :return: + """ + if not url: + url = os.environ.get('REPORT_PORTAL_URL', '') + if not project: + project = os.environ.get('RP_PROJECT', os.environ.get('PROJECT', None)) + if not launch_name: + launch_name = os.environ.get('RP_LAUNCH_NAME', 'SAST scan') + if not token: + token = os.environ.get('RP_TOKEN', None) + logger.critical(f"REPORT_PORTAL_URL = {url}") + logger.critical(f"RP_PROJECT = {project}") + logger.critical(f"RP_LAUNCH_NAME = {launch_name}") + + if not (url and project and token): + raise Exception('Please specify REPORT_PORTAL_URL, RP_PROJECT and RP_TOKEN!') + + if not re.fullmatch('[a-zA-Z_\-0-9]+', project): + raise Exception('Only latin letters, numeric characters, ' + 'underscores and dashes are supported in RP project name.') + + check_connectivity(url, 'ConnectionError with Report Portal') + + rp_service = ReportPortalService(url, token) + print(rp_service.create_project(project)) + + rp_data_writer = ReportPortalDataWriter(endpoint=url, token=token, project=project, + launch_name=launch_name, launch_tags=launch_tags) + rp_data_writer.start_test() + for item in items: + rp_data_writer.start_test_item(item.issue, description=item.description, tags=item.get_tags(), + parameters=item.get_params()) + info_msg = [] + for msg in item.msgs: + if msg.status == 'ERROR': + rp_data_writer.test_item_message(msg.message, msg.status) + else: + info_msg.append(msg.message) + if item.attachments: + for attachment in item.attachments: + rp_data_writer.test_item_message(attachment['name'], 'INFO', attachment) + rp_data_writer.test_item_message('!!!MARKDOWN_MODE!!! 
%s ' % '\n\n'.join(set(info_msg)), 'INFO') + rp_data_writer.finish_test_item(item.defect_type_info) + if rp_data_writer.is_test_started(): + rp_data_writer.finish_test() + + +def get_models(args): + """ + :param args: argparse.Namespace + commandline arguments + :return: dict of BaseReport + """ + models = dict() + if os.path.isfile(args.cm_input): + models[args.cm_input] = CheckmarxReport + if os.path.isfile(args.sn_input): + models[args.sn_input] = SnykReport + return models + + +def generate_reports(args, models): + """ + Generate Report Portal, JUnit, JSON reports + :param args: argparse.Namespace + commandline arguments + :param models: dict of BaseReport + """ + repo = os.environ.get('REPO', '') + branch = os.environ.get('BRANCH', 'develop') + if repo.endswith('.git'): + repo = repo[:-len('.git')] + canonical = Converter(models, repo, branch) + ti = canonical.get_rp_items() + + if ti: + if args.reportportal: + send_items_to_rp(ti) + + junit_items = canonical.get_junit_items() + if os.path.exists(os.path.dirname(args.output)): + if junit_items: + with open(args.output, 'w') as f: + TestSuite.to_file(f, [junit_items], prettyprint=False) + if os.path.exists(os.path.dirname(args.json_output)): + json_items = canonical.get_json_items() + if json_items: + with open(args.json_output, 'w') as f: + json.dump(json_items, f, indent=4, sort_keys=True) + else: + logger.critical('There are no findings in report.') + + +def main(): + """Entrypoint for execution script""" + args = parse_args() + + models = get_models(args) + + generate_reports(args, models) + + +if __name__ == '__main__': + main() diff --git a/sast_controller/bin/json_to_jira.py b/sast_controller/bin/json_to_jira.py new file mode 100755 index 0000000..3ccd081 --- /dev/null +++ b/sast_controller/bin/json_to_jira.py @@ -0,0 +1,56 @@ +import json +from argparse import ArgumentParser + +from sast_controller.bin.config import Config +from sast_controller.converters import PRIORITY_MAPPING + +from sast_controller.drivers.jira.baseClient import JiraBaseClient + + +def send_to_jira(project, assignee, defect_type, json_items): + """ + Create JIRA tickets from JSON report + :param project: + :param assignee: + :param defect_type: + :param json_items: + """ + jira = JiraBaseClient() + + if jira and jira.check_project(project): + for item in json_items: + labels = [] + for _ in item['Tags']: + labels.extend(_.values()) + issue_data = { + 'project': {'key': project}, + 'summary': item.get('Jira Name', item['Issue Name']), + 'description': item['Jira Description'], + 'issuetype': {'name': defect_type}, + 'assignee': {'name': assignee}, + 'priority': {'name': PRIORITY_MAPPING.get(item['Issue Severity'], item['Issue Priority'])}, + 'labels': labels + } + jira.create_issue(issue_data) + + +def parse_args(): + parser = ArgumentParser() + parser.add_argument('-p', '--jira_project', type=str, help="Jira project name") + parser.add_argument('-a', '--jira_assignee', type=str, help="Jira assignee") + parser.add_argument('-f', '--report_file', type=str, default=Config.JSON_OUTPUT_PATH, + help="Path to json report file") + parser.add_argument('--defect_type', type=str, default='Vulnerability', help="Jira tickets type") + return parser.parse_args() + + +def main(): + """Entry point for execution script""" + args = parse_args() + with open(args.report_file, 'r') as f: + report = json.loads(f.read()) + send_to_jira(args.jira_project, args.jira_assignee, args.defect_type, report) + + +if __name__ == '__main__': + main() diff --git 
a/sast_controller/bin/notifications.py b/sast_controller/bin/notifications.py new file mode 100755 index 0000000..09384d9 --- /dev/null +++ b/sast_controller/bin/notifications.py @@ -0,0 +1,166 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +from math import fabs +from slackclient import SlackClient +from os import environ +from argparse import ArgumentParser +from sast_controller.drivers.rp.rp_portal_controller import ReportPortalService + +logger = logging.getLogger(__name__) + +launch_description_template = """*{project} "{name}" #{number} has been finished* +*Total Issues: {failed}* <{link}/ui/#{project}/launches/all%7Cpage.page=1&page.size=50&page.sort=start_time,number\ +%2CDESC/{launch_id}?page.page=1&page.size=50&page.sort=start_time%2CASC|link> +>Product Bugs: {product_bug} +>Validate With Dev Team: {automation_bug} +>System Issues: {system_issue} +>To Investigate: {to_investigate} +>No Defects: {no_defect}""" + + +def parse_args(): + parser = ArgumentParser() + parser.add_argument('-r', '--reportportal', type=str, + default=environ.get("REPORT_PORTAL_URL", ""), + help="Report Portal URL") + parser.add_argument('-t', '--token', type=str, + default=environ.get("RP_TOKEN", ""), + help="Report Portal authorisation token") + parser.add_argument('-sc', '--slack_channel', type=str, + default=environ.get("SLACK_CHANNEL", ""), + help="Slack channel") + parser.add_argument('-st', '--slack_token', type=str, + default=environ.get("SLACK_TOKEN", ""), + help="Slack token") + parser.add_argument('-rs', '--reportportal_scan', type=str, + default=environ.get("RP_LAUNCH_NAME", ""), + help="Report portal scan name") + parser.add_argument('-rp', '--reportportal_project', type=str, + default=environ.get("RP_PROJECT", ""), + help="Report portal project name") + return parser.parse_args() + + +def send_slack_message(sc, message, channel, thread_ts=None): + """ + Send message to Slack channel + :param sc: + :param message: + :param channel: + :param thread_ts: + :return: + """ + return sc.api_call("chat.postMessage", channel=channel, + text=message, thread_ts=thread_ts) + + +def get_difference(current, previous): + """ + Get difference between two Report Portal launches + :param current: + :param previous: + :return: + """ + if current - previous == 0: + return "No difference" + dif = int(fabs(current - previous)) + percent = dif / max(current, previous) * 100 + if current - previous > 0: + return f"rate increased {round(percent, 2)}% (+{dif} item(s))" + else: + return f"rate decreased {round(percent, 2)}% (-{dif} item(s))" + + +def get_launch_info_msg(rp_link, project, launch_id, launch_info): + """ + Format launch info message using template + :param rp_link: + :param project: + :param launch_id: + :param launch_info: + :return: + """ + content = launch_info['content'][0] + number = content["number"] + name = content["name"] + status = content["status"] + statistics = content["statistics"] + message_ = 
launch_description_template.format(link=rp_link, project=project, + name=name, + number=number, + launch_id=launch_id, + status=status, + failed=statistics["executions"]["failed"], + product_bug=statistics["defects"]["product_bug"]["total"], + automation_bug=statistics["defects"]["automation_bug"]["total"], + system_issue=statistics["defects"]["system_issue"]["total"], + to_investigate=statistics["defects"]["to_investigate"]["total"], + no_defect=statistics["defects"]["no_defect"]["total"]) + return message_ + + +def get_compare_launches_msg(current_launch, previous_launch): + """ + Format compare launches message using template + :param current_launch: + :param previous_launch: + :return: + """ + defects = current_launch["statistics"]["defects"] + previous_defects = previous_launch["statistics"]["defects"] + message_ = "" + for defect in defects: + current = defects[defect]["total"] + previous = previous_defects[defect]["total"] + if (current - previous) != 0: + message_ = f"{message_}\n" \ + f">{defect.capitalize().replace('_', ' ')} " \ + f"{get_difference(current, previous)}" + if len(message_) == 0: + message_ = "No difference." + return "*Comparison with previous run:* " + message_ + + +def main(): + """Entrypoint for execution script""" + args = parse_args() + token = args.token + rp = ReportPortalService(args.reportportal, token) + try: + launch_id, response = rp.get_launch_info_by_number(args.reportportal_project, args.reportportal_scan, 1) + message = get_launch_info_msg(args.reportportal, args.reportportal_project, launch_id, response) + + if 'page' in response and response['page']['totalPages'] > 1: + second_launch_id, second_launch_response = rp.get_launch_info_by_number(args.reportportal_project, + args.reportportal_scan, 2) + message += "\n" + get_compare_launches_msg(response["content"][0], second_launch_response["content"][0]) + + sc = SlackClient(args.slack_token) + + if sc.rtm_connect(): + send_slack_message(sc, channel=args.slack_channel, message=message) + logger.info("Notification was sent.") + else: + logger.critical("Unable to connect to Slack.") + except Exception as ex: + print("Error occurred: [{}] {}".format(type(ex), ex)) + finally: + rp.close_session() + + +if __name__ == '__main__': + main() diff --git a/sast_controller/bin/scan.py b/sast_controller/bin/scan.py new file mode 100755 index 0000000..83cc3d9 --- /dev/null +++ b/sast_controller/bin/scan.py @@ -0,0 +1,220 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
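+# Runs the scan tasks selected via the TASKS environment variable (Checkmarx and/or Snyk)
+# in parallel and stores the raw tool output for later report generation.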
+import json +import fnmatch +import os +import traceback +from multiprocessing import Pool +from os import environ, path +import subprocess +from time import time +import logging + +from requests.exceptions import ConnectionError + +from sast_controller.drivers.cx.utils import zip_prj, write_file, zip_latest_files + +from sast_controller.drivers.cx import CxManager +from sast_controller.drivers.cx.CxManager import CxIncrementalScanException +from sast_controller.bin.config import Config +from sast_controller.bin.generate_reports import check_connectivity + + +def cx_scan(local_, prj_, inc_, f_path): + try: + report = CxManager.scan_project(local_path=local_, project=prj_, incremental_scan=inc_) + except CxIncrementalScanException: + report = CxManager.scan_project(local_path=local_, project=prj_, incremental_scan=False) + except Exception as e: + logging.error(str(e)) + return + if report and report.ScanResults: + data = str(report.ScanResults).replace("b'\\xef\\xbb\\xbf", "", 1) + data = data.replace("\\r\\n", "\n") + data = data.replace(">'", ">") + write_file(f_path, data) + + +def cx_exclude_rules(): + user_files_excl = Config.CX_FILES + if user_files_excl: + user_files_excl = user_files_excl.split(",") + else: + user_files_excl = [] + user_path_excl = Config.CX_PATH + if user_path_excl: + user_path_excl = user_path_excl.split(",") + else: + user_path_excl = [] + paths = Config.EXCLUDED_PATH + user_path_excl + logging.info("Checkmarx excluded paths: %s" % ", ".join(paths)) + types = Config.EXCLUDED_TYPES + user_files_excl + logging.info("Checkmarx excluded extensions: %s" % ", ".join(types)) + return paths, types + + +def run_checkmarx_test(project, path_, excluded_paths, excluded_types, incremental_scan, xml_path): + local_path = path.join(path_, Config.ZIP_NAME) + try: + zip_prj(path_, local_path, excluded_paths, excluded_types) + except FileNotFoundError: + logging.error('No such directory:', path_) + return + except Exception as e: + logging.error(str(e)) + return + cx_scan(local_path, project, incremental_scan, xml_path) + + +def cx_connectivity(): + if not (environ.get('OWNER') or environ.get('CX_USER')) or not ( + environ.get('PASSWORD') or environ.get('CX_PASSWORD')) or not environ.get('CX_URL'): + logging.critical( + f'Please specify Checkmarx data: CX_USER, CX_PASSWORD and CX_URL environment variables.') + else: + try: + check_connectivity(environ['CX_URL'], 'Unable to connect to Checkmarx') + return True + except ConnectionError as ex: + logging.error(str(ex)) + return False + + +def snyk_scan_dotnet(): + sln_file = environ.get('sln_file', '') + if sln_file: + cmd = f'snyk test --file={sln_file} --json'.split(" ") + raw_result = subprocess.Popen(cmd, encoding='utf-8', stdout=subprocess.PIPE).stdout.read() + return raw_result + + scanned_projects = dict() + for root, dirs, files in os.walk('.'): + for file_name in files: + if fnmatch.fnmatch(file_name, '*.sln'): + file_path = path.join(root, file_name) + cmd = f'snyk test --file={file_path} --json'.split(' ') + raw_result = subprocess.Popen(cmd, encoding='utf-8', stdout=subprocess.PIPE).stdout.read() + try: + json_result = json.loads(raw_result) + if not isinstance(json_result, list): + json_result = [json_result] + if 'error' in json_result: + logging.critical(json_result) + else: + for report in json_result: + scanned_projects[report['path']] = report + except Exception: + logging.error(raw_result) + snyk_report = [] + for report in scanned_projects.values(): + snyk_report.append(report) + return json.dumps(snyk_report) + 
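+# snyk_scan() runs `snyk test --json` (or the .NET variant above), validates the auth token,
+# falls back to generating package-lock.json for Node.js projects when the first attempt
+# errors out, and writes the raw JSON output to Config.SNYK_OUTPUT_PATH.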
+ +def snyk_scan(): + lang_ = environ.get("lang", "") + + if not environ.get('SNYK_TOKEN'): + logging.error(f'Please specify SNYK_TOKEN environment variable.') + return + if lang_ == 'dotnet': + output = snyk_scan_dotnet() + else: + cmd = 'snyk test --json'.split(" ") + test = subprocess.Popen(cmd, encoding="utf-8", stdout=subprocess.PIPE) + output = test.stdout.read() + json_output = json.loads(output) + if "error" in json_output: + if 'Invalid auth token provided' in output: + logging.error(f"Invalid auth token provided. Please double check SNYK_TOKEN") + raise AssertionError('Invalid Snyk auth token provided') + logging.error(f"{output}") + if path.isfile('package-lock.json'): + subprocess.Popen(['/usr/bin/npm', "i", "--package-lock-only"], encoding="utf-8", + stdout=subprocess.PIPE).communicate() + logging.info("Try to run using package-lock.json") + cmd = 'snyk test --file=package-lock.json --json'.split(" ") + test = subprocess.Popen(cmd, encoding="utf-8", stdout=subprocess.PIPE) + output = test.stdout.read() + else: + logging.error(f'Unable to run Snyk') + return + + write_file(Config.SNYK_OUTPUT_PATH, output) + + +def copy_folder(source, dest): + """ + Copy folder content + + :param source: + :param dest: destination + :return: + """ + subprocess.Popen(["rsync", "-av", source, dest, "--exclude", "node_modules", + "--exclude", "test", "--exclude", "dist_integration", "--exclude", "dist_production", + "--exclude", "dist_staging"], encoding="utf-8", stdout=subprocess.PIPE).communicate() + + +def f(cmd): + """ + :param cmd: command to execute + :return: + """ + output = '' + ts_ = time() + source_path = path.join(Config.TMP_FOLDER, path.basename(Config.CODE_PATH)) + + if cmd == 'cx_commit': + if cx_connectivity(): + logging.info("Starting Checkmarx commit-base scan..") + copy_folder(Config.CODE_PATH, Config.TMP_FOLDER) + + cx_exclude_paths, cx_exclude_types = cx_exclude_rules() + zip_path = path.join(Config.TMP_FOLDER, Config.ZIP_NAME) + zip_latest_files(source_path, zip_path, cx_exclude_types, cx_exclude_paths) + if path.os.path.exists(zip_path): + logging.info('Start Scanning ..') + cx_scan(zip_path, Config.CX_PROJECT_NAME, Config.CX_INCREMENTAL, Config.CX_OUTPUT_PATH) + else: + logging.info("Based on excluded rules there are no files to scan") + + elif cmd == 'cx': + if cx_connectivity(): + cx_exclude_paths, cx_exclude_types = cx_exclude_rules() + project_name = Config.CX_PROJECT_NAME + cx_incremental_scan = Config.CX_INCREMENTAL + if project_name: + run_checkmarx_test(project_name, Config.CODE_PATH, cx_exclude_paths, cx_exclude_types, + cx_incremental_scan, Config.CX_OUTPUT_PATH) + elif cmd == 'snyk': + try: + snyk_scan() + except Exception: + traceback.print_exc() + print(cmd, ' fin ', time() - ts_) + return output + + +def main(): + """Entrypoint for execution script""" + p = Pool(5) + tasks = environ.get("TASKS", "cx,snyk").split(",") + logging.info(f'Your tasks: {tasks}') + r = p.map_async(f, tasks) + r.wait() + + +if __name__ == '__main__': + main() diff --git a/sast_controller/bug_bar/__init__.py b/sast_controller/bug_bar/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/bug_bar/bug_bar.py b/sast_controller/bug_bar/bug_bar.py new file mode 100755 index 0000000..094ba66 --- /dev/null +++ b/sast_controller/bug_bar/bug_bar.py @@ -0,0 +1,43 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import json + +import os + +try: + from json.decoder import JSONDecodeError +except ImportError: + JSONDecodeError = ValueError + +BUGBAR_FILE = os.environ.get('BUGBAR_FILE', '/tmp/bugbar/bugbar.json') + + +def read_json(file_path=BUGBAR_FILE, default=None): + """ + Read json file. Error while reading return default value + :param file_path: + :param default: + :return: + """ + try: + with open(file_path) as fd: + try: + return json.load(fd) + except JSONDecodeError as exception: + logging.warning(exception) + except (OSError, IOError) as exception: + logging.error(exception) + return default diff --git a/sast_controller/converters/BaseReport.py b/sast_controller/converters/BaseReport.py new file mode 100755 index 0000000..721ab75 --- /dev/null +++ b/sast_controller/converters/BaseReport.py @@ -0,0 +1,92 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
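`bug_bar.read_json` above loads the bug bar definition and swallows both parse and I/O errors, returning the supplied default instead. A small hedged usage sketch, assuming the entries carry the `risk_rating` and `jira_priority` fields that the converters later in this change look up:

```python
from sast_controller.bug_bar import bug_bar

# Fall back to an empty mapping when the bug bar file is missing or malformed.
bar = bug_bar.read_json(default={})
for vuln_name, entry in bar.items():
    print(vuln_name, entry.get('risk_rating'), entry.get('jira_priority'))
```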
+ +import xml.etree.ElementTree as ET +from json import loads + +import os + +from sast_controller.bug_bar import bug_bar + + +class BaseReport(object): + """Canonical data model""" + _bug_bar = None + provider = "Reapsaw" + test_type = "sast" + severity_mapping = { + 'Critical': 'Blocker', + 'High': 'Critical', + 'Medium': 'Major', + 'Moderate': 'Minor', + 'Low': 'Minor', + 'Information': 'Trivial', + 'Info': 'Trivial' + } + + canonical_issue_model = { + "Issue Name": "", # Name issue will have in RP and Jira + "Issue Tool": "", + "Steps To Reproduce": "", + "Issue Priority": "", # Priority according to Bug Bar + "Issue Severity": "", # Risk Rating according to Bug Bar + "Issue Confidence": "", + "CWE": "", # in case available + "CVE": "", # in case available + "Overview": "", + "Recommendations": "", + "References": "", # where you can read more about issue(s) + "Paths": "", # paths in code affected by issue + "URLs": "", # URLs for issue description or affected by issue (in case of DAST) + "error_string": "", + "Description": "", + "Instances": "", + "Attachments": [], + "Tags": [], + "Jira Name": "", + "Repo": os.environ.get('REPO', '') + } + + def __init__(self, report): + """ + :param report: + raw report from SAST tool + """ + if report != 'sonar': + if 'xml' not in report: + with open(report) as f: + self.report = loads(f.read()) + else: + tree = ET.parse(report) + self.report = tree.getroot() + self.new_items = dict() + self.report = self._canonify() + + @property + def bug_bar(self): + if not self._bug_bar: + self._bug_bar = bug_bar.read_json() + return self._bug_bar + + @staticmethod + def get_git_path(repo, branch, file, line=''): + if not repo or not branch: + return '' + git_path = f'{repo}/blob/{branch}/{file}' + if line: + git_path += f"#L{line}" + return git_path + + def _canonify(self): + raise NotImplementedError diff --git a/sast_controller/converters/CheckmarxReport.py b/sast_controller/converters/CheckmarxReport.py new file mode 100755 index 0000000..64fde2a --- /dev/null +++ b/sast_controller/converters/CheckmarxReport.py @@ -0,0 +1,208 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
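`BaseReport` is the abstract canonical model: a subclass only has to parse its tool output inside `_canonify()` and return a list of canonical issue dictionaries. A toy subclass sketch (not a real converter) plus the `get_git_path` permalink helper; the repository URL, branch and file below are placeholders:

```python
from copy import deepcopy

from sast_controller.converters.BaseReport import BaseReport


class DummyReport(BaseReport):
    """Illustrative subclass; real converters such as CheckmarxReport parse actual tool reports."""

    def _canonify(self):
        issue = deepcopy(self.canonical_issue_model)
        issue['Issue Name'] = 'Example Issue'
        issue['Issue Tool'] = 'Dummy'
        return [issue]


# Build a repository permalink for a finding at a given line.
link = BaseReport.get_git_path('https://github.com/org/repo', 'master', 'src/app.py', line='42')
# -> 'https://github.com/org/repo/blob/master/src/app.py#L42'
```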
+ +from copy import deepcopy +import logging + +import os + +from sast_controller.bin import config +from sast_controller.drivers.cx import Checkmarx, utils +from sast_controller.extractors import vulnerability_info as vi +from sast_controller.converters.BaseReport import BaseReport + + +LOG = logging.getLogger(__name__) +utils.configure_logging(LOG) + +CX_PROJECT = config.Config().CX_PROJECT_NAME + +RP_DEFECT_TYPES = { + '0': 'To Investigate', + '1': 'No Defect', + '2': 'Product Bug', + '3': 'Product Bug', + '4': 'System Issue' +} + + +class CheckmarxReport(BaseReport): + """Canonical dta model implementation for Checkmarx""" + tool_name = "Checkmarx" + info_message = """ {} + GROUP: {} + CATEGORY: {} + *Code*: + ``` {} ```""" + instance = 'Line {} in file [{}|{}]' + recommendation = 'Please review and modify vulnerable code in line {} of {}' + + @staticmethod + def mask(text): + """ + Add masking for found secrets + :param text: + :return: + """ + hidden_part = max(int(len(text) * float(os.environ['MASK_PERCENT'])), len(text) - int( + os.environ['MAX_TOKEN_LEN'])) + hidden_token = "".join((text[:-hidden_part], "*" * (len(text) - hidden_part))) + return hidden_token + + @staticmethod + def _get_repo(repo, cx_client): + """ + Get Repo from Cx project Settingssa + :param repo: + :return: + """ + conf = cx_client.find_project_by_name(CX_PROJECT) + if conf and "SourceCodeSettings" in conf and conf.SourceCodeSettings.SourceOrigin == 'SourceControl': + repo = conf.SourceCodeSettings.SourceControlSetting.ServerName + if '@' in repo: + repo = f"https://{str(repo).split('@')[1].replace(':', '/')}" + return repo + + def _canonify(self): + # TODO: If no connectivity with CX should we generate report or not? + cx_client = Checkmarx.Checkmarx(CX_PROJECT) + report = [] + bugbar_vulns = set() + existing_bb = set() + jira_recommendations = {} + jira_desc = {} + repo = os.environ.get('REPO', '') + branch = os.environ.get('BRANCH', '') + if not repo: + self._get_repo(repo, cx_client) + for query in self.report: + group = query.attrib.get("group") + query_id = query.attrib.get("id") + cwe = query.attrib.get("cweId") + category = query.attrib.get("categories") + bugbar_vulns.add(query.attrib.get("name")) + if category and category.rfind(";"): + category_place = category.rfind(";") + 1 + category = category[category_place:] + existing_results = set() + for result in query: + for path_ in result: + result_file = result.attrib["FileName"] + name = query.attrib.get("name") + language = query.attrib.get("Language") + line = result.attrib.get("Line") + # do not append multiple results with the same file an line of code to report + if result_file + line not in existing_results: + existing_results.add(result_file + line) + else: + continue + result_state = result.attrib.get('state') + remark = result.attrib.get("Remark") + rp_defect_type = RP_DEFECT_TYPES[result_state] + priority = '' + severity = result.attrib.get("Severity") + deep_link = result.attrib.get("DeepLink") + git_link = self.get_git_path(repo, branch, result.attrib["FileName"]) + file_index = str(result_file).rfind("/") + test_name = str(result_file)[file_index + 1:] + issue = deepcopy(self.canonical_issue_model) + name, priority, severity, desc, rec = self.__get_from_bugbar(existing_bb, issue, name, priority, + severity, language) + + if not priority and not os.environ.get("send_without_bb", ""): + continue + issue['Issue Name'] = f"{name}.{result_file}" if issue.get('Grouped', True) else name + issue['Jira Name'] = name + issue['Issue Tool'] = 
self.tool_name + issue["Issue Severity"] = severity + issue["Issue Priority"] = priority + issue['Issue Confidence'] = 'Certain' + issue['error_string'] = f"{name} {cwe}\n{result_file}" + issue['Links'] = deep_link + issue['CWE'] = f"[CWE-{cwe}|{vi.CWE_LINK + cwe}]" + issue['Repo'] = repo + if rp_defect_type: + issue['RP Defect Type'] = rp_defect_type + issue['RP Comment'] = remark + if not rec: + try: + if query_id not in jira_recommendations: + jira_recommendations[query_id] = utils.get_jira_recommendations(cx_client, query_id) + + issue['Recommendations'] = jira_recommendations[query_id] + except Exception: + LOG.info(f'Could not parse issue recommendation for this query id {query_id}') + # use hardcoded recommendations in case of any issues with parsing html returned by Cx + issue['Recommendations'] = self.recommendation.format(line, test_name) + jira_recommendations[query_id] = self.recommendation.format(line, test_name) + else: + issue['Recommendations'] = rec + issue["Tags"].extend([{"TestType": self.test_type}, + {"Provider": self.provider}, + {"Tool": self.tool_name}]) + place = self.instance.format(line, result_file, deep_link) + issue['References'] = place + + if git_link: + issue['Instances'] = f"File {git_link}" + else: + issue['Instances'] = f"File {result_file}" + try: + # TODO: remove hardcoded values - use config instead + if name == 'Sensitive Information Disclosure': + snippet = self.mask(path_[0].find("Snippet").find("Line").find("Code").text.strip()) + else: + snippet = path_[0].find("Snippet").find("Line").find("Code").text.strip() + except AttributeError: + snippet = path_[0].find("Name").text.strip() + issue['Snippet'] = snippet + if not desc: + issue["Description"] = self.info_message.format(name, group, category, snippet[:100]) + try: + if cwe not in jira_desc: + jira_desc[cwe] = utils.get_jira_overview(cx_client, cwe) + issue["Description"] = self.info_message.format(jira_desc[cwe], group, category, + snippet[:100]) + except Exception: + LOG.info(f'Could not parse issue overview for this cwe id {cwe}') + issue["Description"] = self.info_message.format(name, group, category, snippet[:100]) + jira_desc[cwe] = name + else: + issue["Description"] = self.info_message.format(desc, group, category, + snippet[:100]) + report.append(issue) + print("Checkmarx report generation finished") + self.new_items[self.tool_name] = bugbar_vulns.difference(existing_bb) + return report + + def __get_from_bugbar(self, existing_bb, issue, name, priority, severity, lang): + desc = '' + recommendation = '' + for bug_bar_issue in self.bug_bar: + items = [item.lower() for item in self.bug_bar[bug_bar_issue]['cxsast'].split(';')] + if name.lower() in items: + existing_bb.add(name) + name = bug_bar_issue + if self.bug_bar[bug_bar_issue]['is_issue'] == 'FALSE': + continue + severity = self.bug_bar[bug_bar_issue]['risk_rating'] + priority = self.bug_bar[bug_bar_issue]['jira_priority'] + if lang.lower() in self.bug_bar[bug_bar_issue]['description']: + desc = self.bug_bar[bug_bar_issue]['description'][lang.lower()] + if lang.lower() in self.bug_bar[bug_bar_issue]['recommendation']: + recommendation = self.bug_bar[bug_bar_issue]['recommendation'][lang.lower()] + + if self.bug_bar[bug_bar_issue].get('grouped', '') == 'FALSE': + issue['Grouped'] = False + return name, priority, severity, desc, recommendation diff --git a/sast_controller/converters/Converter.py b/sast_controller/converters/Converter.py new file mode 100755 index 0000000..def5b10 --- /dev/null +++ 
b/sast_controller/converters/Converter.py @@ -0,0 +1,254 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from json import dumps +from junit_xml import TestSuite, TestCase + +from sast_controller.drivers.rp.TestItem import TestItem +from sast_controller.drivers.rp.TestMessage import TestMessage +from sast_controller.converters import PRIORITY_MAPPING, RP_DEFECT_TYPE_PRIORITY, SEVERITY_MAPPING + + +class Converter(object): + """Convert report to canonical data model""" + + def __init__(self, models, repo='', branch=''): + self.report = [] + self.test_items = [] + self.json_items = [] + self.report_name = 'SAST Scan' + for report_file, model in models.items(): + model.repo = repo + model.branch = branch + model_ = model(report_file) + self.report += model_.report + + def get_rp_items(self): + """ + Convert from canonical data model to Report portal test items + :return: + """ + test_items_ = [] + aggregated_issues = dict() + # aggregate issues by issue name and file that contains the issue + for item in self.report: + tags = item.get('Tags', None) + issue_name = item['Issue Name'] + if item.get('Grouped', True): + issue_key = '%s_%s' % (issue_name, item['Instances']) + else: + issue_key = issue_name + if {'Tool': 'Snyk'} in tags: + issue_key = item.get('top_level_module') + if issue_key in aggregated_issues: + aggregated_issues[issue_key].append(item) + else: + aggregated_issues[issue_key] = [item] + for issues in aggregated_issues.values(): + tags = issues[0].get('Tags', None) + severity = issues[0]["Issue Severity"] + priority = issues[0]["Issue Priority"] + name = issues[0]['Issue Name'] + if {'Tool': 'Snyk'} in tags: + pkg_name = '.'.join(name.split('.')[1:]) + name = f"Vulnerable Software Version.{pkg_name}" + issue_confidence = issues[0]['Issue Confidence'] + attachments = [] + tools = set() + recommendations = '' + rp_defect_info = self.get_rp_defect_info(issues) + error_msgs = set() + info_msgs = set() + description = set() + instances = set() + steps = set() + paths = set() + refs = set() + params = None + if not name: + continue + if not priority: + priority = PRIORITY_MAPPING[severity] + severity_val = SEVERITY_MAPPING[issues[0]["Issue Severity"]] + issue = None + for issue in issues: + if issue['Recommendations'] and issue['Recommendations'] not in recommendations: + recommendations += f"\n{issue['Recommendations']}" + if issue['Description']: + description.add(issue['Description']) + if issue['Attachments']: + attachments.extend(issue['Attachments']) + if issue['Instances']: + instances.add(issue['Instances']) + if issue['Repo'] and not any([_ for _ in instances if issue['Repo'] in _]): + instances.add(f"Repo {issue['Repo']}") + if issue['References']: + refs.add(issue['References']) + if issue['Steps To Reproduce']: + steps.add(issue['Steps To Reproduce']) + if issue['Paths']: + paths.add(issue['Paths']) + if severity_val < SEVERITY_MAPPING[issue["Issue Severity"]]: + severity = issue["Issue Severity"] + severity_val = 
SEVERITY_MAPPING[issue["Issue Severity"]] + if issue["Issue Priority"]: + priority = issue["Issue Priority"] + else: + priority = PRIORITY_MAPPING[severity] + if issue['Jira Name']: + params = {"Name": issue['Jira Name']} + tools.add(issue['Issue Tool']) + error_msgs.add(issue['error_string']) + info_msgs.add(self.get_info_msg(issue, info_msgs)) + + ti = self.create_ti(name, tools, priority, severity, steps, + issue_confidence, recommendations, paths, refs, + error_msgs, info_msgs, description, instances, tags, + attachments, params, rp_defect_info) + test_items_.append(ti) + if issue: + issue['Jira Description'] = self.get_jira_description(ti) + self.json_items.append(issue) + + self.test_items = test_items_ + return test_items_ + + @staticmethod + def get_jira_description(ti): + for msg in ti.msgs: + if msg.status == 'INFO': + return msg.message + return ti.description + + @staticmethod + def get_rp_defect_info(issues): + defect_type = issues[0].get('RP Defect Type', 'To Investigate') + defect_type_comment = issues[0].get('RP Comment', '') + for issue in issues: + current_defect_type = issue.get('RP Defect Type', 'To Investigate') + if current_defect_type != defect_type: + if defect_type is None: + defect_type = current_defect_type + defect_type_comment = issue.get('RP Comment', '') + else: + if current_defect_type is not None: + if RP_DEFECT_TYPE_PRIORITY[current_defect_type] < RP_DEFECT_TYPE_PRIORITY[defect_type]: + defect_type = current_defect_type + defect_type_comment = issue.get('RP Comment', '') + return { + 'RP Defect Type': defect_type, + 'RP Comment': defect_type_comment + } + + def get_junit_items(self, new_items=''): + """ + Convert from canonical data model to junit test suit + :param new_items: + :return: + """ + test_cases = [] + if not self.test_items and not new_items: + raise ValueError('There it no test items') + data = self.test_items if not new_items else new_items + + for item in data: + tc = TestCase(item.issue, classname=item.confidence) + message = '' + for msg in item.msgs: + message = message + msg.message + "\n\n" + tc.add_error_info(message=message, error_type=item.severity) + test_cases.append(tc) + ts = TestSuite(self.report_name, test_cases) + return ts + + def get_json_items(self): + """ + Returns items after conversion in canonical format as JSON report + :return: + """ + return self.json_items + + def get_raw_data(self): + return dumps(self.report, indent=2) + + @staticmethod + def create_ti(name, tools, priority, severity, steps, confidence, recommendations, paths, refs, error_msgs, + info_msgs, description, instances, tags=None, attachments=None, params=None, + rp_defect_info=None) -> TestItem: + """Create test item with parameters""" + issue_description = "" + info_log = "" + + if len(instances) > 0: + info_log += "h3.*Instances:*\n{}\n".format('\n'.join(sorted(instances))) + + if len(recommendations) > 0: + issue_description += "%s: %s\n" % ('Recommendations', recommendations) + info_log += "h3.*Recommendations:*\n{}\n".format(recommendations) + + if len(steps) > 0: + info_log += "h3.*Steps to Reproduce:*\n{}\n".format('\n'.join(sorted(steps))) + + if len(paths) > 0: + issue_description += "%s: %s\n" % ('Paths', '\n'.join(sorted(paths))) + if len(refs) > 0: + issue_description += "%s: %s\n" % ('References', '\n'.join(sorted(refs))) + + if any(info_msgs): + info_log += "h3.*Overview:*\n" + for _ in sorted(info_msgs): + if _: + info_log += _ + '\n' + + test_item = TestItem(name, tool=tools.pop(), severity=severity, confidence=confidence, + 
description=issue_description, priority=priority, attachments=attachments, + defect_type_info=rp_defect_info) + for _ in tools: + test_item.add_tag("Tool", _) + if tags and isinstance(tags, list): + for item in tags: + (k, v), = item.items() + test_item.add_tag(k, v) + for _ in error_msgs: + test_item.add_message(TestMessage(_, "ERROR")) + + test_item.add_message(TestMessage(info_log, "INFO")) + if params: + for k, v in params.items(): + test_item.add_param(k, v) + return test_item + + @staticmethod + def get_info_msg(issue, exists_msgs): + """ + Convert canonical date model to log message + :param issue: + :param exists_msgs: + :return: + """ + issue_name = issue['Issue Name'] + if '.' in issue['Issue Name']: + issue_name = issue['Issue Name'].split('.')[0] + info_message = '{panel:title=%s}' % issue_name + for field in ['Description', 'References']: + if issue.get(field, ''): + info_message += "*%s*: \n%s\n\n" % (field, issue[field].strip()) + if issue.get('Paths', ''): + paths = '\n'.join(sorted(issue['Paths'].strip().split('\n\n'))) + info_message += "*Paths*: \n%s\n\n" % paths + info_message += '{panel}' + for _ in exists_msgs: + if info_message in _: + return '' + return info_message diff --git a/sast_controller/converters/SnykReport.py b/sast_controller/converters/SnykReport.py new file mode 100755 index 0000000..dfe0e54 --- /dev/null +++ b/sast_controller/converters/SnykReport.py @@ -0,0 +1,218 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
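`Converter` glues the tool-specific models together: it is fed a mapping of raw report files to the report class that understands them, and exposes the aggregated findings as Report Portal test items, a JUnit suite or canonical JSON. A hedged usage sketch; the report paths are placeholders, and the Checkmarx converter additionally needs the `CX_*` environment variables at runtime:

```python
from sast_controller.converters.Converter import Converter
from sast_controller.converters.CheckmarxReport import CheckmarxReport
from sast_controller.converters.SnykReport import SnykReport

# Map raw report files to the converter class that understands them.
models = {
    '/code/reports/checkmarx.xml': CheckmarxReport,   # placeholder paths
    '/code/reports/snyk.json': SnykReport,
}
converter = Converter(models, repo='https://github.com/org/repo', branch='master')

rp_items = converter.get_rp_items()        # Report Portal test items (also populates converter.test_items)
junit_suite = converter.get_junit_items()  # junit_xml.TestSuite for CI consumption
canonical_json = converter.get_raw_data()  # canonical issues as a JSON string
```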
+ +from copy import deepcopy +import json +from re import sub + +import semver + +from sast_controller.converters.BaseReport import BaseReport +from sast_controller.converters import SEVERITY_MAPPING, SEVERITY_MAPPING_TYPE + + +class SnykReport(BaseReport): + """Canonical dta model implementation for Snyk""" + + tool_name = "Snyk" + + @staticmethod + def max_severity(item1, item2): + if SEVERITY_MAPPING.get(item1['Issue Severity']) > SEVERITY_MAPPING.get(item2['Issue Severity']): + return item1['Issue Severity'] + return item2['Issue Severity'] + + @staticmethod + def get_exact_top_deps(): + """ + Returns top level packages that have exact versions (non-range) + :return: + """ + # Note, this currently works only for NPM and only when running from folder containing package.json file + try: + with open('package.json', 'r') as package_json: + dependencies = json.loads(package_json.read()).get('dependencies', []) + except FileNotFoundError: + return [] + fixed_dependencies = [] + for dependency in dependencies: + if not any(symbol in dependencies[dependency] for symbol in ['^', '~', '>', '<']): + fixed_dependencies.append('@'.join([dependency, dependencies[dependency]])) + return fixed_dependencies + + @staticmethod + def filter_vulnerable_paths(vulnerabilities): + fixed_deps = SnykReport.get_exact_top_deps() + if not fixed_deps: + return vulnerabilities + result = [] + for vulnerability in vulnerabilities: + vuln_paths = vulnerability['Paths'].copy() + for vuln_path in vuln_paths: + if (any(fixed_dependency in vuln_path.split('>')[2:] for fixed_dependency in fixed_deps) + and 'Re-install' in vulnerability['Recommendations']): + vulnerability['Paths'].remove(vuln_path) + if vulnerability['Paths']: + result.append(vulnerability) + return result + + def group_vulnerabilities(self, vulnerabilities): + """ + Groups vulnerabilities by top level module and recommendations + :return: + """ + grouped_issues = {} + for vuln in vulnerabilities: + converted_vuln = self.get_item(vuln) + if not converted_vuln['upgrades'] and converted_vuln['language'] != 'dotnet': + continue + # do not report issues which don't contain upgrades for top modules, except javascript until + # we know from Snyk what we can do for such vulnerable modules + if (converted_vuln['upgrades'] and converted_vuln['upgrades'][1] == converted_vuln['from'][1] + and converted_vuln['language'] != 'js'): + continue + key = (converted_vuln['top_level_module'], converted_vuln['Recommendations']) + if key not in grouped_issues: + grouped_issues[key] = converted_vuln + else: + grouped_issues[key]['Paths'] = grouped_issues[key]['Paths'].union(converted_vuln['Paths']) + grouped_issues[key]['Issue Severity'] = self.max_severity(grouped_issues[key], + converted_vuln) + grouped_issues[key]['RP Defect Type'] = SEVERITY_MAPPING_TYPE[grouped_issues[key]['Issue Severity']] + return list(grouped_issues.values()) + + @staticmethod + def update_recommendations(vulnerabilities): + recommendations = dict() + for vuln in vulnerabilities: + # dotnet issues don't have upgrades + if not vuln['upgrades']: + continue + if vuln['from'][1] not in recommendations: + recommendations[vuln['from'][1]] = vuln['upgrades'][1] + else: + + module = vuln['upgrades'][1][:vuln['upgrades'][1].rfind('@')] + if semver.compare(vuln['upgrades'][1].split('@')[-1], recommendations[vuln['from'][1]].split('@')[-1], + loose=True) == -1: + max_version = recommendations[vuln['from'][1]].split('@')[-1] + else: + max_version = vuln['upgrades'][1].split('@')[-1] + 
recommendations[vuln['from'][1]] = '@'.join([module, max_version]) + for vuln in vulnerabilities: + if vuln['language'] == 'dotnet': + vuln['Recommendations'] = f'Upgrade `{vuln["top_level_module"]}` to the latest compatible version.' + continue + if semver.compare(vuln['from'][1].split("@")[-1], recommendations[vuln['from'][1]].split("@")[-1], + loose=True) == -1: + vuln['Recommendations'] = (f'Upgrade `{vuln["top_level_module"]}` ' + f'to version {recommendations[vuln["from"][1]].split("@")[-1]} or higher') + else: + vuln['Recommendations'] = (f'Your dependencies are out of date. Please remove your `node_modules` ' + f'directory and lock file, run `npm install` and commit new lock file to ' + f'your repo. Note, this will likely make a lot of changes to lock file.') + + def _canonify(self): + if isinstance(self.report, list): + vulnerabilities = [] + for project_report in self.report: + vulnerabilities.extend(project_report.get('vulnerabilities', [])) + else: + vulnerabilities = self.report.get('vulnerabilities', []) + grouped_vulnerabilites = self.group_vulnerabilities(vulnerabilities) + SnykReport.update_recommendations(grouped_vulnerabilites) + filtered_vulns = self.filter_vulnerable_paths(grouped_vulnerabilites) + for vulnerability in filtered_vulns: + vulnerability['Paths'] = '\n\n'.join(vulnerability['Paths']) + del vulnerability['from'] + return filtered_vulns + + def get_item(self, vulnerability): + """ + Convert to canonical data model + :param vulnerability: + :return: + """ + smileys = [':\)', ':\(', ':P', ':D', + ';\)', '\(y\)', '\(n\)', '\(on\)', '\(off\)', + '\(!\)', '\(\*\)', '\(\*r\)', '\(\*g\)', '\(\*b\)', + '\(\*y\)', '\(/\)', '\(x\)', '\(i\)', '\(\+\)', + '\(-\)', '\(\?\)', '<3', ''.join(vulnerability['from']) + issue_base = deepcopy(self.canonical_issue_model) + issue_base['Paths'] = set() + issue_base['Issue Tool'] = self.tool_name + issue_base['Issue Priority'] = priority + issue_base['Issue Severity'] = vulnerability['severity'].title() + + description = vulnerability['description'].split('##') + remediation = " ".join([s for s in description if 'Remediation' in s]) + remediation = remediation.replace('Remediation', '*Remediation:*') + overview = " ".join([s for s in description if 'Overview' in s]) + + issue_base['Description'] = f"*Vulnerable Package:* " \ + f"{vulnerability['packageName']}\n*Current Version:* " \ + f"{vulnerability['version']}\n*Vulnerable Version(s):* " \ + f"{vulnerable_versions}\n " \ + f"\n{remediation}\n " \ + f"{overview}\n " + + issue_base['Paths'].add(vulnerable_path) + issue_base['Description'] = issue_base['Description'].replace("##", "").replace("**Example:**", '') + issue_base['Description'] = sub(r'```((.|\n)*)```', '', issue_base['Description']).replace(r'', '') + issue_base['Description'] = sub(r'|'.join(smileys), '', issue_base['Description']) + try: + issue_base['Recommendations'] = f"Package {vulnerability['from'][1]} " \ + f"contains known vulnerabilities" + issue_base['Issue Name'] = f'{vulnerability["title"]}.{vulnerability["from"][1]}' + except Exception: + issue_base['Recommendations'] = f"Package {vulnerability['packageName']}:{vulnerability['version']} " \ + f"contains known vulnerabilities" + issue_base['Issue Name'] = f'{vulnerability["title"]}.' 
\ + f'{vulnerability["packageName"]}:{vulnerability["version"]}' + if 'Remediation' in vulnerability.get("description"): + issue_base['Recommendations'] = \ + vulnerability["description"].split("Remediation")[1].split("References")[0].replace("#", "").strip() + issue_base['References'] = vulnerability["description"].split("Remediation")[1].split("References")[1] + try: + issue_base['CWE'] = vulnerability['identifiers']['CWE'][0] + except Exception: + pass + separator_index = vulnerability["from"][1].rfind('@') + top_level_module = vulnerability["from"][1][:separator_index] + issue_base['top_level_module'] = top_level_module + issue_base['error_string'] = top_level_module + issue_base['upgrades'] = vulnerability.get('upgradePath') + issue_base['from'] = vulnerability.get('from') + issue_base['language'] = vulnerability.get('language') + issue_base['Issue Confidence'] = 'Certain' + issue_base["Tags"].extend([{"TestType": self.test_type}, + {"Provider": self.provider}, + {"Tool": self.tool_name}]) + issue_base['Jira Name'] = 'Vulnerable Software' + issue_base['Instances'] = issue_base['top_level_module'] + issue_base['Issue Name'] = f"{vulnerability['title']}.{issue_base['top_level_module']}" + + return issue_base diff --git a/sast_controller/converters/__init__.py b/sast_controller/converters/__init__.py new file mode 100755 index 0000000..062854b --- /dev/null +++ b/sast_controller/converters/__init__.py @@ -0,0 +1,60 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +SEVERITY_MAPPING = { + 'Critical': 4, + 'High': 3, + 'Medium': 2, + 'Moderate': 2, + 'Low': 1, + 'Information': 0, + 'Info': 0 +} + +SEVERITY_MAPPING_TYPE = { + 'Critical': 'Product Bug', + 'High': 'Product Bug', + 'Medium': 'No Defect', + 'Moderate': 'No Defect', + 'Low': 'No Defect', + 'Information': 'No Defect', + 'Info': 'No Defect' +} + +PRIORITY_MAPPING = { + 'Critical': 'Blocker', + 'High': 'Critical', + 'Medium': 'Major', + 'Moderate': 'Major', + 'Low': 'Minor', + 'Information': 'Trivial', +} + +STATUS_MAPPING = { + 'Blocker': 'Critical', + 'Critical': 'Critical', + 'Major': 'High', + 'Minor': 'Medium', + 'Info': 'Information', +} + +SONAR_RULES_LINK = 'https://rules.sonarsource.com/javascript/type/Vulnerability/RSPEC-{}' +SONAR_PROJECT_LINK = '{}:{}/project/issues?id={}&types=VULNERABILITY' + +RP_DEFECT_TYPE_PRIORITY = { + 'Product Bug': 0, + 'System Issue': 1, + 'To Investigate': 2, + 'No Defect': 3 +} diff --git a/sast_controller/drivers/__init__.py b/sast_controller/drivers/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/drivers/connect_utils.py b/sast_controller/drivers/connect_utils.py new file mode 100755 index 0000000..f74f68e --- /dev/null +++ b/sast_controller/drivers/connect_utils.py @@ -0,0 +1,44 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import contextlib +import requests +import urllib3 +import warnings + +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + +try: + from functools import partialmethod +except ImportError: + from functools import partial + + class partialmethod(partial): + def __get__(self, instance, owner): + if instance is None: + return self + + return partial(self.func, instance, *(self.args or ()), + **(self.keywords or {})) + + +@contextlib.contextmanager +def no_ssl_verification(): + """Method that disable SSL verification""" + old_request = requests.Session.request + requests.Session.request = partialmethod(old_request, verify=False) + warnings.filterwarnings('ignore', 'Unverified HTTPS request') + yield + warnings.resetwarnings() + requests.Session.request = old_request diff --git a/sast_controller/drivers/cx/Checkmarx.py b/sast_controller/drivers/cx/Checkmarx.py new file mode 100755 index 0000000..e3673be --- /dev/null +++ b/sast_controller/drivers/cx/Checkmarx.py @@ -0,0 +1,291 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
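`no_ssl_verification` above temporarily patches `requests.Session.request` so that every call made inside the `with` block skips certificate verification, then restores the original behaviour on exit. A short sketch; the URL is a placeholder:

```python
import requests

from sast_controller.drivers.connect_utils import no_ssl_verification

with no_ssl_verification():
    # Certificate checks are disabled only while this block is executing.
    response = requests.get('https://self-signed.example.com/health')
print(response.status_code)
```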
+ +import logging +import string +import random +import os + +from sast_controller.drivers.cx.CheckmarxConnection import CheckmarxConnection +from sast_controller.drivers.cx.utils import extract_zip, configure_logging + +ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + +PRESENT_ID = 0 + + +def id_generator(size=6, chars=string.ascii_uppercase + string.digits): + return ''.join(random.choice(chars) for _ in range(size)) + + +class Checkmarx(object): + """Checkmarx connector wrapper""" + + ReportFormat = 'XML' + + def __init__(self, project=None): + self.logger = configure_logging(logging.getLogger(__name__)) + self.checkmarx_url = os.environ.get('CX_URL') + self.owner = os.environ.get('CX_USER', '') + if not self.owner: + self.owner = os.environ.get('OWNER') + self.password = os.environ.get('CX_PASSWORD', '') + if not self.password: + self.password = os.environ.get('PASSWORD') + self.project = project + + connection = CheckmarxConnection(self.checkmarx_url, self.owner, + self.password) + sdk_client = connection.get_client() + session_id = connection.session_id + self.version = "{http://Checkmarx.com/%s}" % (os.environ.get('CX_WSDL_VERSION')) + if not session_id: + self.client = None + self.session = None + self.valid = False + + else: + # default client is SDK type client + self.client = sdk_client + self.sdk_client = sdk_client + self.web_portal_client = connection.get_client('WebPortal') + self.session = session_id + self.valid = True + project_settings = self.find_project_by_name(project) + self.project_config = project_settings.ProjectSettings if project_settings else None + self.group_details = self.get_associated_groups() + self.scan_path = self.group_details['GroupList']['Group'][0]['GroupName'] + self.associated_group_id = self.group_details['GroupList']['Group'][0]['ID'] + + def get_type(self, cx_type, *args): + """ + Get client type + :param cx_type: + :param args: + :return: + """ + return self.client.get_type('%s%s' % (self.version, cx_type))(*args) + + def report_type(self): + return self.get_type('CxWSReportType', self.ReportFormat) + + def report_request(self, long_scan_id): + """ + Report request + :param long_scan_id: + :return: + """ + return self.get_type('CxWSReportRequest', self.report_type(), + long_scan_id) + + def get_result_description(self, scan_id, path_id): + """ + Get result description + :param scan_id: + :param path_id: + :return: + """ + res = self.web_portal_client.service.GetResultDescription(self.session, scan_id, path_id) + if res['IsSuccesfull']: + return res['ResultDescription'] + + def get_query_description_by_query_id(self, query_id): + """ + Get query description by query id + :param query_id: + :return: + """ + res = self.web_portal_client.service.GetQueryDescriptionByQueryId(self.session, query_id) + if res['IsSuccesfull']: + return res['QueryDescription'] + + def get_cwe_description(self, cwe_id): + """ + Get Checkmarx description by CWE id + :param cwe_id: + :return: + """ + res = self.web_portal_client.service.GetCWEDescription(self.session, cwe_id) + if res['IsSuccesfull']: + return res['QueryDescription'] + + def run_scan(self, local_path=None, incremental_scan=False, long_project_id=None): + """ + Run Checkmarx scan + :param local_path: + :param incremental_scan: + :param long_project_id: + :return: + """ + args = self.client.get_type(f'{self.version}CliScanArgs')() + if not long_project_id: + if self.project_config: + print('Existing Project:', self.project) + args.PrjSettings = self.project_config + args.IsIncremental = 
incremental_scan + else: + print('New project:', self.project) + self.set_new_project_config(args, self.project) + else: + conf = self.client.service.GetProjectConfiguration( + self.session, long_project_id).ProjectConfig + if not conf: + self.logger.critical(f"GetProjectConfiguration Failed: {conf.ErrorMessage}") + return False + args.PrjSettings = conf.ProjectSettings + args.PrjSettings.PresetID = PRESENT_ID + args.IsIncremental = incremental_scan + args.PrjSettings.ProjectName = "%s\%s" % (self.scan_path, self.project) + args.SrcCodeSettings = self.get_type('SourceCodeSettings') + args.SrcCodeSettings.SourceOrigin = 'Local' + args.SrcCodeSettings.UserCredentials = None + args.SrcCodeSettings.PathList = None + args.SrcCodeSettings.SourceControlSetting = None + args.SrcCodeSettings.PackagedCode = self.get_type('LocalCodeContainer') + args.SrcCodeSettings.PackagedCode.FileName = "@%s\%s.zip" % (self.scan_path, id_generator()) + args.SrcCodeSettings.PackagedCode.ZippedFile = extract_zip(local_path) + args.SrcCodeSettings.SourceFilterLists = self.get_type('SourceFilterPatterns') + args.SrcCodeSettings.SourceFilterLists.ExcludeFilesPatterns = "" + args.SrcCodeSettings.SourceFilterLists.ExcludeFoldersPatterns = "" + args.Comment = 'Running from code' + args.IsPrivateScan = False + args.ClientOrigin = 'SDK' + args.IgnoreScanWithUnchangedCode = False + args.SrcCodeSettings.SourceFilterLists.ExcludeFilesPatterns = "" + args.SrcCodeSettings.SourceFilterLists.ExcludeFoldersPatterns = "" + args.SrcCodeSettings.SourceControlCommandId = 0 + return self.client.service.Scan(self.session, args) + + def create_scan_report(self, long_scan_id): + """ + Create scan report by long scan id + :param long_scan_id: + :return: + """ + return self.client.service.CreateScanReport( + self.session, self.report_request(long_scan_id)) + + def get_scan_report_status(self, request_id): + """ + Get scanning status + :param request_id: + :return: + """ + return self.client.service.GetScanReportStatus(self.session, request_id) + + def get_status_of_single_run(self, run_id): + """ + Get status for single run + :param run_id: + :return: + """ + return self.client.service.GetStatusOfSingleScan(self.session, run_id) + + def get_project_scanned_display_data(self): + """ + Get project scanned display data + :return: + """ + data = self.client.service.GetProjectScannedDisplayData(self.session) + return data.ProjectScannedList.ProjectScannedDisplayData + + def get_scan_report(self, request_id): + """ + Ger scan report by request id + :param request_id: + :return: + """ + return self.client.service.GetScanReport(self.session, request_id) + + def get_associated_groups(self): + """ + :return: + { + 'IsSuccesfull': True, + 'ErrorMessage': None, + 'GroupList': { + 'Group': [ + { + 'GroupName': 'CxServer\\Z\\Org\\Group', + 'ID': uuid, + 'Type': 'Team', + 'Guid': None, + 'FullPath': None, + 'Path': None + } + ] + } + } + """ + return self.client.service.GetAssociatedGroupsList(self.session) + + def get_projects_display_data(self): + return self.client.service.GetProjectsDisplayData(self.session).projectList.ProjectDisplayData + + def set_project_config(self, args, conf): + """ + Set config for project + :param args: + :param conf: + """ + args.PrjSettings = conf.ProjectConfig.ProjectSettings + project_name = args.PrjSettings.ProjectName + args.PrjSettings.PresetID = 0 + args.PrjSettings.ProjectName = self.scan_path + "\\" + project_name + + def set_new_project_config(self, args, project): + """ + Set new config for project + :param args: + 
:param project: + """ + self.logger.info("Create new project") + args.PrjSettings = self.get_type('ProjectSettings') + args.PrjSettings.projectID = 0 + args.PrjSettings.TaskId = 0 + args.PrjSettings.PresetID = PRESENT_ID + args.PrjSettings.AssociatedGroupID = self.associated_group_id + args.PrjSettings.ScanConfigurationID = '1' + args.PrjSettings.Description = project + args.PrjSettings.Owner = self.owner + args.PrjSettings.IsPublic = True + args.PrjSettings.OpenSourceAnalysisOrigin = 'LocalPath' + args.PrjSettings.ProjectName = self.scan_path + "\\" + project + args.IsIncremental = False + + def find_project_by_name(self, project): + """ + { + 'projectID': 000, + 'ProjectName': 'demo_project', + 'PresetID': 0000000, + 'TaskId': 000, + 'AssociatedGroupID': 'UUID', + 'ScanConfigurationID': 1, + 'Description': None, + 'Owner': 'some@example.com', + 'IsPublic': True, + 'OpenSourceSettings': None, + 'OpenSourceAnalysisOrigin': 'LocalPath' + } + """ + projects = self.get_projects_display_data() + for prj in projects: + if prj.ProjectName.lower() == project.lower(): + long_project_id = prj.projectID + conf = self.client.service.GetProjectConfiguration(self.session, long_project_id).ProjectConfig + if conf.ProjectSettings: + return conf + else: + self.logger.critical(f"GetProjectConfiguration Failed: {conf.ErrorMessage}") diff --git a/sast_controller/drivers/cx/CheckmarxConnection.py b/sast_controller/drivers/cx/CheckmarxConnection.py new file mode 100755 index 0000000..371f6af --- /dev/null +++ b/sast_controller/drivers/cx/CheckmarxConnection.py @@ -0,0 +1,85 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import requests +import logging + +from requests import Session, ConnectionError +from zeep import Client +from zeep.transports import Transport + +from sast_controller.drivers.cx.utils import configure_logging + +requests.packages.urllib3.disable_warnings() + +ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class CheckmarxConnection(object): + """Connector to Checkmarx""" + + def __init__(self, hostname=None, username=None, password=None): + """ + :param hostname: + Checkmarx hostname + :param username: + Checkmarx username + :param password: + Checkmarx password + """ + self.logger = configure_logging(logging.getLogger(__name__)) + self.hostname = hostname + self.username = username + self.password = password + self.resolver_url = "%s/cxwebinterface/cxwsresolver.asmx?wsdl" % self.hostname + session = Session() + session.verify = False + self.transport = Transport(session=session) + try: + self._resolver_client = Client(self.resolver_url, transport=self.transport) + except Exception as error: + self.logger.error("Checkmarx connection failed: {error}".format(error=error)) + raise ConnectionError(f"Checkmarx connection failed. 
Wrong or inaccessible hostname: {hostname}") from None + self.session_id = None + self.clients = {} + + def get_client_url(self, client_type='SDK'): + return self._resolver_client.service.GetWebServiceUrl(client_type, 1).ServiceURL + + def get_client(self, client_type='SDK'): + """ + Connect to Checkmarx client + :param client_type: + :return: + """ + if client_type in self.clients: + return self.clients[client_type] + try: + client_url = self.get_client_url(client_type) + client = Client(client_url + "?wsdl", transport=self.transport, strict=False) + credentials = {'User': self.username, 'Pass': self.password} + login = client.service.Login(credentials, 1033) + if not login.IsSuccesfull: + raise AssertionError(f"Unable to login in Checkmarx. \n" + f"Please double check CX_PASSWORD and CX_USER.") + + if self.session_id is None: + self.session_id = login.SessionId + self.clients[client_type] = client + return client + except ConnectionError as error: + self.logger.critical( + "Checkmarx connection failed. Wrong or inaccessible hostname: {error}".format(error=error)) + return False, False diff --git a/sast_controller/drivers/cx/CxManager.py b/sast_controller/drivers/cx/CxManager.py new file mode 100755 index 0000000..1202175 --- /dev/null +++ b/sast_controller/drivers/cx/CxManager.py @@ -0,0 +1,93 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from sast_controller.drivers.cx.Checkmarx import Checkmarx + +ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + +INCREMENTAL_SCAN_ERROR = 'full scan should be submitted for accurate results' +NO_SOURCES_ERROR = 'worker failed to retrieve scan' +NO_SOURCES = "No supported files to scan in Checkmarx. 
\n Please find details by the link:\n " \ + "https://checkmarx.atlassian.net/wiki" \ + "/spaces/KC/pages/141328390/8.5.0+Supported+Code+Languages+and+Frameworks " + + +class CxIncrementalScanException(Exception): + """Use when unable to start Checkmarx incremental scan""" + + def __init__(self, message): + self.message = message + + +class CxNoSourceScanException(Exception): + """Use when no supported files in zip""" + + def __init__(self, message): + self.message = message + + +def scan_project(local_path=None, project=None, incremental_scan=False): + """ + Scan project using Checkmarx + :param local_path: + path to folder with project + :param project: + name of Checkmarx project + :param incremental_scan: + :return: + :raise: CxIncrementalScanException + if unable to start incremental scan + """ + cxClient = Checkmarx(project) + report = None + if not cxClient.valid: + cxClient.logger.critical("Invalid connection") + return report + response = cxClient.run_scan(local_path=local_path, + incremental_scan=incremental_scan) + if not response: + cxClient.logger.critical("No response") + return report + run_id = response.RunId + if run_id: + currently_running = None + scan_id = None + while currently_running != 'Finished': + scan = cxClient.get_status_of_single_run(run_id) + status = scan.CurrentStatus + currently_running = status + if currently_running == 'Finished': + cxClient.logger.info("Scan Finished") + try: + scan_id = scan.ScanId + except Exception: + cxClient.logger.critical(str(scan)) + raise + if currently_running == 'Failed': + cxClient.logger.critical("Scan Failed") + if scan.StageMessage.find(NO_SOURCES_ERROR) > -1: + raise CxNoSourceScanException(NO_SOURCES) + + cxClient.logger.critical(str(scan)) + if str(scan).find(INCREMENTAL_SCAN_ERROR) > -1: + raise CxIncrementalScanException(str(scan)) + break + if currently_running != "Failed": + report_id = cxClient.create_scan_report(scan_id).ID + while not cxClient.get_scan_report_status(report_id).IsReady: + cxClient.logger.info("Report generation in progress") + report = cxClient.get_scan_report(report_id) + return report diff --git a/sast_controller/drivers/cx/__init__.py b/sast_controller/drivers/cx/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/drivers/cx/utils.py b/sast_controller/drivers/cx/utils.py new file mode 100755 index 0000000..e702069 --- /dev/null +++ b/sast_controller/drivers/cx/utils.py @@ -0,0 +1,268 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
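`scan_project` submits the zipped sources, polls the run status until Checkmarx reports `Finished` or `Failed`, and raises `CxIncrementalScanException` when the server insists on a full scan. The caller is expected to retry without the incremental flag, as `cx_scan` does earlier in this change; a hedged sketch with placeholder zip path and project name:

```python
from sast_controller.drivers.cx import CxManager
from sast_controller.drivers.cx.CxManager import CxIncrementalScanException

ZIP_PATH = '/tmp/code.zip'   # placeholder
PROJECT = 'demo_project'     # placeholder

try:
    report = CxManager.scan_project(local_path=ZIP_PATH, project=PROJECT, incremental_scan=True)
except CxIncrementalScanException:
    # Checkmarx rejected the incremental scan; fall back to a full scan.
    report = CxManager.scan_project(local_path=ZIP_PATH, project=PROJECT, incremental_scan=False)

if report and report.ScanResults:
    print('Checkmarx returned scan results')
```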
+ +import logging +import re +import time +import sys +import os +import zipfile + +import bs4 +from git import Repo + + +def configure_logging(logger): + logger.setLevel(logging.DEBUG) + + ch = logging.StreamHandler(sys.stdout) + ch.setLevel(logging.DEBUG) + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + ch.setFormatter(formatter) + logger.addHandler(ch) + return logger + + +def is_not_excluded_type(file, exclude_types): + """Return False if file type excluded, else True""" + if exclude_types: + for exclude_type in exclude_types: + if file.lower().endswith(exclude_type.lower()): + return False + return True + + +def zinfo_from_file(fullname): + st = os.stat(fullname) + mtime = time.localtime(st.st_mtime) + date_time = mtime[0:6] + + if date_time[0] > 1980: + # TODO Add Jira ticket with list of files/ add to sast documentations + return True + return False + + +def is_not_excluded_path(path, exclude_paths): + """Return False if path excluded, else True""" + if exclude_paths: + for exclude_path in exclude_paths: + if exclude_path.lower().strip() in path.lower(): + return False + return True + + +def generate_zip(zip_path, files_to_pack, project_path): + """ + Generate zip from file list + :param zip_path: + :param files_to_pack: + """ + zipf = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) + for file_ in files_to_pack: + zipf.write(file_, file_.replace(project_path, '', 1)) + zipf.close() + + +def zip_prj(prj_path, zip_path, exclude_pathes=None, exclude_types=None): + """ + Generate zip file + :param prj_path: + folder to archive + :param zip_path: + path to zip archive + :param exclude_pathes: + paths to exclude from zip + :param exclude_types: + file types to exclude from zip + """ + files_ = [] + for root, dirs, files in os.walk(prj_path): + files = [f for f in files if not f[0] == '.'] + dirs[:] = [d for d in dirs if not d[0] == '.'] + + if is_not_excluded_path(root, exclude_pathes): + for file in files: + if is_not_excluded_type(file, exclude_types): + f_path = os.path.join(root, file) + try: + if zinfo_from_file(f_path): + files_.append(f_path) + except Exception: + print('Unable to include in zip ', f_path) + pass + generate_zip(zip_path, files_, prj_path) + + +def extract_zip(local_path): + """ + Extract zip file + :param local_path: + :return: + """ + with open(local_path, 'rb') as f: + return f.read() + + +def write_file(f_path, data): + """ + Write data to output file + :param f_path: + output file path + :param data: + data to write + """ + with open(f_path, 'w') as f_out: + f_out.write(data) + + +def diff(path_, excluded_, excluded_path_): + """ + Get modified and new files from last commit + :param path_: + :param excluded_: + :param excluded_path_: + :return: + """ + repo = Repo(path_) + hcommit = repo.head.commit + difs_ = hcommit.diff('HEAD~1') + inc = list() + for type_ in ("M", "D"): + for _ in difs_.iter_change_type(type_): + filename, file_extension = os.path.splitext(os.path.join(path_, _.a_path)) + if os.path.basename(filename)[0] != '.': + if file_extension not in excluded_: + excluded = [True for _ in excluded_path_ if _ in filename.lower()] + if not excluded: + inc.append(_.a_path) + return inc + + +def zip_latest_files(path_to_repo, zip_path, excluded_types, excluded_paths): + """ + Create zip archive with last commit files + :param excluded_paths: + :param path_to_repo: + :param zip_path: + :param excluded_types: + :return: + """ + print('excluded:', excluded_types) + inc_f = diff(path_to_repo, excluded_types, 
excluded_paths) + print("Files changed in last commit: ", inc_f) + files_changed = [os.path.join(path_to_repo, f_path) for f_path in inc_f] + if inc_f: + generate_zip(zip_path, files_changed, path_to_repo) + + +def get_jira_tag(html_tag): + """ + Convert from HTML tags to JIRA markdown + :param html_tag: + :return: + """ + html_to_jira_tags = { + 'ol': '#', + 'ul': '*', + 'li': '' + } + if html_tag not in html_to_jira_tags: + return '' + else: + return html_to_jira_tags[html_tag] + + +def _get_recommendations(element, parent_tag=''): + children = [child for child in element.children] + parent_name = element.name + tag = parent_tag + get_jira_tag(parent_name) + text = '' + for child in children: + if isinstance(child, str): + continue + if child.name == 'li': + # check if list element contains nested list elements and then parse them + # otherwise retrieve the text of the element + if child.find('li') or child.find('ol') or child.find('ul'): + child_text = child.find(text=True, recursive=False) + text += f"{tag} {child_text}\n" + text += _get_recommendations(child, tag) + else: + child_text = child.text.strip() + text += f"{tag} {child_text}\n" + else: + text += _get_recommendations(child, tag) + return text + + +def get_jira_recommendations(cx_client, query_id): + """ + Get recommendation from Checkmarx + :param cx_client: + :param query_id: + :return: + """ + query_description = cx_client.get_query_description_by_query_id(query_id) + soup = bs4.BeautifulSoup(query_description, 'html.parser') + pattern = re.compile('How to avoid it') + recommendations = soup(text=pattern)[0] + current_tag = recommendations.next + response = "" + while True: + if current_tag is None: + break + if isinstance(current_tag, bs4.element.NavigableString): + current_tag = current_tag.next + continue + text = current_tag.find(text=True, recursive=False) + if text is None: + current_tag = current_tag.next + continue + # `Source Code Examples` is the next section after `How to avoid it` + # should stop processing here + if 'Source Code Examples' not in text: + if current_tag.name in ['ol', 'ul']: + response += _get_recommendations(current_tag, '') + '\n' + current_tag = current_tag.next_sibling + else: + text = text.strip() + if text: + response += text + '\n' + current_tag = current_tag.next + else: + break + return response + + +def get_jira_overview(cx_client, cwe_id): + """ + Get overview by CWE code + :param cx_client: + :param cwe_id: + :return: + """ + cwe_description = cx_client.get_cwe_description(cwe_id) + soup = bs4.BeautifulSoup(cwe_description, 'html.parser') + summary_pattern = 'Description Summary' + summary = soup(text=summary_pattern)[0].next + while True: + try: + summary_text = summary.text + except AttributeError: + summary_text = str(summary) + if summary_text.strip(): + summary_text = summary_text.replace('\n', ' ') + return ' '.join(summary_text.split()) + summary = summary.next diff --git a/sast_controller/drivers/jira/__init__.py b/sast_controller/drivers/jira/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/drivers/jira/baseClient.py b/sast_controller/drivers/jira/baseClient.py new file mode 100755 index 0000000..078dc81 --- /dev/null +++ b/sast_controller/drivers/jira/baseClient.py @@ -0,0 +1,40 @@ +import logging + +from os import environ +from jira import JIRA, JIRAError + + +class JiraBaseClient(object): + def __init__(self): + self.client = self.connect() + + def connect(self): + """Connect to Jira. 
Return None on error """ + try: + jira = JIRA(environ['JIRA_HOST'], basic_auth=(environ['JIRA_USR'], environ['JIRA_PWD']), + options={'verify': False}) + return jira + except Exception as e: + logging.error("Failed to connect to JIRA: %s" % e) + return None + + def create_issue(self, issue_data): + """Create new Jira ticket""" + issue = self.client.create_issue(fields=issue_data) + logging.info(' \u2713 %s issue was created: %s', + issue_data['issuetype']['name'], issue.key) + return issue + + def check_project(self, project): + """Check if project exists in Jira""" + logging.info('checking if project "%s" exists in jira', project) + try: + self.client.search_issues('project = {}'.format(project)) + logging.info('project "%s" exists', project) + return True + except JIRAError as exc: + logging.error('project "%s" not found in jira', project) + logging.info( + 'please specify correct project name or create project "%s" in jira', + project) + logging.debug('status code: %s; text: %s', exc.status_code, exc.text) diff --git a/sast_controller/drivers/rp/TestItem.py b/sast_controller/drivers/rp/TestItem.py new file mode 100755 index 0000000..6c1926e --- /dev/null +++ b/sast_controller/drivers/rp/TestItem.py @@ -0,0 +1,101 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
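As a rough usage sketch for the `JiraBaseClient` defined above: it reads `JIRA_HOST`, `JIRA_USR` and `JIRA_PWD` from the environment, so the connection details and the `SEC` project key below are placeholders rather than values shipped with Reapsaw.

```python
import os

from sast_controller.drivers.jira.baseClient import JiraBaseClient

# Placeholder credentials; real values come from the CI environment.
os.environ.setdefault('JIRA_HOST', 'https://jira.example.com')
os.environ.setdefault('JIRA_USR', 'svc-reapsaw')
os.environ.setdefault('JIRA_PWD', 'secret')

jira = JiraBaseClient()
if jira.client and jira.check_project('SEC'):  # 'SEC' is a hypothetical project key
    jira.create_issue({
        'project': {'key': 'SEC'},
        'summary': 'Example finding (illustrative only)',
        'description': 'Created from a usage sketch, not from a real scan.',
        'issuetype': {'name': 'Bug'},
    })
```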
+ + +class TestItem(object): + """Test item for Report Portal""" + + def __init__(self, issue, severity, confidence, description, tool=None, priority='', attachments=None, + defect_type_info=None): + """ + :param issue: + :param severity: + :param confidence: + :param description: + :param tool: + :param priority: + :param attachments: + :param defect_type_info: + """ + self.issue = issue + self.tool = tool + self.severity = severity + self.priority = priority + self.confidence = confidence + self.description = description + self.attachments = attachments + self.msgs = [] + self.tags = {"Tool": tool, "Severity": severity, + "Confidence": confidence, "Priority": priority} + self.params = { + "Tool": tool + } + self.defect_type_info = defect_type_info + + def add_message(self, msg): + """ + Add new message to test item + :param msg: + """ + if self.msgs.count(msg) == 0: + self.msgs.append(msg) + + def add_tag(self, key, value): + """ + Add tag to test item + :param key: + :param value: + """ + self.tags[key] = value + + def add_param(self, param, value): + """ + Add parameter to test item + :param param: + :param value: + """ + self.params[param] = value + + def get_tags(self) -> list: + """ + Get list of tags from test item + """ + tags = [] + for tag in self.tags: + tags.append(f"{tag}: {self.tags[tag]}") + return tags + + def get_params(self) -> dict: + """ + Get list of parameters from test item + """ + params = { + "Severity": self.severity, + "Confidence": self.confidence + } + for k, v in self.params.items(): + params[k] = v + return params + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(self, other.__class__): + is_same_name = (self.issue.count(other.issue) > 0 or other.issue.count(self.issue) > 0) + is_same_link = (self.params == other.params + or (self.params.get("Links") and other.params.get("Links") + and (self.params.get("Links").count(other.params.get("Links")) > 0 + or other.params.get("Links").count(self.params.get("Links")) > 0))) + return ((self.issue == other.issue and self.confidence == other.confidence) + or (is_same_name and is_same_link)) + return False diff --git a/sast_controller/drivers/rp/TestMessage.py b/sast_controller/drivers/rp/TestMessage.py new file mode 100755 index 0000000..f8bf701 --- /dev/null +++ b/sast_controller/drivers/rp/TestMessage.py @@ -0,0 +1,33 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
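A minimal sketch of how a `TestItem` might be populated before being pushed to Report Portal; every field value here is illustrative and not taken from a real scan.

```python
from sast_controller.drivers.rp.TestItem import TestItem

# Illustrative values only; in Reapsaw these are built by the report converters.
item = TestItem(issue='SQL Injection.src/app/db.py',
                severity='High',
                confidence='Certain',
                description='User input reaches a raw SQL query.',
                tool='Checkmarx',
                priority='Major',
                defect_type_info={'RP Defect Type': 'To Investigate', 'RP Comment': ''})
item.add_tag('TestType', 'sast')
item.add_param('Links', 'https://sast.example.com/viewer?pathid=1')  # hypothetical link
print(item.get_tags())    # e.g. ['Tool: Checkmarx', 'Severity: High', ...]
print(item.get_params())  # e.g. {'Severity': 'High', 'Confidence': 'Certain', 'Tool': 'Checkmarx', ...}
```

Note that the `__eq__` override above treats two items as equal when their issue names contain one another and their `Links` parameters overlap, presumably so duplicate findings can be collapsed.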
+ + +class TestMessage(object): + """ Test message object """ + + def __init__(self, message, status): + """ + :param message: + message text + :param status: + message status, should be INFO or ERROR + """ + self.message = message + self.status = status + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(self, other.__class__): + return self.message == other.message and self.status == other.status + return False diff --git a/sast_controller/drivers/rp/__init__.py b/sast_controller/drivers/rp/__init__.py new file mode 100755 index 0000000..a1e97e8 --- /dev/null +++ b/sast_controller/drivers/rp/__init__.py @@ -0,0 +1,28 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +URLS = { + "login_url": "/uat/sso/oauth/token?grant_type=password&password={password}&username={login}", + "get_launch_list_url": "/api/v1/{project}/launch?filter.eq.name={scan}&page.sort=number%2Cdesc&page.page={page}&page.size={size}", # noqa + "compare_url": "/api/v1/{project}/launch/compare?ids={current_launch}&ids={previous_launch}", + "launch_url": "/ui/#{project}/launches/all%7Cpage.page=1&page.size=50&page.sort=start_time,number%2CDESC/{launch_id}?page.page=1&page.size=50&page.sort=start_time%2CASC", # noqa + "update_launch_url": "/api/v1/{project}/launch/{launch_id}", + "update_test_item_url": "/api/v1/{project}/activity/item/{test_item_id}", + "get_project_info_url": "/api/v1/project/{project}", + "get_launch_info_url": "/api/v1/{project}/item?filter.eq.launch={launch_id}&page.page={page}", + "post_ticket_url": "/api/v1/{project}/external-system/{system_id}/ticket", + "put_item_url": "/api/v1/{project}/item", + "load_issue": "/api/v1/{project}/item/issue/add", + "get_log": "/api/v1/{project}/log?filter.eq.item={test_item}&page.page={page}&page.size=100&page.sort=time%2CASC" +} diff --git a/sast_controller/drivers/rp/report_portal_writer.py b/sast_controller/drivers/rp/report_portal_writer.py new file mode 100755 index 0000000..5b8df16 --- /dev/null +++ b/sast_controller/drivers/rp/report_portal_writer.py @@ -0,0 +1,140 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import traceback + +from reportportal_client import ReportPortalServiceAsync +from time import time + +MAX_MESSAGE_LEN = 30000 + + +def timestamp(): + return str(int(time() * 1000)) + + +def my_error_handler(exc_info): + """ + This callback function will be called by async service client when error occurs. 
+ Return True if error is not critical and you want to continue work. + :param exc_info: result of sys.exc_info() -> (type, value, traceback) + :return: + """ + traceback.print_exception(*exc_info) + + +class ReportPortalDataWriter(object): + """Wrapper around async Report Portal service""" + + def __init__(self, endpoint, token, project, launch_name=None, + launch_doc=None, launch_tags=None, verify_ssl=False): + """ + :param endpoint: + link to Report Portal + :param token: + user token + :param project: + Report Portal project name + :param launch_name: + Report Portal launch name + :param launch_doc: + launch description + :param launch_doc: + launch tags + :param verify_ssl: + option to not verify ssl certificates + """ + self.endpoint = endpoint + self.token = token + self.project = project + self.launch_name = launch_name + self.launch_doc = launch_doc + self.launch_tags = launch_tags + self.service = None + self.test = None + self.verify_ssl = verify_ssl + + def start_test(self): + """ + Start new launch in Report Portal + """ + self.service = ReportPortalServiceAsync(endpoint=self.endpoint, + project=self.project, + token=self.token, + error_handler=my_error_handler, + verify_ssl=self.verify_ssl) + self.service.start_launch(name=self.launch_name, + start_time=timestamp(), + description=self.launch_doc, + tags=self.launch_tags) + + def finish_test(self): + """ + Finish started launch in Report Portal + """ + self.service.finish_launch(end_time=timestamp()) + self.service.terminate() + self.service = None + + def is_test_started(self) -> bool: + """ + Return True in case if launch was started + """ + if self.service: + return True + return False + + def start_test_item(self, issue, description, tags, parameters): + """ + Start new test item inside the launch + :param issue: + :param description: + :param tags: + :param parameters: + """ + self.test = self.service.start_test_item(issue, + description=description, + tags=tags, + start_time=timestamp(), + item_type="STEP", + parameters=parameters) + + def test_item_message(self, message, level="ERROR", attachment=None): + """ + Add new log message inside test item + :param message: + :param level: + :param attachment: + """ + self.service.log(time=timestamp(), message=message[:MAX_MESSAGE_LEN], + level=level, attachment=attachment) + + def finish_test_item(self, defect_type_info): + """ + Finish started test item + :param defect_type_info: + """ + defect_mapping = { + 'To Investigate': 'TI001', + 'No Defect': 'ND001', + 'Product Bug': 'PB001', + 'System Issue': 'SI001' + } + defect_type = defect_type_info['RP Defect Type'] + issue = None + if defect_type in defect_mapping: + issue = {'issue_type': defect_mapping[defect_type], 'comment': defect_type_info['RP Comment']} + self.service.finish_test_item(end_time=timestamp(), + status="FAILED", + issue=issue) diff --git a/sast_controller/drivers/rp/rp_portal_controller.py b/sast_controller/drivers/rp/rp_portal_controller.py new file mode 100755 index 0000000..a36f05d --- /dev/null +++ b/sast_controller/drivers/rp/rp_portal_controller.py @@ -0,0 +1,377 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests +import json +import urllib.parse +from copy import deepcopy + +from sast_controller.drivers.rp import URLS + + +class ReportPortalService: + """ + Service that realise Report Portal REST API + """ + + def __init__(self, host, token): + """ + :param host: + Report Portal host URL + :param token: + Report Portal API token + """ + self._token = token + headers = { + 'Accept': 'application/json', + 'Authorization': f'bearer {self._token}', + 'Content-type': 'application/json;charset=UTF-8'} + self.report_portal_link = host + self.ses = requests.Session() + self.ses.headers.update(headers) + self.urls = deepcopy(URLS) + + def close_session(self): + """Close Report Portal session""" + self.ses.close() + + def send_request(self, method, url, body=None, status_codes=None, verify=False): + """ + Send request to Report Portal API + + :param method: + HTTP method + :param url: + request URL + :param body: + request body + :param status_codes: + list of acceptable status codes + :param verify: + set True to verify ssl certificate + :return: + + @:raise Exception in case if response code not in status codes + """ + if status_codes is None: + status_codes = [200] + url_ = urllib.parse.urljoin(self.report_portal_link, url) + if method == "GET": + response = self.ses.get(url_, verify=verify) + elif method == "POST": + response = self.ses.post(url_, body, verify=verify) + elif method == "PUT": + response = self.ses.put(url_, body, verify=verify) + else: + raise Exception(f"Unsupported request method {method}") + if response.status_code in status_codes: + return json.loads(response.text) + if response.text.find('invalid_token') > -1: + raise AssertionError("Invalid Report Portal UUID token. Please verify RP_TOKEN param.") + raise Exception(f"Wrong response.\n" + f"{method} {self.report_portal_link + url}\n" + f"Status code {response.status_code}\n " + f"{response.text}") + + def get_launch_info_by_number(self, project, scan_name, number): + """ + GET /api/v1/{project}/launch?filter.eq.name={scan}&page.sort=number%2Cdesc&page.page={page}&page.size={size} + :param project: + :param scan_name: + :param number: + :return: + launch ID, + { + "owner": "", + "share": , + "id": "", + "name": "", + "number": 4, + "start_time": , + "end_time": , + "status": "", + "statistics": { + "executions": { + "total": "", + "passed": "", + "failed": "", + "skipped": "0" + }, + "defects": { + } + }, + "mode": "DEFAULT", + "isProcessing": false, + "approximateDuration": , + "hasRetries": false + } + """ + url = self.urls["get_launch_list_url"].format( + project=project, scan=urllib.parse.quote_plus(scan_name), + page=number, size=1) + launch_list = self.send_request("GET", url) + try: + content = launch_list['content'][0] + except IndexError: + raise IndexError( + 'There is no {launch} inside {project} project.' 
+ '\nPlease double check Launch name and Project Name.'.format( + launch=scan_name, project=project)) + launch_id = content['id'] + return launch_id, launch_list + + def get_launch_info(self, project, launch_id): + """ + GET /api/v1/{project}/item?filter.eq.launch={launch_id}&page.page={page} + :param project: + :param launch_id: + :return: + { + "content": [], + "page": { + "number": 1, + "size": 20, + "totalElements": 0, + "totalPages": 0 + } + } + """ + info_list = [] + page = 1 + total_pages = 1 + while page <= total_pages: + url = self.urls["get_launch_info_url"].format(project=project, + launch_id=launch_id, + page=page) + req = self.send_request("GET", url) + info_list.append(req) + total_pages = int(req['page']['totalPages']) + page += 1 + return info_list + + def compare_launches(self, project, current_launch, previous_launch): + """ + GET /api/v1/{project}/launch/compare?ids={current_launch}&ids={previous_launch} + :param project: + :param current_launch: + :param previous_launch: + :return: + { + "result": [ + { + "values": {}, + "name": "", + "startTime": "", + "number": "", + "id": "" + } + ] + } + """ + url = self.urls["compare_url"].format(project=project, + current_launch=current_launch, + previous_launch=previous_launch) + return self.send_request("GET", url) + + def get_test_item_log(self, project, test_item): + """ + GET /api/v1/{project}/log?filter.eq.item={test_item}&page.page={page}&page.size=100&page.sort=time%2CASC + :param project: + :param test_item: + :return: + { + "content": [ + { + "id": "", + "time": , + "message": "", + "level": "", + "test_item": "" + } + ], + "page": { + "number": 1, + "size": 20, + "totalElements": 3, + "totalPages": 1 + } + } + """ + content = [] + total_pages = 1 + page = 1 + while page <= total_pages: + url = self.urls["get_log"].format(project=project, + test_item=test_item, + page=page) + response = self.send_request("GET", url) + content += response["content"] + total_pages = int(response['page']['totalPages']) + page += 1 + return content + + def get_prj_info(self, prj): + """ + GET /api/v1/project/{project} + :param prj: + :return: + { + "addInfo": "string", + "configuration": { + "analyzer_mode": "ALL", + "emailConfiguration": { + }, + "entryType": "string", + "externalSystem": [ + ], + "interruptedJob": "ONE_HOUR", + "isAutoAnalyzerEnabled": true, + "keepLogs": "TWO_WEEKS", + "keepScreenshots": "ONE_WEEK", + "projectSpecific": "string", + "statisticCalculationStrategy": "STEP_BASED", + "subTypes": {} + }, + "creationDate": "2019-03-27T12:26:56.203Z", + "customer": "string", + "projectId": "string", + "users": [ + { + "login": "string", + "projectRole": "string", + "proposedRole": "string" + } + ] + } + """ + url = self.urls["get_project_info_url"].format(project=prj) + return self.send_request("GET", url, status_codes=[200, 404]) + + def get_external_system_info(self, project_): + """ + Get external system config + :raise IndexError if no external system in project + :return + { + "accessKey": "string", + "domain": "string", + "fields": [ + { + "definedValues": [ + { + "valueId": "string", + "valueName": "string" + } + ], + "fieldName": "string", + "fieldType": "string", + "id": "string", + "required": true, + "value": [ + "string" + ] + } + ], + "id": "string", + "project": "string", + "projectRef": "string", + "systemAuth": "string", + "systemType": "string", + "url": "string", + "username": "string" + } + """ + project_info = self.get_prj_info(project_) + external_system = project_info["configuration"]["externalSystem"] + if 
len(external_system) == 0: + raise IndexError("No available external system. Please create one.") + return external_system[0] + + def create_project(self, project_name): + """ + Create project is project not exists + POST /api/v1/project + :param project_name: + """ + project_info = self.get_prj_info(project_name) + + if 'Did you use correct project name?' in str(project_info): + url = '/api/v1/project' + post_body = { + "entryType": "INTERNAL", + "projectName": project_name + } + return self.send_request("POST", url, json.dumps(post_body), status_codes=[201]) + return 'Project already exist' + + def update_ext_sys(self, prj, sys_id, params): + """ + POST /api/v1/{prj}/external-system/{sys_id} + params: + {"url":"","systemType":"JIRA","systemAuth":"BASIC","project":"", + "fields":[{"fieldName":"Issue Type","id":"issuetype","fieldType":"issuetype","required":true, + "value":[""],"definedValues":[]}, + {"fieldName":"Summary","id":"summary","fieldType":"string","required":true,"definedValues":[], + "value":[""]}, + {"fieldName":"Assignee","id":"assignee","fieldType":"user","required":true,"definedValues":[], + "value":["test"]}]} + + :return: + """ + url = f'/api/v1/{prj}/external-system/{sys_id}' + return self.send_request("PUT", url, json.dumps(params)) + + def assign_users(self, prj, users): + """ + PUT /api/v1/project/test_new_prj_new/assign + params: + {"userNames":{"user":"ADMIN"}} + :return: + """ + url = f'/api/v1/project/{prj}/assign' + + return self.send_request("PUT", url, json.dumps(users)) + + def setup_external_sys(self, project, params): + """ + POST /api/v1/{project}/external-system + params: + { + "domain": "", + "password": "pwd", + "project": "", + "systemAuth": "Basic", + "systemType": "JIRA", + "url": "", + "username": "userId" + } + + :return: + """ + url = f'/api/v1/{project}/external-system' + return self.send_request("POST", url, json.dumps(params), status_codes=[201]) + + def put_issue_status(self, project, body_params): + """ + PUT /api/v1/{project}/item + :param project: + :param body_params: + :return: + """ + url = self.urls["put_item_url"].format(project=project) + + put_body = '{"issues": [{"issue": {"issue_type": "%s",' \ + '"autoAnalyzed": false,"ignoreAnalyzer": false},' \ + '"test_item_id": "%s"}]}' % (body_params["issue_type"], body_params["test_item_id"]) + return self.send_request("PUT", url, put_body) diff --git a/sast_controller/extractors/__init__.py b/sast_controller/extractors/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/extractors/vulnerability_info.py b/sast_controller/extractors/vulnerability_info.py new file mode 100755 index 0000000..e648dc9 --- /dev/null +++ b/sast_controller/extractors/vulnerability_info.py @@ -0,0 +1,57 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
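A hedged sketch of driving the `ReportPortalService` REST wrapper defined above; the endpoint, UUID token, project name and launch name are placeholders.

```python
from sast_controller.drivers.rp.rp_portal_controller import ReportPortalService

# Placeholder endpoint, token, project and launch name.
rp = ReportPortalService('https://rp.example.com', '00000000-0000-0000-0000-000000000000')
try:
    launch_id, launch_list = rp.get_launch_info_by_number('my_project', 'Reapsaw Scan', 1)
    test_items = rp.get_launch_info('my_project', launch_id)
    print(launch_id, len(test_items))
finally:
    rp.close_session()
```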
+ +import ssl + +from urllib.request import urlopen +from bs4 import BeautifulSoup + +CWE_LINK = "https://cwe.mitre.org/data/definitions/" +CVE_LINK = "https://nvd.nist.gov/vuln/detail/" + + +def get_cwe_info(cwe): + """Return venerability information from Common Weakness Enumeration site by CWE code""" + try: + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + html = urlopen(CWE_LINK + cwe, context=ctx) + soup = BeautifulSoup(html, 'html.parser') + description = soup.find('div', {'id': 'Description'}).find('div', {'class': 'indent'}).text + return description + except Exception as ex: + print(f"Unable to get {CWE_LINK}{cwe}") + print(ex) + return None + + +def get_cve_info(cve): + """Return venerability information from National Vulnerability Database by CVE code""" + try: + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + html = urlopen(CVE_LINK + cve, context=ctx) + soup = BeautifulSoup(html, "lxml") + cve_description = soup.find('p', { + 'data-testid': 'vuln-description'}).text + cve_type = soup.find('li', { + 'data-testid': 'vuln-technical-details-0-link'}).text + cve_type = cve_type[:cve_type.rfind("(CWE")] + return cve_type, cve_description + except Exception as ex: + print(f"Unable to get {CVE_LINK}{cve}") + print(ex) + return None, None diff --git a/sast_controller/tests/convertors/__init__.py b/sast_controller/tests/convertors/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/tests/convertors/checkmarx_report.xml b/sast_controller/tests/convertors/checkmarx_report.xml new file mode 100755 index 0000000..08c91c0 --- /dev/null +++ b/sast_controller/tests/convertors/checkmarx_report.xml @@ -0,0 +1,73 @@ + + + + + + + code/src/MyApp.Api.Web/Controllers/ArticlesController.cs + 553 + 101 + 1 + article + + 7 + + + 553 + public IActionResult PostTranslateDjml([FromBody] JsonApiResponse<ArticleData, MetaArticle> article, [FromServices] IContentTranslation contentTranslation) + + + + + code/src/MyApp.Api.Web/Controllers/ArticlesController.cs + 555 + 56 + 2 + article + + 7 + + + 555 + var xml = contentTranslation.ArticleToDjml(article); + + + + + + + + + code/src/MyApp.Api.Web/Controllers/ArticlesController.cs + 467 + 67 + 1 + id + + 2 + + + 467 + public async Task<IActionResult> GetDjmlByOriginId(string id, [FromHeader(Name= "x-api-key")] string apiKey) + + + + + code/src/MyApp.Api.Web/Controllers/ArticlesController.cs + 474 + 82 + 2 + id + + 2 + + + 474 + var article = await _articleGateway.GetDjmlByExternalIdAsync(id); + + + + + + + diff --git a/sast_controller/tests/convertors/snyk_dotnet.json b/sast_controller/tests/convertors/snyk_dotnet.json new file mode 100755 index 0000000..28932f2 --- /dev/null +++ b/sast_controller/tests/convertors/snyk_dotnet.json @@ -0,0 +1,239 @@ +[ + { + "ok": false, + "vulnerabilities": [ + { + "CVSSv3": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N", + "alternativeIds": [], + "creationTime": "2017-07-19T00:00:00Z", + "credit": [ + "David Fernandez" + ], + "cvssScore": 7.5, + "description": "## Overview\n[`System.Net.Security`](https://www.nuget.org/packages/System.Net.Security) provides types, such as System.Net.Security.SslStream, that uses SSL/TLS protocols to provide secure network communication between client and server endpoints.\n\nAffected versions of this package are vulnerable to Denial of Service (DoS) attacks.\n\nA denial of service vulnerability exists when the `ASP.NET Core` fails to properly validate web 
requests.\n\n**NOTE:** Microsoft has not commented on third-party claims that the issue is that the `TextEncoder.EncodeCore` function in the `System.Text.Encodings.Web` package in ASP.NET Core Mvc before 1.0.4 and 1.1.x before 1.1.3 allows remote attackers to cause a denial of service by leveraging failure to properly calculate the length of 4-byte characters in the Unicode Non-Character range.\n\n## Details\nDenial of Service (DoS) describes a family of attacks, all aimed at making a system inaccessible to its intended and legitimate users.\r\n\r\nUnlike other vulnerabilities, DoS attacks usually do not aim at breaching security. Rather, they are focused on making websites and services unavailable to genuine users resulting in downtime.\r\n\r\nOne popular Denial of Service vulnerability is DDoS (a Distributed Denial of Service), an attack that attempts to clog network pipes to the system by generating a large volume of traffic from many machines.\r\n\r\nWhen it comes to open source libraries, DoS vulnerabilities allow attackers to trigger such a crash or crippling of the service by using a flaw either in the application code or from the use of open source libraries.\r\n\r\nTwo common types of DoS vulnerabilities:\r\n\r\n* High CPU/Memory Consumption- An attacker sending crafted requests that could cause the system to take a disproportionate amount of time to process. For example, [commons-fileupload:commons-fileupload](SNYK-JAVA-COMMONSFILEUPLOAD-30082).\r\n\r\n* Crash - An attacker sending crafted requests that could cause the system to crash. For Example, [npm `ws` package](npm:ws:20171108)\n\n## Remediation\nUpgrade `System.Net.Security` to version 4.0.1, 4.3.1 or higher.\n\n## References\n- [NVD](https://nvd.nist.gov/vuln/detail/2017-0247)\n- [David Fernandez Blog](https://www.sidertia.com/Home/Community/Blog/2017/05/18/ASPNET-Core-Unicode-Non-Char-Encoding-DoS)\n- [Microsoft Security Advisory](https://technet.microsoft.com/en-us/library/security/4021279.aspx)\n- [GitHub Security Advisory](https://github.com/aspnet/Announcements/issues/239)\n", + "disclosureTime": "2017-05-09T00:00:00Z", + "functions": [], + "id": "SNYK-DOTNET-SYSTEMNETSECURITY-60069", + "identifiers": { + "CVE": [ + "CVE-2017-0247" + ], + "CWE": [ + "CWE-254" + ] + }, + "language": "dotnet", + "methods": [], + "modificationTime": "2018-11-18T11:50:50.092784Z", + "moduleName": "system.net.security", + "packageManager": "nuget", + "packageName": "System.Net.Security", + "patches": [], + "publicationTime": "2017-05-09T00:00:00Z", + "references": [ + { + "title": "NVD", + "url": "https://nvd.nist.gov/vuln/detail/2017-0247" + }, + { + "title": "David Fernandez Blog", + "url": "https://www.sidertia.com/Home/Community/Blog/2017/05/18/ASPNET-Core-Unicode-Non-Char-Encoding-DoS" + }, + { + "title": "Microsoft Security Advisory", + "url": "https://technet.microsoft.com/en-us/library/security/4021279.aspx" + }, + { + "title": "GitHub Security Advisory", + "url": "https://github.com/aspnet/Announcements/issues/239" + } + ], + "semver": { + "vulnerable": [ + "[,4.0.1), [4.3,4.3.1)" + ] + }, + "severity": "medium", + "title": "Denial of Service (DoS)", + "from": [ + "test_proj_nameWeb@1.0.0", + "CacheManager.StackExchange.Redis@1.1.2", + "StackExchange.Redis.StrongName@1.2.6", + "System.Net.Security@4.3.0" + ], + "upgradePath": [], + "isUpgradable": false, + "isPatchable": false, + "name": "System.Net.Security", + "version": "4.3.0" + }, + { + "CVSSv3": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N", + "alternativeIds": [], + 
"creationTime": "2017-07-19T00:00:00Z", + "credit": [ + "David Fernandez" + ], + "cvssScore": 7.5, + "description": "## Overview\n[`System.Net.Security`](https://www.nuget.org/packages/System.Net.Security) provides types, such as System.Net.Security.SslStream, that uses SSL/TLS protocols to provide secure network communication between client and server endpoints.\n\nAffected versions of this package are vulnerable to Denial of Service (DoS) attacks.\n\nA denial of service vulnerability exists when the `ASP.NET Core` fails to properly validate web requests.\n\n**NOTE:** Microsoft has not commented on third-party claims that the issue is that the `TextEncoder.EncodeCore` function in the `System.Text.Encodings.Web` package in ASP.NET Core Mvc before 1.0.4 and 1.1.x before 1.1.3 allows remote attackers to cause a denial of service by leveraging failure to properly calculate the length of 4-byte characters in the Unicode Non-Character range.\n\n## Details\nDenial of Service (DoS) describes a family of attacks, all aimed at making a system inaccessible to its intended and legitimate users.\r\n\r\nUnlike other vulnerabilities, DoS attacks usually do not aim at breaching security. Rather, they are focused on making websites and services unavailable to genuine users resulting in downtime.\r\n\r\nOne popular Denial of Service vulnerability is DDoS (a Distributed Denial of Service), an attack that attempts to clog network pipes to the system by generating a large volume of traffic from many machines.\r\n\r\nWhen it comes to open source libraries, DoS vulnerabilities allow attackers to trigger such a crash or crippling of the service by using a flaw either in the application code or from the use of open source libraries.\r\n\r\nTwo common types of DoS vulnerabilities:\r\n\r\n* High CPU/Memory Consumption- An attacker sending crafted requests that could cause the system to take a disproportionate amount of time to process. For example, [commons-fileupload:commons-fileupload](SNYK-JAVA-COMMONSFILEUPLOAD-30082).\r\n\r\n* Crash - An attacker sending crafted requests that could cause the system to crash. 
For Example, [npm `ws` package](npm:ws:20171108)\n\n## Remediation\nUpgrade `System.Net.Security` to version 4.0.1, 4.3.1 or higher.\n\n## References\n- [NVD](https://nvd.nist.gov/vuln/detail/2017-0247)\n- [David Fernandez Blog](https://www.sidertia.com/Home/Community/Blog/2017/05/18/ASPNET-Core-Unicode-Non-Char-Encoding-DoS)\n- [Microsoft Security Advisory](https://technet.microsoft.com/en-us/library/security/4021279.aspx)\n- [GitHub Security Advisory](https://github.com/aspnet/Announcements/issues/239)\n", + "disclosureTime": "2017-05-09T00:00:00Z", + "functions": [], + "id": "SNYK-DOTNET-SYSTEMNETSECURITY-60069", + "identifiers": { + "CVE": [ + "CVE-2017-0247" + ], + "CWE": [ + "CWE-254" + ] + }, + "language": "dotnet", + "methods": [], + "modificationTime": "2018-11-18T11:50:50.092784Z", + "moduleName": "system.net.security", + "packageManager": "nuget", + "packageName": "System.Net.Security", + "patches": [], + "publicationTime": "2017-05-09T00:00:00Z", + "references": [ + { + "title": "NVD", + "url": "https://nvd.nist.gov/vuln/detail/2017-0247" + }, + { + "title": "David Fernandez Blog", + "url": "https://www.sidertia.com/Home/Community/Blog/2017/05/18/ASPNET-Core-Unicode-Non-Char-Encoding-DoS" + }, + { + "title": "Microsoft Security Advisory", + "url": "https://technet.microsoft.com/en-us/library/security/4021279.aspx" + }, + { + "title": "GitHub Security Advisory", + "url": "https://github.com/aspnet/Announcements/issues/239" + } + ], + "semver": { + "vulnerable": [ + "[,4.0.1), [4.3,4.3.1)" + ] + }, + "severity": "high", + "title": "Denial of Service (DoS)", + "from": [ + "test_proj_nameWeb@1.0.0", + "CacheManager.StackExchange.Redis@1.1.2", + "StackExchange.Redis.StrongName@1.2.6", + "System.Net.Security5@4.3.0" + ], + "upgradePath": [], + "isUpgradable": false, + "isPatchable": false, + "name": "System.Net.Security", + "version": "4.3.0" + } + ], + "dependencyCount": 330, + "org": "testesorg", + "licensesPolicy": { + }, + "isPrivate": true, + "packageManager": "nuget", + "policy": "# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.\nversion: v1.13.3\nignore: {}\npatch: {}\n", + "ignoreSettings": null, + "summary": "132 vulnerable dependency paths", + "filesystemPolicy": false, + "filtered": { + "ignore": [], + "patch": [] + }, + "uniqueCount": 23, + "path": "/code/src/test_proj_nameWeb" + }, + { + "ok": false, + "vulnerabilities": [ + { + "CVSSv3": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N", + "alternativeIds": [], + "creationTime": "2017-07-19T00:00:00Z", + "credit": [ + "David Fernandez" + ], + "cvssScore": 7.5, + "description": "## Overview\n[`System.Net.Security`](https://www.nuget.org/packages/System.Net.Security) provides types, such as System.Net.Security.SslStream, that uses SSL/TLS protocols to provide secure network communication between client and server endpoints.\n\nAffected versions of this package are vulnerable to Denial of Service (DoS) attacks.\n\nA denial of service vulnerability exists when the `ASP.NET Core` fails to properly validate web requests.\n\n**NOTE:** Microsoft has not commented on third-party claims that the issue is that the `TextEncoder.EncodeCore` function in the `System.Text.Encodings.Web` package in ASP.NET Core Mvc before 1.0.4 and 1.1.x before 1.1.3 allows remote attackers to cause a denial of service by leveraging failure to properly calculate the length of 4-byte characters in the Unicode Non-Character range.\n\n## Details\nDenial of Service (DoS) describes a family of attacks, all aimed at making a system 
inaccessible to its intended and legitimate users.\r\n\r\nUnlike other vulnerabilities, DoS attacks usually do not aim at breaching security. Rather, they are focused on making websites and services unavailable to genuine users resulting in downtime.\r\n\r\nOne popular Denial of Service vulnerability is DDoS (a Distributed Denial of Service), an attack that attempts to clog network pipes to the system by generating a large volume of traffic from many machines.\r\n\r\nWhen it comes to open source libraries, DoS vulnerabilities allow attackers to trigger such a crash or crippling of the service by using a flaw either in the application code or from the use of open source libraries.\r\n\r\nTwo common types of DoS vulnerabilities:\r\n\r\n* High CPU/Memory Consumption- An attacker sending crafted requests that could cause the system to take a disproportionate amount of time to process. For example, [commons-fileupload:commons-fileupload](SNYK-JAVA-COMMONSFILEUPLOAD-30082).\r\n\r\n* Crash - An attacker sending crafted requests that could cause the system to crash. For Example, [npm `ws` package](npm:ws:20171108)\n\n## Remediation\nUpgrade `System.Net.Security` to version 4.0.1, 4.3.1 or higher.\n\n## References\n- [NVD](https://nvd.nist.gov/vuln/detail/2017-0247)\n- [David Fernandez Blog](https://www.sidertia.com/Home/Community/Blog/2017/05/18/ASPNET-Core-Unicode-Non-Char-Encoding-DoS)\n- [Microsoft Security Advisory](https://technet.microsoft.com/en-us/library/security/4021279.aspx)\n- [GitHub Security Advisory](https://github.com/aspnet/Announcements/issues/239)\n", + "disclosureTime": "2017-05-09T00:00:00Z", + "functions": [], + "id": "SNYK-DOTNET-SYSTEMNETSECURITY-60069", + "identifiers": { + "CVE": [ + "CVE-2017-0247" + ], + "CWE": [ + "CWE-254" + ] + }, + "language": "dotnet", + "methods": [], + "modificationTime": "2018-11-18T11:50:50.092784Z", + "moduleName": "system.net.security1", + "packageManager": "nuget", + "packageName": "System.Net.Security1", + "patches": [], + "publicationTime": "2017-05-09T00:00:00Z", + "references": [ + { + "title": "NVD", + "url": "https://nvd.nist.gov/vuln/detail/2017-0247" + }, + { + "title": "David Fernandez Blog", + "url": "https://www.sidertia.com/Home/Community/Blog/2017/05/18/ASPNET-Core-Unicode-Non-Char-Encoding-DoS" + }, + { + "title": "Microsoft Security Advisory", + "url": "https://technet.microsoft.com/en-us/library/security/4021279.aspx" + }, + { + "title": "GitHub Security Advisory", + "url": "https://github.com/aspnet/Announcements/issues/239" + } + ], + "semver": { + "vulnerable": [ + "[,4.0.1), [4.3,4.3.1)" + ] + }, + "severity": "medium", + "title": "Denial of Service (DoS)", + "from": [ + "test_proj_nameWeb@1.0.0", + "CacheManager.StackExchange.Redis@1.1.2", + "StackExchange.Redis.StrongName@1.2.6", + "System.Net.Security1@4.3.0" + ], + "upgradePath": [], + "isUpgradable": false, + "isPatchable": false, + "name": "System.Net.Security1", + "version": "4.3.0" + } + ], + "dependencyCount": 330, + "org": "testesorg", + "licensesPolicy": { + }, + "isPrivate": true, + "packageManager": "nuget", + "policy": "# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.\nversion: v1.13.3\nignore: {}\npatch: {}\n", + "ignoreSettings": null, + "summary": "132 vulnerable dependency paths", + "filesystemPolicy": false, + "filtered": { + "ignore": [], + "patch": [] + }, + "uniqueCount": 23, + "path": "/code/src/test_proj_nameWeb" + } +] diff --git a/sast_controller/tests/convertors/snyk_report.json 
b/sast_controller/tests/convertors/snyk_report.json new file mode 100755 index 0000000..7b732fc --- /dev/null +++ b/sast_controller/tests/convertors/snyk_report.json @@ -0,0 +1,81 @@ + { + "ok": false, + "vulnerabilities": [ + { + "CVSSv3": "CVSS:3.0/AV:L/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + "alternativeIds": [ + "SNYK-JS-BRACEEXPANSION-10483" + ], + "creationTime": "2017-04-26T09:19:21.663000Z", + "credit": [ + "kamael" + ], + "cvssScore": 6.2, + "description": "## Overview\n[`brace-expansion`](https://www.npmjs.com/package/brace-expansion) is a package that performs brace expansion as known from sh/bash.\nAffected versions of this package are vulnerable to Regular Expression Denial of Service (ReDoS) attacks.\n\n## Details\nDenial of Service (DoS) describes a family of attacks, all aimed at making a system inaccessible to its original and legitimate users. There are many types of DoS attacks, ranging from trying to clog the network pipes to the system by generating a large volume of traffic from many machines (a Distributed Denial of Service - DDoS - attack) to sending crafted requests that cause a system to crash or take a disproportional amount of time to process.\n\nThe Regular expression Denial of Service (ReDoS) is a type of Denial of Service attack. Regular expressions are incredibly powerful, but they aren't very intuitive and can ultimately end up making it easy for attackers to take your site down.\n\nLet's take the following regular expression as an example:\n```js\nregex = /A(B|C+)+D/\n```\n\nThis regular expression accomplishes the following:\n- `A` The string must start with the letter 'A'\n- `(B|C+)+` The string must then follow the letter A with either the letter 'B' or some number of occurrences of the letter 'C' (the `+` matches one or more times). The `+` at the end of this section states that we can look for one or more matches of this section.\n- `D` Finally, we ensure this section of the string ends with a 'D'\n\nThe expression would match inputs such as `ABBD`, `ABCCCCD`, `ABCBCCCD` and `ACCCCCD`\n\nIt most cases, it doesn't take very long for a regex engine to find a match:\n\n```bash\n$ time node -e '/A(B|C+)+D/.test(\"ACCCCCCCCCCCCCCCCCCCCCCCCCCCCD\")'\n0.04s user 0.01s system 95% cpu 0.052 total\n\n$ time node -e '/A(B|C+)+D/.test(\"ACCCCCCCCCCCCCCCCCCCCCCCCCCCCX\")'\n1.79s user 0.02s system 99% cpu 1.812 total\n```\n\nThe entire process of testing it against a 30 characters long string takes around ~52ms. But when given an invalid string, it takes nearly two seconds to complete the test, over ten times as long as it took to test a valid string. The dramatic difference is due to the way regular expressions get evaluated.\n\nMost Regex engines will work very similarly (with minor differences). The engine will match the first possible way to accept the current character and proceed to the next one. If it then fails to match the next one, it will backtrack and see if there was another way to digest the previous character. If it goes too far down the rabbit hole only to find out the string doesn't match in the end, and if many characters have multiple valid regex paths, the number of backtracking steps can become very large, resulting in what is known as _catastrophic backtracking_.\n\nLet's look at how our expression runs into this problem, using a shorter string: \"ACCCX\". While it seems fairly straightforward, there are still four different ways that the engine could match those three C's:\n1. CCC\n2. CC+C\n3. C+CC\n4. 
C+C+C.\n\nThe engine has to try each of those combinations to see if any of them potentially match against the expression. When you combine that with the other steps the engine must take, we can use [RegEx 101 debugger](https://regex101.com/debugger) to see the engine has to take a total of 38 steps before it can determine the string doesn't match.\n\nFrom there, the number of steps the engine must use to validate a string just continues to grow.\n\n| String | Number of C's | Number of steps |\n| -------|-------------:| -----:|\n| ACCCX | 3 | 38\n| ACCCCX | 4 | 71\n| ACCCCCX | 5 | 136\n| ACCCCCCCCCCCCCCX | 14 | 65,553\n\n\nBy the time the string includes 14 C's, the engine has to take over 65,000 steps just to see if the string is valid. These extreme situations can cause them to work very slowly (exponentially related to input size, as shown above), allowing an attacker to exploit this and can cause the service to excessively consume CPU, resulting in a Denial of Service.\n\n\n## Remediation\nUpgrade `brace-expansion` to version 1.1.7 or higher.\n\n## References\n- [GitHub PR](https://github.com/juliangruber/brace-expansion/pull/35)\n- [GitHub Issue](https://github.com/juliangruber/brace-expansion/issues/33)\n- [GitHub Commit](https://github.com/juliangruber/brace-expansion/pull/35/commits/b13381281cead487cbdbfd6a69fb097ea5e456c3)\n", + "disclosureTime": "2017-03-01T22:00:00Z", + "functions": [], + "id": "npm:brace-expansion:20170302", + "identifiers": { + "ALTERNATIVE": [ + "SNYK-JS-BRACEEXPANSION-10483" + ], + "CVE": [ + "CVE-2017-18077" + ], + "CWE": [ + "CWE-400" + ], + "NSP": [ + 338 + ] + }, + "language": "js", + "methods": [], + "modificationTime": "2018-11-22T10:10:07.912837Z", + "moduleName": "brace-expansion", + "packageManager": "npm", + "packageName": "brace-expansion", + "patches": [], + "publicationTime": "2017-04-26T09:19:21.663000Z", + "references": [ + { + "title": "GitHub Commit", + "url": "https://github.com/juliangruber/brace-expansion/pull/35/commits/b13381281cead487cbdbfd6a69fb097ea5e456c3" + }, + { + "title": "GitHub Issue", + "url": "https://github.com/juliangruber/brace-expansion/issues/33" + }, + { + "title": "GitHub PR", + "url": "https://github.com/juliangruber/brace-expansion/pull/35" + } + ], + "semver": { + "vulnerable": [ + "<1.1.7" + ] + }, + "severity": "medium", + "title": "Regular Expression Denial of Service (ReDoS)", + "from": [ + "MyAPP@0.0.1", + "@nyc.dotted@7.1.0", + "glob@7.0.5", + "minimatch@3.0.2", + "brace-expansion@1.1.6" + ], + "upgradePath": [ + false, + "@nyc.dotted@8.1.0", + "glob@7.0.5", + "minimatch@3.0.2", + "brace-expansion@1.1.7" + ], + "isUpgradable": true, + "isPatchable": false, + "name": "brace-expansion", + "version": "1.1.6" + } + ] + } diff --git a/sast_controller/tests/convertors/test_checkmarx_report.py b/sast_controller/tests/convertors/test_checkmarx_report.py new file mode 100755 index 0000000..374b270 --- /dev/null +++ b/sast_controller/tests/convertors/test_checkmarx_report.py @@ -0,0 +1,177 @@ +import copy +import os +import unittest +from unittest import mock + +from sast_controller.converters import CheckmarxReport + +BUG_BAR_CSS = { + "Cross-site Scripting (XSS)": { + "is_issue": "", + "risk_rating": "High", + "jira_priority": "Major", + "burp": "", + "grouped": "", + "cxsast": "Cross-site scripting (reflected);Reflected_XSS_All_Clients;Stored_XSS", + "description": {}, + "recommendation": {} + }, +} + +BUG_BAR_SQLI = { + "SQL Injection": { + "is_issue": "", + "risk_rating": "Critical", + "jira_priority": "Major", + 
"burp": "SQL Injection", + "grouped": "", + "cxsast": "SQL Injection in Content Provider;SQL_Injection" + }, +} + +EXPECTED_REPORT = [{ + 'Attachments': [], + 'CVE': '', + 'CWE': '[CWE-79|https://cwe.mitre.org/data/definitions/79]', + 'Description': ' Cross-site Scripting (XSS)\n' + ' GROUP: CSharp_High_Risk\n' + ' CATEGORY: A7-Cross-Site Scripting (XSS)\n' + ' *Code*:\n' + ' ``` public IActionResult PostTranslateDjml([FromBody] ' + 'JsonApiResponse article, ```', + 'Instances': 'File code/src/MyApp.Api.Web/Controllers/ArticlesController.cs', + 'Issue Confidence': 'Certain', + 'Issue Name': 'Cross-site Scripting (XSS).code/src/MyApp.Api.Web/Controllers/ArticlesController.cs', + 'Issue Priority': 'Major', + 'Issue Severity': 'High', + 'Issue Tool': 'Checkmarx', + 'Jira Name': 'Cross-site Scripting (XSS)', + 'Links': 'https://sast.mysite.com/CxWebClient/ViewerMain.aspx?scanid=1027717&projectid=3076&pathid=11', + 'Overview': '', + 'Paths': '', + 'RP Comment': '', + 'RP Defect Type': 'To Investigate', + 'Recommendations': 'Please review and modify vulnerable code in line 553 of ArticlesController.cs', + 'References': 'Line 553 in file ' + '[code/src/MyApp.Api.Web/Controllers/ArticlesController.cs|https://sast.mysite.com/CxWebClient/' + 'ViewerMain.aspx?scanid=1027717&projectid=3076&pathid=11]', + 'Repo': 'https://github.com/myrepo', + 'Snippet': 'public IActionResult PostTranslateDjml([FromBody] ' + 'JsonApiResponse article, ' + '[FromServices] IContentTranslation contentTranslation)', + 'Steps To Reproduce': '', + 'Tags': [ + {'TestType': 'sast'}, + {'Provider': 'Reapsaw'}, + {'Tool': 'Checkmarx'}], + 'URLs': '', + 'error_string': 'Cross-site Scripting (XSS) 79\n' + 'code/src/MyApp.Api.Web/Controllers/ArticlesController.cs'}, { + 'Attachments': [], + 'CVE': '', + 'CWE': '[CWE-79|https://cwe.mitre.org/data/definitions/79]', + 'Description': ' Cross-site Scripting (XSS)\n' + ' GROUP: CSharp_High_Risk\n' + ' CATEGORY: A7-Cross-Site Scripting (XSS)\n' + ' *Code*:\n' + ' ``` public async Task ' + 'GetDjmlByOriginId(string id, [FromHeader(Name= ' + '"x-api-key")] string ```', + 'Instances': 'File code/src/MyApp.Api.Web/Controllers/ArticlesController.cs', + 'Issue Confidence': 'Certain', + 'Issue Name': 'Cross-site Scripting (XSS).code/src/MyApp.Api.Web/Controllers/ArticlesController.cs', + 'Issue Priority': 'Major', + 'Issue Severity': 'High', + 'Issue Tool': 'Checkmarx', + 'Jira Name': 'Cross-site Scripting (XSS)', + 'Links': 'https://sast.mysite.com/CxWebClient/ViewerMain.aspx?scanid=1027717&projectid=3076&pathid=12', + 'Overview': '', + 'Paths': '', + 'RP Comment': 'Oleksii C my_project, [Monday, July 29, 2019 10:36:13 AM]: ' + 'Changed status to Confirmed\r\n' + 'Oleksii C my_project, [Monday, July 29, 2019 10:35:47 AM]: ' + 'Changed status to Not Exploitable', + 'RP Defect Type': 'Product Bug', + 'Recommendations': 'Please review and modify vulnerable code in line 553 of ArticlesController.cs', + 'References': 'Line 467 in file ' + '[code/src/MyApp.Api.Web/Controllers/ArticlesController.cs|https://sast.mysite.com/CxWebClient/' + 'ViewerMain.aspx?scanid=1027717&projectid=3076&pathid=12]', + 'Repo': 'https://github.com/myrepo', + 'Snippet': 'public async Task GetDjmlByOriginId(string ' + 'id, [FromHeader(Name= "x-api-key")] string apiKey)', + 'Steps To Reproduce': '', + 'Tags': [ + {'TestType': 'sast'}, + {'Provider': 'Reapsaw'}, + {'Tool': 'Checkmarx'}], + 'URLs': '', + 'error_string': 'Cross-site Scripting (XSS) 79\n' + 'code/src/MyApp.Api.Web/Controllers/ArticlesController.cs'} +] + + +class 
TestCheckmarxReport(unittest.TestCase): + def setUp(self): + return_json_patcher = mock.patch('sast_controller.bug_bar.bug_bar.read_json') + self.mock_return_json = return_json_patcher.start() + self.addCleanup(return_json_patcher.stop) + self.maxDiff = None + + @mock.patch.dict(os.environ, {'REPO': 'https://github.com/myrepo'}) + @mock.patch('sast_controller.drivers.cx.Checkmarx.Checkmarx') + def test_report(self, cx_klass): + self.mock_return_json.return_value = BUG_BAR_CSS + cx_report = CheckmarxReport.CheckmarxReport( + os.path.dirname(os.path.abspath(__file__)) + '/checkmarx_report.xml') + self.assertEqual(EXPECTED_REPORT, cx_report.report) + self.assertEqual({'Checkmarx': set()}, cx_report.new_items) + + @mock.patch.dict(os.environ, {'REPO': 'https://github.com/myrepo'}) + @mock.patch('sast_controller.drivers.cx.Checkmarx.Checkmarx') + def test_report_not_in_bug_bar(self, cx_klass): + self.mock_return_json.return_value = BUG_BAR_SQLI + cx_report = CheckmarxReport.CheckmarxReport( + os.path.dirname(os.path.abspath(__file__)) + '/checkmarx_report.xml') + self.assertEqual([], cx_report.report) + self.assertEqual({'Checkmarx': {'Reflected_XSS_All_Clients'}}, cx_report.new_items) + + @mock.patch.dict(os.environ, {'REPO': 'https://github.com/myrepo'}) + @mock.patch('sast_controller.drivers.cx.Checkmarx.Checkmarx') + def test_report_not_an_issue(self, cx_klass): + new_bug_bar = copy.deepcopy(BUG_BAR_CSS) + new_bug_bar['Cross-site Scripting (XSS)']['is_issue'] = 'FALSE' + self.mock_return_json.return_value = new_bug_bar + cx_report = CheckmarxReport.CheckmarxReport( + os.path.dirname(os.path.abspath(__file__)) + '/checkmarx_report.xml') + self.assertEqual([], cx_report.report) + self.assertEqual({'Checkmarx': set()}, cx_report.new_items) + + @mock.patch.dict(os.environ, {'REPO': 'https://github.com/myrepo', 'BRANCH': 'develop'}) + @mock.patch('sast_controller.drivers.cx.Checkmarx.Checkmarx') + def test_report_git(self, cx_klass): + self.mock_return_json.return_value = BUG_BAR_CSS + cx_report = CheckmarxReport.CheckmarxReport( + os.path.dirname(os.path.abspath(__file__)) + '/checkmarx_report.xml') + expected = copy.deepcopy(EXPECTED_REPORT) + for _ in expected: + _['Instances'] = \ + 'File ' \ + 'https://github.com/myrepo/blob/develop/code/src/MyApp.Api.Web/Controllers/ArticlesController.cs' + self.assertEqual(expected, cx_report.report) + self.assertEqual({'Checkmarx': set()}, cx_report.new_items) + + @mock.patch('sast_controller.drivers.cx.Checkmarx.Checkmarx') + def test_report_bug_bar_desc_csharp(self, cx_klass): + # possible languages in CX: javascript ; csharp; java; scala + expected = "test custom description" + expected_rec = "test custom rec" + test = copy.deepcopy(BUG_BAR_CSS) + test["Cross-site Scripting (XSS)"]['description'] = {"csharp": expected} + test["Cross-site Scripting (XSS)"]['recommendation'] = {"csharp": expected_rec} + self.mock_return_json.return_value = test + cx_report = CheckmarxReport.CheckmarxReport( + os.path.dirname(os.path.abspath(__file__)) + '/checkmarx_report.xml') + + item = cx_report.report[0] + self.assertIn(expected, item['Description']) + self.assertEqual(expected_rec, item['Recommendations']) diff --git a/sast_controller/tests/convertors/test_convertor.py b/sast_controller/tests/convertors/test_convertor.py new file mode 100755 index 0000000..d4eb759 --- /dev/null +++ b/sast_controller/tests/convertors/test_convertor.py @@ -0,0 +1,122 @@ +import os +import unittest +from unittest import mock + +from sast_controller.converters import Converter +from 
sast_controller.converters import SnykReport + +BUG_BAR = { + "Vulnerable Software": { + "risk_rating": "High", + "jira_priority": "Major", + "burp": "", + "cxsast": "", + "dev": "", + "infra": "", + "notes": "", + "": "" + } +} + + +EXPECTED_ERR_MSG = '@nyc.dotted' +EXPECTED_INFO_MSG = ( + 'h3.*Instances:*\n' + '@nyc.dotted\n' + 'h3.*Recommendations:*\n' + '\n' + 'Upgrade `@nyc.dotted` to version 8.1.0 or higher\n' + 'h3.*Overview:*\n' + '{panel:title=Regular Expression Denial of Service (ReDoS)}*Description*: \n' + '*Vulnerable Package:* brace-expansion\n' + '*Current Version:* 1.1.6\n' + '*Vulnerable Version(s):* <1.1.7\n' + ' \n' + ' *Remediation:*\n' + 'Upgrade `brace-expansion` to version 1.1.7 or higher.\n' + '\n' + '\n' + ' Overview\n' + '[`brace-expansion`](https://www.npmjs.com/package/brace-expansion) is a ' + 'package that performs brace expansion as known from sh/bash.\n' + 'Affected versions of this package are vulnerable to Regular Expression ' + 'Denial of Service (ReDoS) attacks.\n' + '\n' + '*References*: \n' + '- [GitHub PR](https://github.com/juliangruber/brace-expansion/pull/35)\n' + '- [GitHub Issue](https://github.com/juliangruber/brace-expansion/issues/33)\n' + '- [GitHub ' + 'Commit](https://github.com/juliangruber/brace-expansion/pull/35/commits/' + 'b13381281cead487cbdbfd6a69fb097ea5e456c3)\n' + '\n' + '*Paths*: \n' + 'MyAPP>@nyc.dotted@7.1.0>glob@7.0.5>minimatch@3.0.2>brace-expansion@1.1.6\n' + '\n' + '{panel}\n' +) + +TEST_ISSUE = {'Issue Name': 'Prototype Pollution.nyc', + 'Issue Tool': 'Snyk', + 'Steps To Reproduce': '', + 'Issue Priority': 'Major', + 'Issue Severity': 'Medium', + 'Issue Confidence': 'Certain', + 'Recommendations': 'Upgrade `nyc` to version 11.7.2 or higher', + 'Paths': 'MyApp>nyc@7.1.0>istanbul-lib-instrument@1.1.0-alpha.4>babel-generator@6.11.4' + '>babel-types@6.11.1>lodash@4.13.1\n\n' + 'MyApp>nyc@7.1.0>istanbul-lib-instrument@1.1.0-alpha.4>' + 'babel-traverse@6.11.4>lodash@4.13.1\n\n' + 'MyApp>nyc@7.1.0>istanbul-lib-instrument@1.1.0-alpha.4>babel-traverse@6.11.4' + '>babel-types@6.11.1>lodash@4.13.1', + 'Description': '*Vulnerable Package:* lodash\n' + '*Current Version:* 4.13.1\n*Vulnerable Version(s):* <4.17.5\n \n *Remediation:*\n' + 'Upgrade `lodash` to version 4.17.5 or higher.\n\n\n Overview\n' + 'Affected versions of this package are vulnerable to Prototype ' + 'Pollution. \nThe utilities function allow modification of the `Object` prototype.\n\n\n ', + 'upgrades': [False, 'nyc@7.1.0', 'istanbul-lib-instrument@1.1.0', 'babel-template@6.9.0', + 'lodash@4.17.5'], 'language': 'js', 'RP Defect Type': 'No Defect'} + +EXPECTED_INFO_MESSAGE = ( + '{panel:title=Prototype Pollution}*Description*: \n' + '*Vulnerable Package:* lodash\n' + '*Current Version:* 4.13.1\n' + '*Vulnerable Version(s):* <4.17.5\n \n' + ' *Remediation:*\n' + 'Upgrade `lodash` to version 4.17.5 or higher.\n\n\n' + ' Overview\n' + 'Affected versions of this package are vulnerable to Prototype Pollution. 
\n' + 'The utilities function allow modification of the `Object` prototype.\n\n' + '*Paths*: \n' + 'MyApp>nyc@7.1.0>istanbul-lib-instrument@1.1.0-alpha.4' + '>babel-generator@6.11.4>babel-types@6.11.1>lodash@4.13.1\n' + 'MyApp>nyc@7.1.0>istanbul-lib-instrument@1.1.0-alpha.4' + '>babel-traverse@6.11.4>babel-types@6.11.1>lodash@4.13.1\n' + 'MyApp>nyc@7.1.0>istanbul-lib-instrument@1.1.0-alpha.4' + '>babel-traverse@6.11.4>lodash@4.13.1\n\n' + '{panel}' +) + + +class TestConverter(unittest.TestCase): + def setUp(self): + return_json_patcher = mock.patch('sast_controller.bug_bar.bug_bar.read_json') + self.mock_return_json = return_json_patcher.start() + self.addCleanup(return_json_patcher.stop) + self.maxDiff = None + + def test_get_rp_items_snyk(self): + self.mock_return_json.return_value = BUG_BAR + snyk_report_file = os.path.dirname(os.path.abspath(__file__)) + '/snyk_report.json' + models = {snyk_report_file: SnykReport.SnykReport} + converter = Converter.Converter(models) + items = converter.get_rp_items() + self.assertEqual('Medium', items[0].severity) + self.assertEqual('Major', items[0].priority) + self.assertEqual('Certain', items[0].confidence) + self.assertEqual([], items[0].attachments) + self.assertEqual({'RP Defect Type': 'No Defect', 'RP Comment': ''}, items[0].defect_type_info) + self.assertEqual(EXPECTED_ERR_MSG, items[0].msgs[0].message) + self.assertEqual(EXPECTED_INFO_MSG, items[0].msgs[1].message) + + def test_get_get_info_msg(self): + self.assertEqual(EXPECTED_INFO_MESSAGE, Converter.Converter.get_info_msg(TEST_ISSUE, '')) diff --git a/sast_controller/tests/convertors/test_snyk_report.py b/sast_controller/tests/convertors/test_snyk_report.py new file mode 100755 index 0000000..325a39a --- /dev/null +++ b/sast_controller/tests/convertors/test_snyk_report.py @@ -0,0 +1,305 @@ +import copy +import json +import os +import unittest +from unittest import mock + +from sast_controller.bug_bar import bug_bar +from sast_controller.converters import SnykReport +from json import loads + +BUG_BAR = { + "Vulnerable Software": { + "risk_rating": "High", + "jira_priority": "Major", + "burp": "", + "cxsast": "", + "dev": "", + "infra": "", + "notes": "", + "": "" + } +} + + +def get_bug_bar_mock(file_path=None, default=None): + return BUG_BAR + + +EXPECTED_REPORT = [{ + 'Attachments': [], + 'CVE': '', + 'CWE': 'CWE-400', + 'Description': '*Vulnerable Package:* brace-expansion\n' + '*Current Version:* 1.1.6\n' + '*Vulnerable Version(s):* <1.1.7\n' + ' \n' + ' *Remediation:*\n' + 'Upgrade `brace-expansion` to version 1.1.7 or higher.\n' + '\n' + '\n' + ' Overview\n' + '[`brace-expansion`](https://www.npmjs.com/package/brace-expansion) ' + 'is a package that performs brace expansion as known ' + 'from sh/bash.\n' + 'Affected versions of this package are vulnerable to ' + 'Regular Expression Denial of Service (ReDoS) attacks.\n' + '\n' + '\n' + ' ', + 'Instances': '@nyc.dotted', + 'Issue Confidence': 'Certain', + 'Issue Name': 'Regular Expression Denial of Service (ReDoS).@nyc.dotted', + 'Issue Priority': 'Major', + 'Issue Severity': 'Medium', + 'Issue Tool': 'Snyk', + 'Jira Name': 'Vulnerable Software', + 'Overview': '', + 'Paths': 'MyAPP>@nyc.dotted@7.1.0>glob@7.0.5>minimatch@3.0.2>brace-expansion@1.1.6', + 'Recommendations': 'Upgrade `@nyc.dotted` to version 8.1.0 or higher', + 'References': '\n' + '- [GitHub ' + 'PR](https://github.com/juliangruber/brace-expansion/pull/35)\n' + '- [GitHub ' + 'Issue](https://github.com/juliangruber/brace-expansion/issues/33)\n' + '- [GitHub ' + 
'Commit](https://github.com/juliangruber/brace-expansion/pull/35/commits/b13381281cead487cbdbfd6a69' + 'fb097ea5e456c3)\n', + 'Repo': '', + 'Steps To Reproduce': '', + 'Tags': [{'TestType': 'sast'}, {'Provider': 'Reapsaw'}, {'Tool': 'Snyk'}], + 'URLs': '', + 'error_string': '@nyc.dotted', + 'language': 'js', + 'top_level_module': '@nyc.dotted', + 'upgrades': [False, + '@nyc.dotted@8.1.0', + 'glob@7.0.5', + 'minimatch@3.0.2', + 'brace-expansion@1.1.7'], + 'RP Defect Type': 'No Defect'}] + + +class TestSnykReport(unittest.TestCase): + def setUp(self): + original_read_json = bug_bar.read_json + bug_bar.read_json = get_bug_bar_mock + self.addCleanup(setattr, bug_bar, 'read_json', original_read_json) + self.maxDiff = None + + def test_report_upgrade_recommendation(self): + snyk_report = SnykReport.SnykReport(os.path.dirname(os.path.abspath(__file__)) + '/snyk_report.json') + self.assertEqual(EXPECTED_REPORT, snyk_report.report) + + @mock.patch('sast_controller.converters.SnykReport.SnykReport.__init__') + def test_report_reinstall_recommendation(self, report_constructor): + report_constructor.return_value = None + snyk_report = SnykReport.SnykReport('test') + with open(os.path.dirname(os.path.abspath(__file__)) + '/snyk_report.json', 'r') as report_file: + report_json = json.loads(report_file.read()) + report_json['vulnerabilities'][0]['upgradePath'][1] = report_json['vulnerabilities'][0]['from'][1] + snyk_report.report = report_json + snyk_report.new_items = dict() + snyk_report.report = snyk_report._canonify() + expected_report = copy.deepcopy(EXPECTED_REPORT) + expected_report[0]['upgrades'][1] = '@nyc.dotted@7.1.0' + expected_report[0]['Recommendations'] = ( + 'Your dependencies are out of date. Please remove your `node_modules` directory and lock file, run ' + '`npm install` and commit new lock file to your repo. 
Note, this will likely make a lot of changes to ' + 'lock file.') + self.assertEqual(expected_report, snyk_report.report) + + @mock.patch('sast_controller.converters.SnykReport.SnykReport.__init__') + def test_report_no_reinstall_if_not_js(self, report_constructor): + report_constructor.return_value = None + snyk_report = SnykReport.SnykReport('test') + with open(os.path.dirname(os.path.abspath(__file__)) + '/snyk_report.json', 'r') as report_file: + report_json = json.loads(report_file.read()) + report_json['vulnerabilities'][0]['upgradePath'][1] = report_json['vulnerabilities'][0]['from'][1] + report_json['vulnerabilities'][0]['language'] = 'scala' + snyk_report.report = report_json + snyk_report.new_items = dict() + snyk_report.report = snyk_report._canonify() + self.assertEqual([], snyk_report.report) + + def test_report_dotnet_severity(self): + snyk_report = SnykReport.SnykReport(os.path.dirname(os.path.abspath(__file__)) + '/snyk_dotnet.json') + self.assertEqual(snyk_report.report[0]['Issue Severity'], 'High') + self.assertEqual(1, len(snyk_report.report)) + + def test_report_dotnet_grouping(self): + with open(os.path.dirname(os.path.abspath(__file__)) + '/snyk_dotnet.json') as f: + data = loads(f.read()) + # update severity + data[0]['vulnerabilities'][1]['severity'] = 'low' + # changed upgrade path to test grouping + data[0]['vulnerabilities'][0]['from'][1] = 'test_grouping' + snyk_report = SnykReport.SnykReport(os.path.dirname(os.path.abspath(__file__)) + '/snyk_dotnet.json') + snyk_report.report = data + snyk_report.report = snyk_report._canonify() + self.assertEqual(snyk_report.report[0]['Issue Severity'], 'Medium') + self.assertEqual(2, len(snyk_report.report)) + + def test_report_example_grouping(self): + with open(os.path.dirname(os.path.abspath(__file__)) + '/snyk_report.json') as f: + data = loads(f.read()) + # update severity + + test_data = "**Example:**\r\n```js\r\nqs.parse('toString=foo', { allowPrototypes: false })```" + # added code snippet + data['vulnerabilities'][0]['description'] = '## Overview\r\n[`qs`]' + test_data + snyk_report = SnykReport.SnykReport(os.path.dirname(os.path.abspath(__file__)) + '/snyk_report.json') + snyk_report.report = data + snyk_report.report = snyk_report._canonify() + + self.assertIn('Overview\r\n[`qs`]', snyk_report.report[0]['Description']) + + self.assertNotIn('**Example:**', snyk_report.report[0]['Description']) + + @mock.patch('builtins.open') + @mock.patch('json.loads') + def test_get_exact_top_deps(self, json_loads, open_file): + json_loads.return_value = { + 'dependencies': { + 'exact_dep': '1.1.0', + 'dep_a': '~1.0.0', + 'dep_b': '^1.0.0', + 'dep_c': '>=3.3.0', + 'dep_d': '<=3.3.0' + } + } + self.assertEqual(['exact_dep@1.1.0'], SnykReport.SnykReport.get_exact_top_deps()) + + @mock.patch('sast_controller.converters.SnykReport.SnykReport.get_exact_top_deps') + def test_filter_vulnerable_paths(self, get_top_deps): + get_top_deps.return_value = ['A@1.0.0', 'B@1.0.0'] + vulnerabilities = [ + { + 'Paths': { + 'this_package_name@1.0.0>A@1.0.0>E@1.0.0>F@1.0.0' + }, + 'Recommendations': 'Re-install...' + }, + { + 'Paths': { + 'this_package_name@1.0.0>B@1.0.0>E@1.0.0>F@1.0.0' + }, + 'Recommendations': 'Re-install...' + }, + { + 'Paths': { + 'this_package_name@1.0.0>C@1.0.0>A@1.0.0>E@1.0.0>F@1.0.0', + 'this_package_name@1.0.0>C@1.0.0>G@2.2.2', + }, + 'Recommendations': 'Re-install...' + }, + { + 'Paths': { + 'this_package_name@1.0.0>H@1.0.0>B@1.0.0>E@1.0.0>F@1.0.0' + }, + 'Recommendations': 'Re-install...' 
+ }, + { + 'Paths': { + 'this_package_name@1.0.0>I@1.0.0>B@2.0.0>E@2.0.0>F@2.0.0' + }, + 'Recommendations': 'Re-install...' + } + ] + expected_vulns = copy.deepcopy(vulnerabilities) + expected_vulns[2]['Paths'].remove('this_package_name@1.0.0>C@1.0.0>A@1.0.0>E@1.0.0>F@1.0.0') + del expected_vulns[3] + filtered_vulns = SnykReport.SnykReport.filter_vulnerable_paths(vulnerabilities) + self.assertEqual(4, len(filtered_vulns)) + self.assertEqual(expected_vulns, filtered_vulns) + + @mock.patch('sast_controller.converters.SnykReport.SnykReport.get_item') + @mock.patch('sast_controller.converters.SnykReport.SnykReport.__init__') + def test_group_vulnerabilities(self, report_constructor, get_item_mock): + report_constructor.return_value = None + snyk_report = SnykReport.SnykReport('test') + + def get_item(item): + return item + get_item_mock.side_effect = get_item + vulnerabilities = [ + { + 'from': ['thisapp', 'A@1.0.0', 'A_A@1.0.0', 'A_A_A@1.0.0'], + 'upgrades': [False, 'A@1.0.1', 'A_A@1.0.0', 'A_A_A@1.0.0'], + 'language': 'js', + 'top_level_module': 'A', + 'Recommendations': 'Update A_A_A to some version', + 'Issue Severity': 'High', + 'Paths': {'A@1.0.1>A_A@1.0.0>A_A_A@1.0.0'} + }, + { + 'from': ['thisapp', 'A@1.0.0', 'A_A@1.0.0', 'A_A_A@1.0.0'], + 'upgrades': [False, 'A@1.0.1', 'B_B@1.0.0', 'A_A_A@1.0.0'], + 'language': 'js', + 'top_level_module': 'A', + 'Recommendations': 'Update A_A_A to some version', + 'Issue Severity': 'Medium', + 'Paths': {'A@1.0.1>B_B@1.0.0>A_A_A@1.0.0'} + }, + { + 'from': ['thisapp', 'A@1.0.0', 'B_B@1.0.0', 'D_D_D@1.0.0'], + 'upgrades': [False, 'A@1.0.1', 'B_B@1.0.0', 'D_D_D@1.0.0'], + 'language': 'js', + 'top_level_module': 'A', + 'Recommendations': 'Update D_D_D to some version', + 'Issue Severity': 'High', + 'Paths': {'A@1.0.1>A_A@1.0.0>D_D_D@1.0.0'} + }, + { + 'from': ['thisapp', 'B@1.0.0', 'B_B@1.0.0', 'C_C_C@1.0.0'], + 'upgrades': [False, 'B@1.0.0', 'B_B@1.0.0', 'C_C_C@1.0.0'], + 'language': 'js', + 'top_level_module': 'B', + 'Recommendations': 'Update C_C_C to some version', + 'Issue Severity': 'Medium' + }, + { + 'from': ['thisapp', 'B@1.0.0', 'B_B@1.0.0', 'C_C_C@1.0.0'], + 'upgrades': [], + 'language': 'js', + 'top_level_module': 'B', + 'Recommendations': 'Update C_C_C to some version', + 'Issue Severity': 'Medium' + } + ] + grouped_vulns = snyk_report.group_vulnerabilities(vulnerabilities) + expected_groped_vulns = [ + { + 'from': ['thisapp', 'A@1.0.0', 'A_A@1.0.0', 'A_A_A@1.0.0'], + 'Issue Severity': 'High', + 'Paths': {'A@1.0.1>A_A@1.0.0>A_A_A@1.0.0', 'A@1.0.1>B_B@1.0.0>A_A_A@1.0.0'}, + 'RP Defect Type': 'Product Bug', + 'Recommendations': 'Update A_A_A to some version', + 'language': 'js', + 'top_level_module': 'A', + 'upgrades': [False, 'A@1.0.1', 'A_A@1.0.0', 'A_A_A@1.0.0'] + }, + { + 'from': ['thisapp', 'A@1.0.0', 'B_B@1.0.0', 'D_D_D@1.0.0'], + 'Issue Severity': 'High', + 'Paths': {'A@1.0.1>A_A@1.0.0>D_D_D@1.0.0'}, + 'RP Defect Type': 'Product Bug', + 'Recommendations': 'Update D_D_D to some version', + 'language': 'js', + 'top_level_module': 'A', + 'upgrades': [False, 'A@1.0.1', 'B_B@1.0.0', 'D_D_D@1.0.0'] + }, + { + 'from': ['thisapp', 'B@1.0.0', 'B_B@1.0.0', 'C_C_C@1.0.0'], + 'Issue Severity': 'Medium', + 'RP Defect Type': 'No Defect', + 'Recommendations': 'Update C_C_C to some version', + 'language': 'js', + 'top_level_module': 'B', + 'upgrades': [False, 'B@1.0.0', 'B_B@1.0.0', 'C_C_C@1.0.0'] + } + + + ] + self.assertEqual(3, len(grouped_vulns)) + self.assertEqual(expected_groped_vulns, grouped_vulns) diff --git 
a/sast_controller/tests/drivers/__init__.py b/sast_controller/tests/drivers/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/sast_controller/tests/drivers/jira_overview.html b/sast_controller/tests/drivers/jira_overview.html new file mode 100755 index 0000000..72fde08 --- /dev/null +++ b/sast_controller/tests/drivers/jira_overview.html @@ -0,0 +1,87 @@ + + + + + + + + + + +
+Failure to Preserve Web Page Structure ('Cross-site Scripting')
+Weakness ID: 79 (Weakness Base)   Status: Usable
+Description
+Description Summary
+The software does not sufficiently validate, filter, escape, and/or encode user-controllable input before it is placed in output that is used as a web page that is served to other users.
+Extended Description
+Cross-site scripting (XSS) vulnerabilities occur when:
+1. Untrusted data enters a web application, typically from a web request.
+2. The web application dynamically generates a web page that contains this untrusted data.
+3. During page generation, the application does not prevent the data from containing content that is executable by a web browser, such as JavaScript, HTML tags, HTML attributes, mouse events, Flash, ActiveX, etc.
+4. A victim visits the generated web page through a web browser, which contains malicious script that was injected using the untrusted data.
+5. Since the script comes from a web page that was sent by the web server, the victim's web browser executes the malicious script in the context of the web server's domain.
+6. This effectively violates the intention of the web browser's same-origin policy, which states that scripts in one domain should not be able to access resources or run code in a different domain.
+There are three main kinds of XSS:
+In many cases, the attack can be launched without the victim even being aware of it. Even with careful users, attackers frequently use a variety of methods to encode the malicious portion of the attack, such as URL encoding or Unicode, so the request looks less suspicious.
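The CWE-79 text above comes down to one rule: encode user-controllable data for the context it lands in before writing it into a page. A minimal Python sketch of that rule (illustration only; the helper name and markup are hypothetical):

```python
import html


def render_comment(comment: str) -> str:
    # Escape HTML metacharacters so untrusted input is rendered as text,
    # never interpreted as markup or script by the browser.
    return '<p>{}</p>'.format(html.escape(comment, quote=True))


# '<script>alert(1)</script>' comes back as '&lt;script&gt;alert(1)&lt;/script&gt;'
print(render_comment('<script>alert(1)</script>'))
```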
diff --git a/sast_controller/tests/drivers/jira_recommendation.html b/sast_controller/tests/drivers/jira_recommendation.html new file mode 100755 index 0000000..bbb2bd0 --- /dev/null +++ b/sast_controller/tests/drivers/jira_recommendation.html @@ -0,0 +1,56 @@
+Client_DOM_Stored_XSS
+Risk
+What might happen
+An attacker could use legitimate access to the application to submit engineered data to the application’s database. When another user subsequently accesses this data, web pages may be rewritten and malicious scripts may be activated.
+Cause
+How does it happen
+The application creates web pages that include data from the application’s database. The data is embedded directly in the page's HTML, causing the browser to display it as part of the web page. This data may have originated in input from another user. If the data includes HTML fragments or Javascript, these are displayed too, and the user cannot tell that this is not the intended page. The vulnerability is the result of embedding arbitrary database data without first encoding it in a format that would prevent the browser from treating it like HTML instead of plain text.
+General Recommendations
+How to avoid it
+1. Validate all dynamic data, regardless of source. Validation should be based on a whitelist: accept only data fitting a specified structure, rather than reject bad patterns. Check for:
+   • Data type
+   • Size
+   • Range
+   • Format
+   • Expected values
+2. Validation is not a replacement for encoding. Fully encode all dynamic data, regardless of source, before embedding it in output. Encoding should be context-sensitive. For example:
+   • HTML encoding for HTML content
+   • HTML attribute encoding for data output to attribute values
+   • Javascript encoding for server-generated Javascript.
+3. Consider using either the ESAPI encoding library, or its built-in functions. For earlier versions of ASP.NET, consider using the AntiXSS library.
+4. In the Content-Type HTTP response header, explicitly define character encoding (charset) for the entire page.
+5. Set the httpOnly flag on the session cookie, to prevent XSS exploits from stealing the cookie.
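As a rough Python illustration of items 4 and 5 above (explicit charset in Content-Type, HttpOnly session cookie), assuming a bare WSGI application; the host, port, and cookie value are placeholders:

```python
from wsgiref.simple_server import make_server


def app(environ, start_response):
    headers = [
        # Item 4: declare the character encoding explicitly.
        ('Content-Type', 'text/html; charset=utf-8'),
        # Item 5: HttpOnly keeps the session cookie away from page scripts.
        ('Set-Cookie', 'session=example-session-id; HttpOnly; Secure'),
    ]
    start_response('200 OK', headers)
    return [b'<p>ok</p>']


if __name__ == '__main__':
    make_server('127.0.0.1', 8000, app).serve_forever()
```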
+Source Code Examples
+CSharp
+Data obtained from the execution of an SQL command is output to a label
+for more information :http://cwe.mitre.org/data/definitions/79.html + diff --git a/sast_controller/tests/drivers/test_checkmarx.py b/sast_controller/tests/drivers/test_checkmarx.py new file mode 100755 index 0000000..9fdc9af --- /dev/null +++ b/sast_controller/tests/drivers/test_checkmarx.py @@ -0,0 +1,177 @@ +import logging +import os +import unittest +from unittest import mock + +from sast_controller.drivers.cx import Checkmarx + + +class TestCheckmarx(unittest.TestCase): + def setUp(self): + logging.disable(logging.CRITICAL) + env_vars = { + 'CX_URL': 'www.checkmarx.com', + 'OWNER': 'cx_owner', + 'PASSWORD': 'cx_password', + 'CX_WSDL_VERSION': '1' + + } + env_patcher = mock.patch.dict(os.environ, env_vars) + env_patcher.start() + self.addCleanup(env_patcher.stop) + + cx_connection_patcher = mock.patch('sast_controller.drivers.cx.Checkmarx.CheckmarxConnection') + self.cx_connection_class = cx_connection_patcher.start() + self.addCleanup(cx_connection_patcher.stop) + + @mock.patch.object(Checkmarx.Checkmarx, 'get_associated_groups') + @mock.patch.object(Checkmarx.Checkmarx, 'find_project_by_name') + def test_checkmarx(self, find_project_by_name, get_associated_groups): + sdk_client = mock.MagicMock() + web_client = mock.MagicMock() + + def get_client(client_type=None): + if client_type is None: + return sdk_client + return web_client + cx_connection = self.cx_connection_class() + cx_connection.get_client.side_effect = get_client + + cx_client = Checkmarx.Checkmarx(project='test_project') + + self.cx_connection_class.assert_called_with('www.checkmarx.com', 'cx_owner', 'cx_password') + self.assertEqual('{http://Checkmarx.com/1}', cx_client.version) + cx_connection.get_client.assert_called() + find_project_by_name.assert_called_with('test_project') + get_associated_groups.assert_called() + self.assertEqual(sdk_client, cx_client.client) + self.assertEqual(web_client, cx_client.web_portal_client) + self.assertEqual(True, cx_client.valid) + + def test_get_type(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + get_type_func = cx_client.client.get_type.return_value = mock.MagicMock() + ret = cx_client.get_type('cx_type', 'arg1', 'arg2') + cx_client.client.get_type.assert_called_with('{http://Checkmarx.com/1}cx_type') + get_type_func.assert_called_with('arg1', 'arg2') + self.assertIsNotNone(ret) + + @mock.patch.object(Checkmarx.Checkmarx, 'get_type') + def test_report_type(self, get_type): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.report_type() + get_type.assert_called_with('CxWSReportType', 'XML') + self.assertIsNotNone(ret) + + @mock.patch.object(Checkmarx.Checkmarx, 'report_type') + @mock.patch.object(Checkmarx.Checkmarx, 'get_type') + def test_report_request(self, get_type, report_type): + cx_client = Checkmarx.Checkmarx(project='test_project') + report_type_mock = mock.Mock() + report_type.return_value = report_type_mock + ret = cx_client.report_request('2') + get_type.assert_called_with('CxWSReportRequest', report_type_mock, '2') + self.assertIsNotNone(ret) + + def test_get_result_description(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_result_description('1', '2') + cx_client.web_portal_client.service.GetResultDescription.assert_called_with(cx_client.session, '1', '2') + self.assertIsNotNone(ret) + + def test_get_query_description_by_query_id(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_query_description_by_query_id('22') + 
cx_client.web_portal_client.service.GetQueryDescriptionByQueryId.assert_called_with(cx_client.session, '22') + self.assertIsNotNone(ret) + + def test_get_cwe_description(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_cwe_description('22') + cx_client.web_portal_client.service.GetCWEDescription.assert_called_with(cx_client.session, '22') + self.assertIsNotNone(ret) + + @mock.patch.object(Checkmarx, 'extract_zip') + @mock.patch.object(Checkmarx.Checkmarx, 'get_type') + def test_run_scan(self, get_type, extract_zip): + cx_client = Checkmarx.Checkmarx(project='test_project') + get_type_func = mock.Mock() + get_type.return_value = get_type_func + ret = cx_client.run_scan('/root/path/to/project', incremental_scan=False) + get_type_calls = [mock.call('ProjectSettings'), + mock.call('SourceCodeSettings'), + mock.call('LocalCodeContainer'), + mock.call('SourceFilterPatterns')] + get_type.assert_has_calls(get_type_calls) + cx_client.client.get_type.assert_called_with(f'{cx_client.version}CliScanArgs') + self.assertIsNotNone(ret) + + @mock.patch.object(Checkmarx.Checkmarx, 'report_request') + def test_create_scan_report(self, report_request): + cx_client = Checkmarx.Checkmarx(project='test_project') + report_request_value = mock.Mock() + report_request.return_value = report_request_value + ret = cx_client.create_scan_report('23') + cx_client.client.service.CreateScanReport.assert_called_with(cx_client.session, report_request_value) + report_request.assert_called_with('23') + self.assertIsNotNone(ret) + + def test_get_scan_report_status(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_scan_report_status('123') + cx_client.client.service.GetScanReportStatus.assert_called_with(cx_client.session, '123') + self.assertIsNotNone(ret) + + def test_get_status_of_single_run(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_status_of_single_run('122') + cx_client.client.service.GetStatusOfSingleScan.assert_called_with(cx_client.session, '122') + self.assertIsNotNone(ret) + + def test_get_project_scanned_display_data(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_project_scanned_display_data() + cx_client.client.service.GetProjectScannedDisplayData.assert_called_with(cx_client.session) + self.assertIsNotNone(ret) + + def test_get_scan_report(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_scan_report('1') + cx_client.client.service.GetScanReport.assert_called_with(cx_client.session, '1') + self.assertIsNotNone(ret) + + def test_get_associated_groups(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_associated_groups() + cx_client.client.service.GetAssociatedGroupsList.assert_called_with(cx_client.session) + self.assertIsNotNone(ret) + + def test_get_projects_display_data(self): + cx_client = Checkmarx.Checkmarx(project='test_project') + ret = cx_client.get_projects_display_data() + cx_client.client.service.GetProjectsDisplayData.assert_called_with(cx_client.session) + self.assertIsNotNone(ret) + + @mock.patch.object(Checkmarx.Checkmarx, 'get_type') + def test_set_new_project_config(self, get_type): + cx_client = Checkmarx.Checkmarx(project='test_project') + cx_client.set_new_project_config(mock.Mock(), mock.Mock()) + get_type.assert_called_with('ProjectSettings') + + @mock.patch.object(Checkmarx.Checkmarx, 'get_projects_display_data') + def test_find_project_by_name(self, 
get_projects_display_data): + cx_client = Checkmarx.Checkmarx(project='test_project') + + class DummyProject(): + ProjectName = 'test_project' + projectID = '42' + + dummy_project = DummyProject() + project_config = mock.Mock() + cx_client.client.service.GetProjectConfiguration.return_value = mock.Mock(ProjectConfig=project_config) + get_projects_display_data.return_value = [dummy_project] + + ret = cx_client.find_project_by_name('test_project') + get_projects_display_data.assert_called() + cx_client.client.service.GetProjectConfiguration.assert_called_with(cx_client.session, '42') + self.assertEqual(project_config, ret) diff --git a/sast_controller/tests/drivers/test_checkmarx_connection.py b/sast_controller/tests/drivers/test_checkmarx_connection.py new file mode 100755 index 0000000..e92f390 --- /dev/null +++ b/sast_controller/tests/drivers/test_checkmarx_connection.py @@ -0,0 +1,45 @@ +import unittest +from unittest import mock + +from sast_controller.drivers.cx import CheckmarxConnection + + +class TestCheckmarxConnection(unittest.TestCase): + def setUp(self): + requests_session_patcher = mock.patch('sast_controller.drivers.cx.CheckmarxConnection.Session') + self.requests_session_class = requests_session_patcher.start() + self.addCleanup(requests_session_patcher.stop) + + zeep_client_patcher = mock.patch('sast_controller.drivers.cx.CheckmarxConnection.Client') + self.zeep_client_class = zeep_client_patcher.start() + self.addCleanup(zeep_client_patcher.stop) + + zeep_transport_patcher = mock.patch('sast_controller.drivers.cx.CheckmarxConnection.Transport') + self.zeep_transport_class = zeep_transport_patcher.start() + self.addCleanup(zeep_transport_patcher.stop) + + def test_checkmarx_connection(self): + CheckmarxConnection.CheckmarxConnection('hostname', 'username', 'password') + self.requests_session_class.assert_called() + self.zeep_transport_class.assert_called_with(session=self.requests_session_class()) + self.zeep_client_class.assert_called_with('hostname/cxwebinterface/cxwsresolver.asmx?wsdl', + transport=self.zeep_transport_class()) + + def test_client_url(self): + cx_conn = CheckmarxConnection.CheckmarxConnection('hostname', 'username', 'password') + cx_conn._resolver_client.service.GetWebServiceUrl('SDK', 1).ServiceURL = 'service_url' + cx_conn._resolver_client.service.GetWebServiceUrl.assert_called_with('SDK', 1) + self.assertEqual('service_url', cx_conn.get_client_url()) + + cx_conn._resolver_client.service.GetWebServiceUrl('SDK_2', 1).ServiceURL = 'service_url_2' + cx_conn._resolver_client.service.GetWebServiceUrl.assert_called_with('SDK_2', 1) + self.assertEqual('service_url_2', cx_conn.get_client_url()) + + def test_get_client(self): + cx_conn = CheckmarxConnection.CheckmarxConnection('hostname', 'username', 'password') + cx_conn._resolver_client.service.GetWebServiceUrl('SDK', 1).ServiceURL = 'service_url' + client = cx_conn.get_client() + self.zeep_client_class.assert_called_with('service_url?wsdl', transport=cx_conn.transport, strict=False) + zeep_client = self.zeep_client_class() + zeep_client.service.Login.assert_called_with({'User': 'username', 'Pass': 'password'}, 1033) + self.assertEqual(client, cx_conn.clients['SDK']) diff --git a/sast_controller/tests/drivers/test_jira_client.py b/sast_controller/tests/drivers/test_jira_client.py new file mode 100755 index 0000000..fd29277 --- /dev/null +++ b/sast_controller/tests/drivers/test_jira_client.py @@ -0,0 +1,55 @@ +import unittest +from unittest import mock +from unittest.mock import MagicMock + +from 
sast_controller.bin.json_to_jira import send_to_jira + +JSON_ITEMS = [ + { + "Issue Name": "Secrets Exposed.src/builder.js", + "Issue Priority": "Major", + "Issue Severity": "High", + "Description": "While adding general comments is very useful, some programmers tend to leave important data, " + "such as: filenames related to the web application, old links or links which were not meant to " + "be browsed by users, old code fragments, etc.", + "Tags": [ + { + "TestType": "sast" + }, + { + "Tool": "Checkmarx" + } + ], + "Jira Name": "Secrets Exposed", + "Jira Description": "While adding general comments is very useful, some programmers tend to leave important " + "data, such as: filenames related to the web application, old links or links which were " + "not meant to be browsed by users, old code fragments, etc.", + } +] + +EXPECTED_ISSUE_DATA = { + 'project': {'key': 'DBG'}, + 'summary': 'Secrets Exposed', + 'description': 'While adding general comments is very useful, some programmers tend to leave important ' + 'data, such as: filenames related to the web application, old links or links which were ' + 'not meant to be browsed by users, old code fragments, etc.', + 'issuetype': {'name': 'Vulnerability'}, + 'assignee': {'name': 'testuser'}, + 'priority': {'name': 'Critical'}, + 'labels': ['sast', 'Checkmarx'] +} + + +class TestJiraReport(unittest.TestCase): + @mock.patch('sast_controller.drivers.jira.baseClient.JiraBaseClient.connect') + def test_create_issue(self, jira_client): + with mock.patch('jira.JIRA') as MockJIRA: + instance = MockJIRA.return_value + instance.create_issue.return_value = MagicMock(key='DBG-1') + instance.check_project.return_value = True + jira_client.return_value = MockJIRA() + + send_to_jira('DBG', 'testuser', 'Vulnerability', json_items=JSON_ITEMS) + + instance.search_issues.assert_called_with('project = DBG') + instance.create_issue.assert_called_with(fields=EXPECTED_ISSUE_DATA) diff --git a/sast_controller/tests/drivers/test_utils.py b/sast_controller/tests/drivers/test_utils.py new file mode 100755 index 0000000..cd8f3b9 --- /dev/null +++ b/sast_controller/tests/drivers/test_utils.py @@ -0,0 +1,48 @@ +import os +import unittest +from unittest import mock + +from sast_controller.drivers.cx import utils + +JIRA_RECOMMENDATION = '# Validate all dynamic data, regardless of source. Validation should be based on a whitelist: ' \ + 'accept only data fitting a specified structure, rather than reject bad patterns. ' \ + 'Check for:\n#* Data type\n#* Size\n#* Range\n#* Format\n#* Expected values\n# Validation is ' \ + 'not a replacement for encoding. Fully encode all dynamic data, regardless of source, before ' \ + 'embedding it in output. Encoding should be context-sensitive. For example:\n#* HTML encoding ' \ + 'for HTML content\n#* HTML attribute encoding for data output to attribute values\n#* ' \ + 'Javascript encoding for server-generated Javascript.\n# Consider using either the ESAPI ' \ + 'encoding library, or its built-in functions. For earlier versions of ASP.NET, consider using ' \ + 'the AntiXSS library.\n# In the Content-Type HTTP response header, explicitly define character ' \ + 'encoding (charset) for the entire page.\n# Set the httpOnly flag on the session cookie, to ' \ + 'prevent XSS exploits from stealing the cookie.\n\n' + + +JIRA_OVERVIEW = 'The software does not sufficiently validate, filter, escape, and/or encode user-controllable input ' \ + 'before it is placed in output that is used as a web page that is served to other users.' 
+ + +class TestUtils(unittest.TestCase): + + def test_get_jira_recommendations(self): + cx_client = mock.MagicMock() + file_name = os.path.dirname(os.path.abspath(__file__)) + '/jira_recommendation.html' + with open(file_name, 'r') as recommendation_file: + recommendation = recommendation_file.read() + cx_client.get_query_description_by_query_id.return_value = recommendation + jira_recommendation = utils.get_jira_recommendations(cx_client, '123') + self.assertEqual(JIRA_RECOMMENDATION, jira_recommendation) + + def test_get_jira_overview(self): + cx_client = mock.MagicMock() + file_name = os.path.dirname(os.path.abspath(__file__)) + '/jira_overview.html' + with open(file_name, 'r') as overview_file: + overview = overview_file.read() + cx_client.get_cwe_description.return_value = overview + jira_overview = utils.get_jira_overview(cx_client, '123') + self.assertEqual(JIRA_OVERVIEW, jira_overview) + + def test_get_jira_tag(self): + self.assertEqual('', utils.get_jira_tag('script')) + self.assertEqual('#', utils.get_jira_tag('ol')) + self.assertEqual('*', utils.get_jira_tag('ul')) + self.assertEqual('', utils.get_jira_tag('li')) diff --git a/setup.py b/setup.py new file mode 100755 index 0000000..08cf5bc --- /dev/null +++ b/setup.py @@ -0,0 +1,42 @@ +# Copyright (c) 2018 Dow Jones & Company, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from setuptools import setup, find_packages + +setup( + name='DowJones SAST Container Core', + version='1.0.0', + description='Core component', + long_description='', + url='https://www.dowjons.com', + license='Apache License 2.0', + author='DowJones', + author_email='artem.rozumenko@dowjones.com', + packages=find_packages(), + install_requires=['bandit==1.5.1', 'junit-xml==1.8', 'requests==2.21.0', 'zeep==2.5.0', 'PyYAML==3.12', + 'bs4==0.0.1', 'slackclient==1.2.1', 'jira==1.0.15', 'sonarqube-api==1.3.1', + 'configparser==3.5.0', 'PyJWT==1.6.4', 'cryptography==2.2.2', + 'xmltodict==0.11.0', 'junit2html==21', 'markdownify==0.4.1', 'node-semver==0.6.1'], + include_package_data=True, + entry_points={ + 'console_scripts': [ + 'bugbar_to_json = sast_controller.bug_bar.bugbar_to_json:main', + 'create_jira_tickets = sast_controller.bin.create_jira_tickets:main', + 'generate_reports = sast_controller.bin.generate_reports:main', + 'notifications = sast_controller.bin.notifications:main', + 'scan = sast_controller.bin.scan:main', + 'push_to_jira = sast_controller.bin.json_to_jira:main', + ] + }, +) diff --git a/tox.ini b/tox.ini new file mode 100755 index 0000000..23ffa37 --- /dev/null +++ b/tox.ini @@ -0,0 +1,27 @@ +[tox] +envlist = py36 +skipsdist = true + +[testenv] +deps = flake8 +commands = flake8 sast_controller/ + +[testenv:install] +basepython = python3.6 +commands = python setup.py install + +[testenv:test] +basepython = python3.6 +deps = flake8 + -r{toxinidir}/requirements.txt +commands = + flake8 sast_controller/ + python -m unittest discover sast_controller/tests/ + +[flake8] +max-line-length = 120 +show-source = true +# W605 invalid escape sequence, should be investigated and removed +# W503 Line break occurred before a binary operator, this seems to be deprecated and contradicts W504 +ignore = W605, W503 +max-complexity=25
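Assuming `tox` is installed and a Python 3.6 interpreter is available, the environments defined above can be exercised roughly as follows:

```shell
pip install tox
tox              # default env (py36): flake8 over sast_controller/
tox -e test      # flake8 plus unittest discovery under sast_controller/tests/
tox -e install   # python setup.py install
```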