Add P&E database credentials, update dependencies, upgrade aws terraform provider #1428

Merged Apr 6, 2022 (30 commits)

Changes from 21 commits

Commits
7aea62a
Add Pe db credentials to stage.tfvars worker and vars.tf
aloftus23 Mar 17, 2022
783c5de
Merge branch 'master' into add-pe-db-creds
aloftus23 Mar 17, 2022
d03af92
Add sixgill logging
aloftus23 Mar 17, 2022
29c9083
Add await to writefile in hibp and domMasq sync
aloftus23 Mar 17, 2022
e8f3069
Fix pe-scripts writefile and env passing
aloftus23 Mar 17, 2022
c2ac5f1
Merge branch 'master' into add-pe-db-creds
aloftus23 Mar 17, 2022
9fe1f4c
update mitmproxy to 8.0.0
aloftus23 Mar 31, 2022
c527a19
Update cryptography to support mitmproxy 8.0.0
aloftus23 Mar 31, 2022
68e7f62
Add cryptography dep: cargo
aloftus23 Mar 31, 2022
8b44b2f
Create directories to store temporary json data for PE
aloftus23 Mar 31, 2022
12c6dd2
Fix PE environment variables
aloftus23 Mar 31, 2022
407f6fb
Directly pass env vars for PE scripts
aloftus23 Mar 31, 2022
56d908a
Simplify env passing
aloftus23 Mar 31, 2022
e64dcd3
Increase hibp and dnstwist memory
aloftus23 Mar 31, 2022
96214e0
Clean up env var passing and logging
aloftus23 Apr 1, 2022
52a6af3
Add pe db creds to worker.tf
aloftus23 Apr 4, 2022
9e7b05c
Update main.tf for Terraform AWS Provider version 4.0.0
aloftus23 Apr 4, 2022
d5de54d
Update main.tf to handle terraform aws provider 4.0
aloftus23 Apr 4, 2022
8619c81
fix typo
aloftus23 Apr 4, 2022
f6a4801
mitmproxy update and added pe creds
aloftus23 Apr 4, 2022
c7ecae3
hibpSyncFiles folder
aloftus23 Apr 4, 2022
cc9196a
make version 4 (tf v4) to worker and frontend
aloftus23 Apr 4, 2022
2625318
Run terraform formatting
aloftus23 Apr 4, 2022
61313a3
Use aws_iam_role_policy_attachment instead of aws_iam_policy_attachment
aloftus23 Apr 4, 2022
6c02a3b
Update to Terraform AWS Provider v4
aloftus23 Apr 5, 2022
01b721a
remove source from aws provider
aloftus23 Apr 5, 2022
c6a3225
remove alias setting from aws provider
aloftus23 Apr 5, 2022
64b8feb
use for_each to attach multiple policies to accessor instance
aloftus23 Apr 5, 2022
3df713d
Revert back to old policy config
aloftus23 Apr 5, 2022
e537722
Fix prod output failure and remove prevent_destroy
aloftus23 Apr 6, 2022
2 changes: 1 addition & 1 deletion backend/Dockerfile.worker
@@ -30,7 +30,7 @@ FROM node:14-alpine3.14

WORKDIR /app

RUN apk add --update --no-cache wget build-base curl git unzip openssl-dev linux-headers python3=3.9.5-r2 python3-dev py3-pip ruby=2.7.5-r0 ruby-dev zlib-dev libffi-dev libxml2-dev libxslt-dev postgresql-dev gcc musl-dev py3-pandas py3-scikit-learn
RUN apk add --update --no-cache wget build-base curl git unzip openssl-dev linux-headers python3=3.9.5-r2 python3-dev py3-pip ruby=2.7.5-r0 ruby-dev zlib-dev libffi-dev libxml2-dev libxslt-dev postgresql-dev gcc musl-dev py3-pandas py3-scikit-learn cargo

RUN npm install -g pm2@4 wait-port@0.2.9

9 changes: 9 additions & 0 deletions backend/env.yml
@@ -8,6 +8,9 @@ staging:
PE_DB_NAME: ${ssm:/crossfeed/staging/PE_DB_NAME~true}
PE_DB_USERNAME: ${ssm:/crossfeed/staging/PE_DB_USERNAME~true}
PE_DB_PASSWORD: ${ssm:/crossfeed/staging/PE_DB_PASSWORD~true}
SIXGILL_CLIENT_ID: ${ssm:/crossfeed/staging/SIXGILL_CLIENT_ID~true}
SIXGILL_CLIENT_SECRET: ${ssm:/crossfeed/staging/SIXGILL_CLIENT_SECRET~true}
PE_SHODAN_API_KEYS: ${ssm:/crossfeed/staging/PE_SHODAN_API_KEYS~true}
JWT_SECRET: ${ssm:/crossfeed/staging/APP_JWT_SECRET~true}
LOGIN_GOV_REDIRECT_URI: ${ssm:/crossfeed/staging/LOGIN_GOV_REDIRECT_URI~true}
LOGIN_GOV_BASE_URL: ${ssm:/crossfeed/staging/LOGIN_GOV_BASE_URL~true}
@@ -41,6 +44,12 @@ prod:
DB_NAME: ${ssm:/crossfeed/prod/DATABASE_NAME~true}
DB_USERNAME: ${ssm:/crossfeed/prod/DATABASE_USER~true}
DB_PASSWORD: ${ssm:/crossfeed/prod/DATABASE_PASSWORD~true}
PE_DB_NAME: ${ssm:/crossfeed/prod/PE_DB_NAME~true}
PE_DB_USERNAME: ${ssm:/crossfeed/prod/PE_DB_USERNAME~true}
PE_DB_PASSWORD: ${ssm:/crossfeed/prod/PE_DB_PASSWORD~true}
SIXGILL_CLIENT_ID: ${ssm:/crossfeed/prod/SIXGILL_CLIENT_ID~true}
SIXGILL_CLIENT_SECRET: ${ssm:/crossfeed/prod/SIXGILL_CLIENT_SECRET~true}
PE_SHODAN_API_KEYS: ${ssm:/crossfeed/staging/PE_SHODAN_API_KEYS~true}
JWT_SECRET: ${ssm:/crossfeed/prod/APP_JWT_SECRET~true}
LOGIN_GOV_REDIRECT_URI: ${ssm:/crossfeed/prod/LOGIN_GOV_REDIRECT_URI~true}
LOGIN_GOV_BASE_URL: ${ssm:/crossfeed/prod/LOGIN_GOV_BASE_URL~true}
4 changes: 4 additions & 0 deletions backend/src/api/scans.ts
@@ -169,6 +169,8 @@ export const SCAN_SCHEMA: ScanSchema = {
type: 'fargate',
isPassive: true,
global: false,
cpu: '2048',
memory: '16384',
description:
'Finds emails that have appeared in breaches related to a given domain'
},
@@ -182,6 +184,8 @@
type: 'fargate',
isPassive: true,
global: false,
cpu: '2048',
memory: '16384',
description:
'Domain name permutation engine for detecting similar registered domains.'
},
5 changes: 3 additions & 2 deletions backend/src/tasks/peDomMasq.ts
@@ -2,7 +2,7 @@ import { CommandOptions } from './ecs-client';
import { spawnSync } from 'child_process';
import { connectToDatabase, Vulnerability } from '../models';
import * as path from 'path';
import { writeFileSync } from 'fs';
import { promises as fs } from 'fs';
import { getPeEnv } from './helpers/getPeEnv';

const DOM_MASQ_DIRECTORY = '/app/worker/pe_scripts/peDomMasq';
@@ -21,7 +21,8 @@ export const handler = async (commandOptions: CommandOptions) => {
.andWhere("vulnerability.source = 'dnstwist'")
.getRawMany();
const input_path = path.join(DOM_MASQ_DIRECTORY, organizationId + '.json');
writeFileSync(input_path, JSON.stringify(data));

await fs.writeFile(input_path, JSON.stringify(data));

const child = spawnSync(
'python3',
1 change: 1 addition & 0 deletions backend/worker/pe_scripts/hibpSyncFiles/README.md
@@ -0,0 +1 @@
This folder stores temporary json data when passing data to sync_hibp_pe.py.
1 change: 1 addition & 0 deletions backend/worker/pe_scripts/peDomMasq/README.md
@@ -0,0 +1 @@
This folder stores temporary json data when passing data to sync_dnstwist_pe.py.
4 changes: 1 addition & 3 deletions backend/worker/pe_scripts/sixgill/run_cybersixgill.py
@@ -36,8 +36,6 @@
back = timedelta(days=16)
from_date = (to_date - back).strftime("%Y-%m-%d %H:%M:%S")
to_date = to_date.strftime("%Y-%m-%d %H:%M:%S")
print(to_date)
print(from_date)


def cve(cveid):
@@ -109,7 +107,7 @@ def getDataSource(conn, source):
print("Failed fetching Cybersixgill aliases.")
print(traceback.format_exc())

"""Insert/Update Aliases into PE databse instance"""
"""Insert/Update Aliases into PE database instance"""
try:
# aliases_list = json.loads(aliases.replace("'", '"'))
alias_df = pd.DataFrame(aliases, columns=["alias"])
4 changes: 2 additions & 2 deletions backend/worker/requirements.txt
@@ -1,7 +1,7 @@
requests-http-signature==0.2.0
requests==2.24.0
mitmproxy==7.0.3
cryptography==3.3.2
mitmproxy==8.0.0
cryptography==36.0.2
pytest==6.0.1
scrapy==2.6.1
dnstwist==20201228
6 changes: 3 additions & 3 deletions docs/src/documentation-pages/dev/pe.md
@@ -18,9 +18,9 @@ The local database will contain the entire schema. The only table with any data
Before deploying. Generate a secure secret value for a database password, then run the following commands on the terraformer instance:

```
aws ssm put-parameter --name "/crossfeed/staging/PE_DATABASE_NAME" --value "pe" --type "SecureString"
aws ssm put-parameter --name "/crossfeed/staging/PE_DATABASE_USER" --value "pe" --type "SecureString"
aws ssm put-parameter --name "/crossfeed/staging/PE_DATABASE_PASSWORD" --value "[generated secret password]" --type "SecureString"
aws ssm put-parameter --name "/crossfeed/staging/PE_DB_NAME" --value "pe" --type "SecureString"
aws ssm put-parameter --name "/crossfeed/staging/PE_DB_USER" --value "pe" --type "SecureString"
aws ssm put-parameter --name "/crossfeed/staging/PE_DB_PASSWORD" --value "[generated secret password]" --type "SecureString"
```

### Sync DB
41 changes: 24 additions & 17 deletions infrastructure/main.tf
@@ -26,28 +26,35 @@ resource "aws_ssm_parameter" "stage_api_domain" {

resource "aws_s3_bucket" "logging_bucket" {
bucket = var.logging_bucket_name
acl = "private"
tags = {
Project = var.project
Stage = var.stage
}
}

server_side_encryption_configuration {
rule {
resource "aws_s3_bucket_acl" "logging_bucket" {
bucket = aws_s3_bucket.logging_bucket.id
acl = "private"
}
resource "aws_s3_bucket_server_side_encryption_configuration" "logging_bucket" {
bucket = aws_s3_bucket.logging_bucket.id
rule {
apply_server_side_encryption_by_default {
sse_algorithm = "AES256"
}
sse_algorithm = "AES256"
}
}
}

versioning {
enabled = true
mfa_delete = false
}

logging {
target_bucket = var.logging_bucket_name
target_prefix = "logging_bucket/"
resource "aws_s3_bucket_versioning" "logging_bucket" {
bucket = aws_s3_bucket.logging_bucket.id
versioning_configuration {
status = "Enabled"
}
}

tags = {
Project = var.project
Stage = var.stage
}
resource "aws_s3_bucket_logging" "logging_bucket" {
target_bucket = aws_s3_bucket.logging_bucket.id
target_prefix = "logging_bucket/"
}
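
The main.tf changes above follow the AWS provider 4.x pattern of splitting the inline aws_s3_bucket arguments (acl, server_side_encryption_configuration, versioning, logging) into standalone resources. The upgrade also typically involves raising the provider version constraint; below is a minimal sketch of that constraint, assuming the standard required_providers syntax rather than quoting this repo's exact block (later commits also mention removing the source and alias settings from the provider).

```
# Sketch only: pin the AWS provider to the 4.x series so the split
# aws_s3_bucket_* resources used above are available. The version string
# is illustrative and may not match the constraint used in this repository.
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 4.0"
    }
  }
}
```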


3 changes: 3 additions & 0 deletions infrastructure/prod.tfvars
@@ -19,6 +19,9 @@ ssm_db_name = "/crossfeed/prod/DATABASE_NAME"
ssm_db_host = "/crossfeed/prod/DATABASE_HOST"
ssm_db_username = "/crossfeed/prod/DATABASE_USER"
ssm_db_password = "/crossfeed/prod/DATABASE_PASSWORD"
ssm_pe_db_name = "/crossfeed/prod/PE_DB_NAME"
ssm_pe_db_username = "/crossfeed/prod/PE_DB_USERNAME"
ssm_pe_db_password = "/crossfeed/prod/PE_DB_PASSWORD"
ssm_matomo_db_password = "/crossfeed/prod/MATOMO_DATABASE_PASSWORD"
ssm_worker_signature_public_key = "/crossfeed/prod/WORKER_SIGNATURE_PUBLIC_KEY"
ssm_worker_signature_private_key = "/crossfeed/prod/WORKER_SIGNATURE_PRIVATE_KEY"
3 changes: 3 additions & 0 deletions infrastructure/stage.tfvars
@@ -19,6 +19,9 @@ ssm_db_name = "/crossfeed/staging/DATABASE_NAME"
ssm_db_host = "/crossfeed/staging/DATABASE_HOST"
ssm_db_username = "/crossfeed/staging/DATABASE_USER"
ssm_db_password = "/crossfeed/staging/DATABASE_PASSWORD"
ssm_pe_db_name = "/crossfeed/staging/PE_DB_NAME"
ssm_pe_db_username = "/crossfeed/staging/PE_DB_USERNAME"
ssm_pe_db_password = "/crossfeed/staging/PE_DB_PASSWORD"
ssm_matomo_db_password = "/crossfeed/staging/MATOMO_DATABASE_PASSWORD"
ssm_worker_signature_public_key = "/crossfeed/staging/WORKER_SIGNATURE_PUBLIC_KEY"
ssm_worker_signature_private_key = "/crossfeed/staging/WORKER_SIGNATURE_PRIVATE_KEY"
14 changes: 14 additions & 0 deletions infrastructure/vars.tf
@@ -63,6 +63,20 @@ variable "ssm_db_host" {
default = "/crossfeed/staging/DATABASE_HOST"
}

variable "ssm_pe_db_name" {
type = string
default = "/crossfeed/staging/PE_DB_NAME"
}

variable "ssm_pe_db_username" {
type = string
default = "/crossfeed/staging/PE_DB_USERNAME"
}

variable "ssm_pe_db_password" {
type = string
default = "/crossfeed/staging/PE_DB_PASSWORD"
}
variable "ssm_lambda_sg" {
type = string
default = "/crossfeed/staging/SG_ID"
21 changes: 21 additions & 0 deletions infrastructure/worker.tf
@@ -70,6 +70,9 @@ resource "aws_iam_role_policy" "worker_task_execution_role_policy" {
"${aws_ssm_parameter.crossfeed_send_db_name.arn}",
"${data.aws_ssm_parameter.db_username.arn}",
"${data.aws_ssm_parameter.db_password.arn}",
"${data.aws_ssm_parameter.pe_db_name.arn}",
"${data.aws_ssm_parameter.pe_db_username.arn}",
"${data.aws_ssm_parameter.pe_db_password.arn}",
"${data.aws_ssm_parameter.worker_signature_public_key.arn}",
"${data.aws_ssm_parameter.worker_signature_private_key.arn}",
"${data.aws_ssm_parameter.censys_api_id.arn}",
@@ -216,6 +219,18 @@ resource "aws_ecs_task_definition" "worker" {
"name": "DB_PASSWORD",
"valueFrom": "${data.aws_ssm_parameter.db_password.arn}"
},
{
"name": "PE_DB_NAME",
"valueFrom": "${data.aws_ssm_parameter.pe_db_name.arn}"
},
{
"name": "PE_DB_USERNAME",
"valueFrom": "${data.aws_ssm_parameter.pe_db_username.arn}"
},
{
"name": "PE_DB_PASSWORD",
"valueFrom": "${data.aws_ssm_parameter.pe_db_password.arn}"
},
{
"name": "CENSYS_API_ID",
"valueFrom": "${data.aws_ssm_parameter.censys_api_id.arn}"
@@ -308,6 +323,12 @@ data "aws_ssm_parameter" "sixgill_client_id" { name = var.ssm_sixgill_client_id }

data "aws_ssm_parameter" "sixgill_client_secret" { name = var.ssm_sixgill_client_secret }

data "aws_ssm_parameter" "pe_db_name" { name = var.ssm_pe_db_name }

data "aws_ssm_parameter" "pe_db_username" { name = var.ssm_pe_db_username }

data "aws_ssm_parameter" "pe_db_password" { name = var.ssm_pe_db_password }

data "aws_ssm_parameter" "lg_api_key" { name = var.ssm_lg_api_key }

data "aws_ssm_parameter" "lg_workspace_name" { name = var.ssm_lg_workspace_name }