diff --git a/backend/env.yml b/backend/env.yml
index 220fce06..f188883c 100644
--- a/backend/env.yml
+++ b/backend/env.yml
@@ -54,11 +54,7 @@ staging:
   STAGE: staging
   PE_FARGATE_CLUSTER_NAME: pe-staging-worker
   PE_FARGATE_TASK_DEFINITION_NAME: pe-staging-worker
-  SHODAN_QUEUE_URL: ${ssm:/crossfeed/staging/SHODAN_QUEUE_URL}
-  DNSTWIST_QUEUE_URL: ${ssm:/crossfeed/staging/DNSTWIST_QUEUE_URL}
-  HIBP_QUEUE_URL: ${ssm:/crossfeed/staging/HIBP_QUEUE_URL}
-  INTELX_QUEUE_URL: ${ssm:/crossfeed/staging/INTELX_QUEUE_URL}
-  CYBERSIXGILL_QUEUE_URL: ${ssm:/crossfeed/staging/CYBERSIXGILL_QUEUE_URL}
+  QUEUE_URL: ${ssm:/crossfeed/staging/QUEUE_URL}
   EMAIL_BUCKET_NAME: cisa-crossfeed-staging-html-email
 
 prod:
@@ -105,11 +101,7 @@ prod:
   STAGE: prod
   PE_FARGATE_CLUSTER_NAME: pe-prod-worker
   PE_FARGATE_TASK_DEFINITION_NAME: pe-prod-worker
-  SHODAN_QUEUE_URL: ${ssm:/crossfeed/prod/SHODAN_QUEUE_URL}
-  DNSTWIST_QUEUE_URL: ${ssm:/crossfeed/prod/DNSTWIST_QUEUE_URL}
-  HIBP_QUEUE_URL: ${ssm:/crossfeed/prod/HIBP_QUEUE_URL}
-  INTELX_QUEUE_URL: ${ssm:/crossfeed/prod/INTELX_QUEUE_URL}
-  CYBERSIXGILL_QUEUE_URL: ${ssm:/crossfeed/prod/CYBERSIXGILL_QUEUE_URL}
+  QUEUE_URL: ${ssm:/crossfeed/prod/QUEUE_URL}
   EMAIL_BUCKET_NAME: cisa-crossfeed-staging-html-email
 
 dev-vpc:
diff --git a/backend/package.json b/backend/package.json
index bc563f50..5335364b 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -112,11 +112,10 @@
     "lint:fix": "eslint '**/*.{ts,tsx,js,jsx}' --fix",
     "pesyncdb": "docker-compose exec -T backend npx ts-node src/tools/run-pesyncdb.ts",
     "scan-exec": "docker-compose exec -T backend npx ts-node src/tools/run-scanExecution.ts",
-    "send-message": "node sendMessage.js",
     "syncdb": "docker-compose exec -T backend npx ts-node src/tools/run-syncdb.ts",
     "syncmdl": "docker-compose exec -T backend npx ts-node src/tools/run-syncmdl.ts",
     "test": "jest --detectOpenHandles",
     "test-python": "pytest"
   },
   "version": "1.0.0"
-}
+}
\ No newline at end of file
diff --git a/backend/sendMessage.js b/backend/sendMessage.js
deleted file mode 100644
index e868adb8..00000000
--- a/backend/sendMessage.js
+++ /dev/null
@@ -1,32 +0,0 @@
-// sendMessage.js
-const amqp = require('amqplib');
-
-async function sendMessageToQueue(message, queue) {
-  const connection = await amqp.connect('amqp://localhost');
-  const channel = await connection.createChannel();
-
-  await channel.assertQueue(queue, { durable: true });
-
-  // Simulate sending a message to the queue
-  channel.sendToQueue(queue, Buffer.from(JSON.stringify(message)), {
-    persistent: true
-  });
-
-  console.log('Message sent:', message);
-
-  setTimeout(() => {
-    connection.close();
-  }, 500);
-}
-
-// Simulate sending a message
-const message = {
-  scriptType: 'dnstwist',
-  org: 'DHS'
-};
-const queue = 'dnstwistQueue';
-sendMessageToQueue(message, queue);
-sendMessageToQueue(message, queue);
-sendMessageToQueue(message, queue);
-sendMessageToQueue(message, queue);
-sendMessageToQueue(message, queue);
diff --git a/backend/serverless.yml b/backend/serverless.yml
index caf161d3..9c6fa74d 100644
--- a/backend/serverless.yml
+++ b/backend/serverless.yml
@@ -112,35 +112,35 @@ resources:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-shodan-queue
-        VisibilityTimeout: 300
+        VisibilityTimeout: 18000 # 5 hours
         MaximumMessageSize: 262144 # 256 KB
         MessageRetentionPeriod: 604800 # 7 days
     DnstwistQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-dnstwist-queue
-        VisibilityTimeout: 300
+        VisibilityTimeout: 18000 # 5 hours
         MaximumMessageSize: 262144 # 256 KB
         MessageRetentionPeriod: 604800 # 7 days
     HibpQueue:
       Type: AWS::SQS::Queue
      Properties:
         QueueName: ${self:provider.stage}-hibp-queue
-        VisibilityTimeout: 300
+        VisibilityTimeout: 18000 # 5 hours
         MaximumMessageSize: 262144 # 256 KB
         MessageRetentionPeriod: 604800 # 7 days
     IntelxQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-intelx-queue
-        VisibilityTimeout: 300
+        VisibilityTimeout: 18000 # 5 hours
         MaximumMessageSize: 262144 # 256 KB
         MessageRetentionPeriod: 604800 # 7 days
     CybersixgillQueue:
       Type: AWS::SQS::Queue
       Properties:
         QueueName: ${self:provider.stage}-cybersixgill-queue
-        VisibilityTimeout: 300
+        VisibilityTimeout: 18000 # 5 hours
         MaximumMessageSize: 262144 # 256 KB
         MessageRetentionPeriod: 604800 # 7 days
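
With the five per-scan queue settings collapsed into a single `QUEUE_URL`, each queue is addressed by convention: the prefix plus `<scanType>-queue`, matching the `QueueName` pattern above. A minimal sketch of that convention (the example prefix in the comment is a placeholder, not a real value):

```typescript
// Sketch of the queue-naming convention implied by env.yml + serverless.yml above.
// QUEUE_URL holds only a prefix, e.g. "https://sqs.<region>.amazonaws.com/<account>/staging-"
// (placeholder; the real value lives in the /crossfeed/<stage>/QUEUE_URL SSM parameter).
const QUEUE_URL = process.env.QUEUE_URL!;

type ScanType = 'shodan' | 'dnstwist' | 'hibp' | 'intelx' | 'cybersixgill';

function queueUrlFor(scanType: ScanType): string {
  // Mirrors scanExecution.ts: QUEUE_URL + `${scanType}-queue`
  return `${QUEUE_URL}${scanType}-queue`;
}

console.log(queueUrlFor('dnstwist')); // e.g. ".../staging-dnstwist-queue"
```
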
diff --git a/backend/src/tasks/pesyncdb.ts b/backend/src/tasks/pesyncdb.ts
index aa22c078..a363d505 100644
--- a/backend/src/tasks/pesyncdb.ts
+++ b/backend/src/tasks/pesyncdb.ts
@@ -6576,6 +6576,9 @@ VALUES ('WhoisXML', 'DNS lookpus', '2022-03-14');
 INSERT INTO public.data_source(name, description, last_run)
 VALUES ('findomain', 'Domain enumerator', '2022-03-14');
 
+INSERT INTO public.data_source(name, description, last_run)
+VALUES ('IntelX', 'Credentials', '2022-03-14');
+
 INSERT INTO public.data_source(name, description, last_run)
 VALUES ('Sublist3r', 'Domain Permutations', '2022-03-14');
diff --git a/backend/src/tasks/scanExecution.ts b/backend/src/tasks/scanExecution.ts
index 3bbfa35e..63d39cbe 100644
--- a/backend/src/tasks/scanExecution.ts
+++ b/backend/src/tasks/scanExecution.ts
@@ -4,6 +4,8 @@ import { integer } from 'aws-sdk/clients/cloudfront';
 
 const ecs = new AWS.ECS();
 let docker: any;
+const QUEUE_URL = process.env.QUEUE_URL!;
+const SCAN_LIST = ['dnstwist', 'hibp', 'intelx', 'cybersixgill', 'shodan'];
 
 if (process.env.IS_LOCAL) {
   const Docker = require('dockerode');
@@ -15,19 +17,33 @@ const toSnakeCase = (input) => input.replace(/ /g, '-');
 async function startDesiredTasks(
   scanType: string,
   desiredCount: integer,
-  queueUrl: string
+  shodanApiKeyList: string[] = []
 ) {
+  const queueUrl = QUEUE_URL + `${scanType}-queue`;
   try {
     // ECS can only start 10 tasks at a time. Split up into batches
-    const batchSize = 10;
+    let batchSize = 10;
+    if (scanType == 'shodan') {
+      batchSize = 1;
+    }
     let remainingCount = desiredCount;
     while (remainingCount > 0) {
+      let shodan_api_key = '';
+      if (shodanApiKeyList.length > 0) {
+        shodan_api_key = shodanApiKeyList[remainingCount - 1];
+      }
       const currentBatchCount = Math.min(remainingCount, batchSize);
 
       if (process.env.IS_LOCAL) {
         // If running locally, use RabbitMQ and Docker instead of SQS and ECS
         console.log('Starting local containers');
-        await startLocalContainers(currentBatchCount, scanType, queueUrl);
+        console.log(queueUrl);
+        await startLocalContainers(
+          currentBatchCount,
+          scanType,
+          queueUrl,
+          shodan_api_key
+        );
       } else {
         await ecs
           .runTask({
@@ -55,6 +71,10 @@ async function startDesiredTasks(
                 {
                   name: 'SERVICE_QUEUE_URL',
                   value: queueUrl
+                },
+                {
+                  name: 'PE_SHODAN_API_KEYS',
+                  value: shodan_api_key
                 }
               ]
             }
@@ -75,7 +95,8 @@ async function startDesiredTasks(
 async function startLocalContainers(
   count: number,
   scanType: string,
-  queueUrl: string
+  queueUrl: string,
+  shodan_api_key: string = ''
 ) {
   // Start 'count' number of local Docker containers
   for (let i = 0; i < count; i++) {
@@ -117,7 +138,7 @@ async function startLocalContainers(
         `SIXGILL_CLIENT_ID=${process.env.SIXGILL_CLIENT_ID}`,
         `SIXGILL_CLIENT_SECRET=${process.env.SIXGILL_CLIENT_SECRET}`,
         `INTELX_API_KEY=${process.env.INTELX_API_KEY}`,
-        `PE_SHODAN_API_KEYS=${process.env.PE_SHODAN_API_KEYS}`,
+        `PE_SHODAN_API_KEYS=${shodan_api_key}`,
         `WORKER_SIGNATURE_PUBLIC_KEY=${process.env.WORKER_SIGNATURE_PUBLIC_KEY}`,
         `WORKER_SIGNATURE_PRIVATE_KEY=${process.env.WORKER_SIGNATURE_PRIVATE_KEY}`,
         `ELASTICSEARCH_ENDPOINT=${process.env.ELASTICSEARCH_ENDPOINT}`,
@@ -141,6 +162,8 @@ async function startLocalContainers(
 export const handler: Handler = async (event) => {
   let desiredCount: integer;
   let scanType: string;
+
+  // Check if desired count was passed
   if (event.desiredCount) {
     desiredCount = event.desiredCount;
   } else {
@@ -148,6 +171,7 @@ export const handler: Handler = async (event) => {
     desiredCount = 1;
   }
 
+  // Check if scan type was passed
   if (event.scanType) {
     scanType = event.scanType;
   } else {
@@ -156,39 +180,38 @@ export const handler: Handler = async (event) => {
   }
 
   try {
+    // If scanType is shodan, check if API keys were passed and split up
     if (scanType === 'shodan') {
-      await startDesiredTasks(
-        scanType,
-        desiredCount,
-        process.env.SHODAN_QUEUE_URL!
-      );
-    } else if (scanType === 'dnstwist') {
-      await startDesiredTasks(
-        scanType,
-        desiredCount,
-        process.env.DNSTWIST_QUEUE_URL!
-      );
-    } else if (scanType === 'hibp') {
-      await startDesiredTasks(
-        scanType,
-        desiredCount,
-        process.env.HIBP_QUEUE_URL!
-      );
-    } else if (scanType === 'intelx') {
-      await startDesiredTasks(
-        scanType,
-        desiredCount,
-        process.env.INTELX_QUEUE_URL!
-      );
-    } else if (scanType === 'cybersixgill') {
-      await startDesiredTasks(
-        scanType,
-        desiredCount,
-        process.env.CYBERSIXGILL_QUEUE_URL!
-      );
+      let shodanApiKeyList: string[];
+      if (event.apiKeyList) {
+        shodanApiKeyList = event.apiKeyList
+          .split(',')
+          .map((value) => value.trim());
+      } else {
+        console.error(
+          'apiKeyList must be provided for shodan and be a comma-separated string'
+        );
+        return 'Failed no apiKeyList';
+      }
+      // Check if there are enough keys for the desired number of tasks
+      if (shodanApiKeyList.length >= desiredCount) {
+        console.log(
+          'The number of API keys is greater than or equal to desiredCount.'
+        );
+      } else {
+        console.error(
+          'The number of API keys is less than desired Fargate tasks.'
+        );
+        return 'Failed no apiKeyList';
+      }
+      await startDesiredTasks(scanType, desiredCount, shodanApiKeyList);
+
+      // Run the rest of the scans normally
+    } else if (SCAN_LIST.includes(scanType)) {
+      await startDesiredTasks(scanType, desiredCount);
     } else {
       console.log(
-        'Shodan, DNSTwist, HIBP, and Cybersixgill are the only script types available right now.'
+        'Shodan, DNSTwist, HIBP, IntelX, and Cybersixgill are the only script types available right now. Must be all lowercase.'
       );
     }
   } catch (error) {
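
For reference, hypothetical invocation payloads for the updated handler, built only from the fields it reads above (`scanType`, `desiredCount`, `apiKeyList`); the key strings are placeholders:

```typescript
// Hypothetical local invocations of the updated handler (same calling pattern as
// run-scanExecution.ts below); key values are placeholders, not real credentials.
import { handler as scanExecution } from '../tasks/scanExecution';

// Non-shodan scan: the queue URL is derived from QUEUE_URL + `${scanType}-queue`.
scanExecution({ scanType: 'dnstwist', desiredCount: 3 }, {} as any, () => null);

// Shodan scan: apiKeyList is required, comma-separated, and must contain at least
// desiredCount keys, because each task receives its own key via PE_SHODAN_API_KEYS.
scanExecution(
  { scanType: 'shodan', desiredCount: 2, apiKeyList: 'key-one, key-two' },
  {} as any,
  () => null
);
```
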
diff --git a/backend/src/tools/run-scanExecution.ts b/backend/src/tools/run-scanExecution.ts
index 8e0fccab..86664858 100644
--- a/backend/src/tools/run-scanExecution.ts
+++ b/backend/src/tools/run-scanExecution.ts
@@ -1,10 +1,48 @@
 // Script to execute the scanExecution function
 import { handler as scanExecution } from '../tasks/scanExecution';
+const amqp = require('amqplib');
 
-async function localScanExecution() {
+async function localScanExecution(scan_type, desired_count, apiKeyList = '') {
   console.log('Starting...');
-  const payload = { scanType: 'dnstwist', desiredCount: 3 };
+  const payload = {
+    scanType: scan_type,
+    desiredCount: desired_count,
+    apiKeyList: apiKeyList
+  };
   scanExecution(payload, {} as any, () => null);
 }
 
-localScanExecution();
+async function sendMessageToQueue(message, queue) {
+  const connection = await amqp.connect('amqp://rabbitmq');
+  const channel = await connection.createChannel();
+
+  await channel.assertQueue(queue, { durable: true });
+
+  // Simulate sending a message to the queue
+  channel.sendToQueue(queue, Buffer.from(JSON.stringify(message)), {
+    persistent: true
+  });
+
+  console.log('Message sent:', message);
+
+  setTimeout(() => {
+    connection.close();
+  }, 500);
+}
+
+// Simulate sending a message
+const SCAN_TYPE = 'dnstwist';
+const DESIRED_COUNT = 1;
+const ORG_LIST = ['DHS', 'DOI'];
+const QUEUE = `staging-${SCAN_TYPE}-queue`;
+const API_KEY_LIST = '';
+
+for (const org of ORG_LIST) {
+  const message = {
+    scriptType: SCAN_TYPE,
+    org: org
+  };
+  sendMessageToQueue(message, QUEUE);
+}
+
+localScanExecution(SCAN_TYPE, DESIRED_COUNT, API_KEY_LIST);
diff --git a/backend/worker/pe-worker-entry.sh b/backend/worker/pe-worker-entry.sh
index 1579bdcd..de7bb5ca 100755
--- a/backend/worker/pe-worker-entry.sh
+++ b/backend/worker/pe-worker-entry.sh
@@ -33,13 +33,13 @@ while true; do
     MESSAGE=$(echo "$RESPONSE" | jq -r '.[0].payload')
     MESSAGE=${MESSAGE//\\\"/\"}
     echo "MESSAGE: $MESSAGE"
-
   else
     echo "Running live SQS logic..."
     MESSAGE=$(aws sqs receive-message --queue-url "$SERVICE_QUEUE_URL" --output json --max-number-of-messages 1)
     echo "MESSAGE: $MESSAGE"
   fi
 
+  # Check if there are no more messages. If no more, then exit Fargate container
   if [ -z "$MESSAGE" ] || [ "$MESSAGE" == "null" ]; then
     echo "No more messages in the queue. Exiting."
@@ -72,7 +72,7 @@ while true; do
 
   # Run the pe-source command
   eval "$COMMAND" \
-    && cat /app/pe_reports_logging.log
+    && cat /app/pe_reports_logging.log > /app/pe_reports_logging.log
 
   # Delete the processed message from the queue
   if [ "$IS_LOCAL" = true ]; then
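
To confirm what run-scanExecution.ts queued before the local worker drains it, here is a small sketch using the same amqplib dependency (the `amqp://rabbitmq` host and `staging-<scanType>-queue` name come from the script above; the helper itself is an assumption, not part of this PR):

```typescript
// Sketch: peek at the next message on a local RabbitMQ queue without consuming it.
const amqp = require('amqplib');

async function peekQueue(queue: string) {
  const connection = await amqp.connect('amqp://rabbitmq');
  const channel = await connection.createChannel();
  await channel.assertQueue(queue, { durable: true });

  const msg = await channel.get(queue, { noAck: false });
  if (msg) {
    console.log('Next message:', JSON.parse(msg.content.toString()));
    channel.nack(msg, false, true); // requeue so the worker still processes it
  } else {
    console.log('Queue is empty:', queue);
  }
  await connection.close();
}

peekQueue('staging-dnstwist-queue');
```
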
diff --git a/dev.env.example b/dev.env.example
index ea62188c..56dc76ad 100644
--- a/dev.env.example
+++ b/dev.env.example
@@ -98,12 +98,8 @@
 PE_DB_USERNAME=pe
 PE_DB_PASSWORD=password
 
-SHODAN_QUEUE_URL =shodanQueue
+QUEUE_URL=staging-
 PE_SHODAN_API_KEYS=
-DNSTWIST_QUEUE_URL=dnstwistQueue
-HIBP_QUEUE_URL=hibpQueue
-INTELX_QUEUE_URL=intelxQueue
-CYBERSIXGILL_QUEUE_URL=cybersixgillQueue
 
 PE_FARGATE_CLUSTER_NAME=pe-staging-worker
 PE_FARGATE_TASK_DEFINITION_NAME=pe-staging-worker
diff --git a/docs/src/documentation-pages/dev/quickstart.md b/docs/src/documentation-pages/dev/quickstart.md
index 03335c7c..ebd06f22 100644
--- a/docs/src/documentation-pages/dev/quickstart.md
+++ b/docs/src/documentation-pages/dev/quickstart.md
@@ -62,22 +62,14 @@ This quickstart describes the initial setup required to run an instance of Cross
    npm run pesyncdb
    ```
 
-4. Send messages to RabbitMQ queue. First, edit backend/nodeMessage.js to run the desired scan and
-   organization. Then run below:"
-
-   ```bash
-   cd backend
-   npm run send-message
-   ```
-
-5. Invoke scans by running below. You can edit the backend/src/tools/run-scanExecution.ts to run the desired scan type."
+4. Invoke scans by running the command below. You can edit backend/src/tools/run-scanExecution.ts to run the desired scan type.
 
    ```bash
    cd backend
    npm run scan-exec
    ```
 
-6. Observe logs in docker containers.
+5. Observe logs in docker containers.
 
 ### Running tests
diff --git a/infrastructure/prod.tfvars b/infrastructure/prod.tfvars
index f5bd49d6..cbf20ca3 100644
--- a/infrastructure/prod.tfvars
+++ b/infrastructure/prod.tfvars
@@ -54,11 +54,6 @@ ssm_sixgill_client_id      = "/crossfeed/prod/SIXGILL_CLIENT_ID"
 ssm_sixgill_client_secret  = "/crossfeed/prod/SIXGILL_CLIENT_SECRET"
 ssm_lg_api_key             = "/crossfeed/prod/LG_API_KEY"
 ssm_lg_workspace_name      = "/crossfeed/prod/LG_WORKSPACE_NAME"
-ssm_shodan_queue_url       = "/crossfeed/prod/SHODAN_QUEUE_URL"
-ssm_dnstwist_queue_url     = "/crossfeed/prod/DNSTWIST_QUEUE_URL"
-ssm_hibp_queue_url         = "/crossfeed/prod/HIBP_QUEUE_URL"
-ssm_intelx_queue_url       = "/crossfeed/prod/INTELX_QUEUE_URL"
-ssm_cybersixgill_queue_url = "/crossfeed/prod/CYBERSIXGILL_QUEUE_URL"
 ssm_pe_api_key             = "/crossfeed/prod/PE_API_KEY"
 ssm_cf_api_key             = "/crossfeed/prod/CF_API_KEY"
 db_group_name              = "crossfeed-prod-db-group"
diff --git a/infrastructure/stage.tfvars b/infrastructure/stage.tfvars
index dd0faf8c..f21d03d5 100644
--- a/infrastructure/stage.tfvars
+++ b/infrastructure/stage.tfvars
@@ -55,11 +55,6 @@ ssm_sixgill_client_secret = "/crossfeed/staging/SIXGILL_CLIENT_SECRET
 ssm_intelx_api_key         = "/crossfeed/staging/INTELX_API_KEY"
 ssm_lg_api_key             = "/crossfeed/staging/LG_API_KEY"
 ssm_lg_workspace_name      = "/crossfeed/staging/LG_WORKSPACE_NAME"
-ssm_shodan_queue_url       = "/crossfeed/staging/SHODAN_QUEUE_URL"
-ssm_dnstwist_queue_url     = "/crossfeed/staging/DNSTWIST_QUEUE_URL"
-ssm_hibp_queue_url         = "/crossfeed/staging/HIBP_QUEUE_URL"
-ssm_intelx_queue_url       = "/crossfeed/staging/INTELX_QUEUE_URL"
-ssm_cybersixgill_queue_url = "/crossfeed/staging/CYBERSIXGILL_QUEUE_URL"
 ssm_pe_api_key             = "/crossfeed/staging/PE_API_KEY"
 ssm_cf_api_key             = "/crossfeed/staging/CF_API_KEY"
 db_group_name              = "crossfeed-staging-db-group"
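
The removed tfvars entries assume the consolidated `/crossfeed/<stage>/QUEUE_URL` parameter already exists in SSM, since env.yml still resolves it. A sketch of seeding that parameter with the aws-sdk (region, account ID, and the prefix value are placeholders, not values from this PR):

```typescript
// Sketch only: seeds the consolidated /crossfeed/<stage>/QUEUE_URL parameter that
// env.yml now resolves, replacing the five per-scan parameters removed above.
// The queue-URL prefix value is a placeholder (account ID and region are assumptions).
import * as AWS from 'aws-sdk';

const ssm = new AWS.SSM({ region: 'us-east-1' });

async function seedQueueUrlParameter(stage: 'staging' | 'prod') {
  await ssm
    .putParameter({
      Name: `/crossfeed/${stage}/QUEUE_URL`,
      // Prefix only; scanExecution.ts appends `${scanType}-queue` at runtime.
      Value: `https://sqs.us-east-1.amazonaws.com/<account-id>/${stage}-`,
      Type: 'String',
      Overwrite: true
    })
    .promise();
}

seedQueueUrlParameter('staging').catch(console.error);
```
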
diff --git a/infrastructure/vars.tf b/infrastructure/vars.tf
index 811e5f99..eda0e98f 100644
--- a/infrastructure/vars.tf
+++ b/infrastructure/vars.tf
@@ -352,36 +352,6 @@ variable "ssm_lg_workspace_name" {
   default     = "/crossfeed/staging/LG_WORKSPACE_NAME"
 }
 
-variable "ssm_shodan_queue_url" {
-  description = "ssm_shodan_queue_url"
-  type        = string
-  default     = "/crossfeed/staging/SHODAN_QUEUE_URL"
-}
-
-variable "ssm_dnstwist_queue_url" {
-  description = "ssm_dnstwist_queue_url"
-  type        = string
-  default     = "/crossfeed/staging/DNSTWIST_QUEUE_URL"
-}
-
-variable "ssm_hibp_queue_url" {
-  description = "ssm_hibp_queue_url"
-  type        = string
-  default     = "/crossfeed/staging/HIBP_QUEUE_URL"
-}
-
-variable "ssm_intelx_queue_url" {
-  description = "ssm_intelx_queue_url"
-  type        = string
-  default     = "/crossfeed/staging/INTELX_QUEUE_URL"
-}
-
-variable "ssm_cybersixgill_queue_url" {
-  description = "ssm_cybersixgill_queue_url"
-  type        = string
-  default     = "/crossfeed/staging/CYBERSIXGILL_QUEUE_URL"
-}
-
 variable "db_group_name" {
   description = "db_group_name"
   type        = string
diff --git a/infrastructure/worker.tf b/infrastructure/worker.tf
index 22358854..bdb4af91 100644
--- a/infrastructure/worker.tf
+++ b/infrastructure/worker.tf
@@ -89,11 +89,6 @@ resource "aws_iam_role_policy" "worker_task_execution_role_policy" {
       "${data.aws_ssm_parameter.sixgill_client_secret.arn}",
       "${data.aws_ssm_parameter.lg_api_key.arn}",
       "${data.aws_ssm_parameter.lg_workspace_name.arn}",
-      "${data.aws_ssm_parameter.shodan_queue_url.arn}",
-      "${data.aws_ssm_parameter.dnstwist_queue_url.arn}",
-      "${data.aws_ssm_parameter.hibp_queue_url.arn}",
-      "${data.aws_ssm_parameter.intelx_queue_url.arn}",
-      "${data.aws_ssm_parameter.cybersixgill_queue_url.arn}",
       "${aws_ssm_parameter.es_endpoint.arn}",
       "${data.aws_ssm_parameter.pe_api_key.arn}",
       "${data.aws_ssm_parameter.cf_api_key.arn}"
@@ -375,16 +370,6 @@ data "aws_ssm_parameter" "worker_signature_public_key" { name = var.ssm_worker_s
 
 data "aws_ssm_parameter" "worker_signature_private_key" { name = var.ssm_worker_signature_private_key }
 
-data "aws_ssm_parameter" "shodan_queue_url" { name = var.ssm_shodan_queue_url }
-
-data "aws_ssm_parameter" "dnstwist_queue_url" { name = var.ssm_dnstwist_queue_url }
-
-data "aws_ssm_parameter" "hibp_queue_url" { name = var.ssm_hibp_queue_url }
-
-data "aws_ssm_parameter" "intelx_queue_url" { name = var.ssm_intelx_queue_url }
-
-data "aws_ssm_parameter" "cybersixgill_queue_url" { name = var.ssm_cybersixgill_queue_url }
-
 data "aws_ssm_parameter" "pe_api_key" { name = var.ssm_pe_api_key }
 
 data "aws_ssm_parameter" "cf_api_key" { name = var.ssm_cf_api_key }