fix: optimize endpoint page #68

Merged
merged 10 commits on Nov 2, 2022
2 changes: 2 additions & 0 deletions backend/src/data-source.ts
@@ -22,6 +22,7 @@ import { addUniqueConstraintApiEndpoint1666678487137 } from "migrations/16666784
import { dropAnalyzedColumnFromApiTrace1666752646836 } from "migrations/1666752646836-drop-analyzed-column-from-api-trace"
import { addIndexForDataField1666941075032 } from "migrations/1666941075032-add-index-for-data-field"
import { addIsgraphqlColumnApiEndpoint1667095325334 } from "migrations/1667095325334-add-isgraphql-column-api-endpoint"
import { addApiEndpointUuidIndexForAlert1667259254414 } from "migrations/1667259254414-add-apiEndpointUuid-index-for-alert"

export const AppDataSource: DataSource = new DataSource({
type: "postgres",
@@ -49,6 +50,7 @@ export const AppDataSource: DataSource = new DataSource({
dropAnalyzedColumnFromApiTrace1666752646836,
addIndexForDataField1666941075032,
addIsgraphqlColumnApiEndpoint1667095325334,
addApiEndpointUuidIndexForAlert1667259254414,
],
migrationsRun: runMigration,
logging: false,
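The new migration is registered in the migrations array above and runs automatically on startup when migrationsRun is enabled. For reference, a minimal sketch of applying pending migrations by hand against the same data source (illustrative only, not part of this PR; the import path assumes the backend's src root, as in the files above):

import { AppDataSource } from "data-source"

const applyPendingMigrations = async () => {
  // Open the connection, run any migrations that have not been applied yet, then close.
  await AppDataSource.initialize()
  const applied = await AppDataSource.runMigrations()
  applied.forEach(migration => console.log(`Applied migration: ${migration.name}`))
  await AppDataSource.destroy()
}

applyPendingMigrations().catch(console.error)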
34 changes: 20 additions & 14 deletions backend/src/jobs.ts
@@ -9,6 +9,12 @@ import {
} from "services/jobs"
import runAllTests from "services/testing/runAllTests"
import { logAggregatedStats } from "services/logging"
import { DateTime } from "luxon"

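// Helper for job logs: prefixes each message with the current UTC timestamp; newLine prepends a blank line.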
const log = (logMessage: string, newLine?: boolean) =>
console.log(
`${newLine ? "\n" : ""}${DateTime.utc().toString()} ${logMessage}`,
)

const main = async () => {
const datasource = await AppDataSource.initialize()
@@ -25,63 +31,63 @@ const main = async () => {
const logAggregateStatsSem = semaphore(1)
const checkForUnauthenticatedSem = semaphore(1)

schedule.scheduleJob("30 * * * *", () => {
schedule.scheduleJob("*/10 * * * *", () => {
generateSpecSem.take(async () => {
console.log("\nGenerating OpenAPI Spec Files...")
log("Generating OpenAPI Spec Files...", true)
await generateOpenApiSpec()
console.log("Finished generating OpenAPI Spec Files.")
log("Finished generating OpenAPI Spec Files.")
generateSpecSem.leave()
})
})

schedule.scheduleJob("30 * * * * ", () => {
checkForUnauthenticatedSem.take(async () => {
console.log("\nChecking for Unauthenticated Endpoints")
log("Checking for Unauthenticated Endpoints", true)
await checkForUnauthenticatedEndpoints()
console.log("Finished checking for Unauthenticated Endpoints")
log("Finished checking for Unauthenticated Endpoints")
checkForUnauthenticatedSem.leave()
})
})

// Offset by 15 minutes past the hour, so that there isn't any excess database slowdown
schedule.scheduleJob("15 * * * *", () => {
unsecuredAlertsSem.take(async () => {
console.log("\nGenerating Alerts for Unsecured Endpoints")
log("Generating Alerts for Unsecured Endpoints", true)
await monitorEndpointForHSTS()
console.log("Finished generating alerts for Unsecured Endpoints.")
log("Finished generating alerts for Unsecured Endpoints.")
unsecuredAlertsSem.leave()
})
})

schedule.scheduleJob("30 * * * *", () => {
testsSem.take(async () => {
console.log("\nRunning Tests...")
log("Running Tests...", true)
await runAllTests()
console.log("Finished running tests.")
log("Finished running tests.")
testsSem.leave()
})
})

schedule.scheduleJob("*/10 * * * *", () => {
clearApiTracesSem.take(async () => {
console.log("\nClearing Api Trace data...")
log("Clearing Api Trace data...", true)
await clearApiTraces()
console.log("Finished clearing Api Trace data.")
log("Finished clearing Api Trace data.")
clearApiTracesSem.leave()
})
})

if ((process.env.DISABLE_LOGGING_STATS || "false").toLowerCase() == "false") {
schedule.scheduleJob("0 */6 * * *", () => {
logAggregateStatsSem.take(async () => {
console.log("\nLogging Aggregated Stats...")
log("Logging Aggregated Stats...", true)
await logAggregatedStats()
console.log("Finished Logging Aggregated Stats.")
log("Finished Logging Aggregated Stats.")
logAggregateStatsSem.leave()
})
})
} else {
console.log("\nLogging Aggregated Stats Disabled...")
log("Logging Aggregated Stats Disabled...", true)
}

process.on("SIGINT", () => {
15 changes: 15 additions & 0 deletions backend/src/migrations/1667259254414-add-apiEndpointUuid-index-for-alert.ts
@@ -0,0 +1,15 @@
import { MigrationInterface, QueryRunner } from "typeorm"

export class addApiEndpointUuidIndexForAlert1667259254414
implements MigrationInterface
{
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
`CREATE INDEX IF NOT EXISTS "apiEndpointUuid_alert" ON "alert" ("apiEndpointUuid")`,
)
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP INDEX IF EXISTS "apiEndpointUuid_alert"`)
}
}
2 changes: 2 additions & 0 deletions backend/src/models/alert.ts
@@ -3,6 +3,7 @@ import {
Column,
CreateDateColumn,
Entity,
Index,
ManyToOne,
PrimaryGeneratedColumn,
UpdateDateColumn,
@@ -27,6 +28,7 @@ export class Alert extends BaseEntity {
riskScore: RiskScore

@Column()
@Index("apiEndpointUuid_alert")
apiEndpointUuid: string

@ManyToOne(() => ApiEndpoint, apiEndpoint => apiEndpoint.alerts)
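The Index decorator keeps the Alert entity in sync with the raw migration above. A quick, hypothetical way to confirm the planner actually uses the new index for the per-endpoint alert lookup (one-off check, not part of this PR; the UUID literal is a placeholder and the data source is assumed to be initialized):

const plan = await AppDataSource.query(
  `EXPLAIN SELECT uuid, status
   FROM alert
   WHERE "apiEndpointUuid" = '00000000-0000-0000-0000-000000000000'`,
)
// Each row has a single "QUERY PLAN" column; look for a scan on "apiEndpointUuid_alert" in the output.
console.log(plan)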
68 changes: 48 additions & 20 deletions backend/src/services/get-endpoints/index.ts
@@ -4,6 +4,9 @@ import {
ApiEndpointTest,
ApiTrace,
AggregateTraceDataHourly,
Alert,
DataField,
OpenApiSpec,
} from "models"
import {
GetEndpointParams,
@@ -17,6 +20,28 @@ import Error404NotFound from "errors/error-404-not-found"
import { getRiskScore } from "utils"
import { getEndpointsCountQuery, getEndpointsQuery } from "./queries"

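// Raw replacement for the eager dataFields relation: fetches one endpoint's data fields
// pre-sorted by tag and path, casting the array columns to text[] so they come back as
// plain string arrays.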
const GET_DATA_FIELDS_QUERY = `
SELECT
uuid,
"dataClasses"::text[],
"falsePositives"::text[],
"scannerIdentified"::text[],
"dataType",
"dataTag",
"dataSection",
"createdAt",
"updatedAt",
"dataPath",
"apiEndpointUuid"
FROM
data_field
WHERE
"apiEndpointUuid" = $1
ORDER BY
"dataTag" ASC,
"dataPath" ASC
`

export class GetEndpointsService {
static async updateIsAuthenticated(
apiEndpointUuid: string,
@@ -118,29 +143,29 @@ export class GetEndpointsService {
const queryRunner = AppDataSource.createQueryRunner()
try {
await queryRunner.connect()
const endpoint = await queryRunner.manager.findOne(ApiEndpoint, {
select: {
alerts: {
uuid: true,
status: true,
},
},
where: { uuid: endpointId },
relations: {
dataFields: true,
openapiSpec: true,
alerts: true,
},
order: {
dataFields: {
dataTag: "ASC",
dataPath: "ASC",
},
},
})
const endpoint = await queryRunner.manager
.createQueryBuilder()
.from(ApiEndpoint, "endpoint")
.where("uuid = :id", { id: endpointId })
.getRawOne()
if (!endpoint) {
throw new Error404NotFound("Endpoint does not exist.")
}
const alerts = await queryRunner.manager
.createQueryBuilder()
.select(["uuid", "status"])
.from(Alert, "alert")
.where(`"apiEndpointUuid" = :id`, { id: endpointId })
.getRawMany()
const dataFields: DataField[] = await queryRunner.query(
GET_DATA_FIELDS_QUERY,
[endpointId],
)
const openapiSpec = await queryRunner.manager
.createQueryBuilder()
.from(OpenApiSpec, "spec")
.where("name = :name", { name: endpoint.openapiSpecName })
.getRawOne()
const traces = await queryRunner.manager.find(ApiTrace, {
where: { apiEndpointUuid: endpoint.uuid },
order: { createdAt: "DESC" },
Expand All @@ -151,6 +176,9 @@ export class GetEndpointsService {
})
return {
...endpoint,
alerts,
dataFields,
openapiSpec,
traces: [...traces],
tests: tests as Array<Test>,
}
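The detail lookup now issues one narrow query per relation instead of a single findOne with eager relations, and the alert query is exactly the lookup served by the new "apiEndpointUuid_alert" index. For comparison only, a roughly equivalent form of that lookup through the repository API (it hydrates Alert entities, which the raw query builder above avoids):

const alerts = await queryRunner.manager.find(Alert, {
  // Same filter and projection as the raw query builder version above.
  select: { uuid: true, status: true },
  where: { apiEndpointUuid: endpointId },
})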