No unchecked index access #15153

Closed
wants to merge 12 commits
2 changes: 1 addition & 1 deletion packages/backend-core/src/cache/tests/user.spec.ts
@@ -93,7 +93,7 @@ describe("user cache", () => {
jest.spyOn(UserDB, "bulkGet")

await config.doInTenant(() =>
getUsers([userIdsToRequest[0], userIdsToRequest[3]])
getUsers([userIdsToRequest[0]!, userIdsToRequest[3]!])
)
;(UserDB.bulkGet as jest.Mock).mockClear()

3 changes: 3 additions & 0 deletions packages/backend-core/src/db/couch/DatabaseImpl.ts
@@ -449,6 +449,9 @@ export class DatabaseImpl implements Database {
): Promise<AllDocsResponse<T>> {
return this.performCall(db => {
const [database, view] = viewName.split("/")
if (!database || !view) {
throw new Error(`Invalid view name: ${viewName}`)
}
return () => db.view(database, view, params)
})
}
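For reference, a quick sketch of the case this new guard catches; the view name is illustrative, not taken from this PR:

const viewName = "users/by_email" // a typical "designDoc/viewName" pair
const [database, view] = viewName.split("/")
// database === "users", view === "by_email"
// A malformed name with no "/" leaves `view` undefined, which now throws
// "Invalid view name" instead of passing undefined through to db.view().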
6 changes: 3 additions & 3 deletions packages/backend-core/src/db/lucene.ts
@@ -256,12 +256,12 @@ export class QueryBuilder<T> {

compressFilters(filters: Record<string, string[]>) {
const compressed: typeof filters = {}
for (let key of Object.keys(filters)) {
for (const [key, filter] of Object.entries(filters)) {
const finalKey = removeKeyNumbering(key)
if (compressed[finalKey]) {
compressed[finalKey] = compressed[finalKey].concat(filters[key])
compressed[finalKey] = compressed[finalKey].concat(filter)
} else {
compressed[finalKey] = filters[key]
compressed[finalKey] = filter
}
}
// add prefixes back
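A hedged illustration of what compressFilters does, assuming removeKeyNumbering strips a leading "N:" index from filter keys; the keys below are made up:

compressFilters({ "1:name": ["alice"], "2:name": ["bob"] })
// -> { name: ["alice", "bob"] }: both numbered keys collapse onto "name"
// and their value arrays are concatenated before prefixes are added back.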
40 changes: 22 additions & 18 deletions packages/backend-core/src/db/utils.ts
@@ -173,32 +173,36 @@ export async function dbExists(dbName: any) {
)
}

export interface PaginationOpts<T> {
paginate?: boolean
property?: keyof T
getKey?: (doc: T) => string | undefined
}

export interface PaginationResult<T> {
data: T[]
hasNextPage: boolean
nextPage?: string
}

export function pagination<T>(
data: T[],
pageSize: number,
{
paginate,
property,
getKey,
}: {
paginate: boolean
property: string
getKey?: (doc: T) => string | undefined
} = {
paginate: true,
property: "_id",
}
) {
opts?: PaginationOpts<T>
): PaginationResult<T> {
const {
paginate = true,
property = "_id" as keyof T,
getKey = (d: T) => d[property]?.toString(),
} = opts || {}

if (!paginate) {
return { data, hasNextPage: false }
}
const hasNextPage = data.length > pageSize
let nextPage = undefined
if (!getKey) {
getKey = (doc: any) => (property ? doc?.[property] : doc?._id)
}
let nextPage: string | undefined = undefined
if (hasNextPage) {
nextPage = getKey(data[pageSize])
nextPage = getKey(data[pageSize]!)
}
return {
data: data.slice(0, pageSize),
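For context, a minimal sketch of calling the reworked helper with the new options object; the Row shape and ids are illustrative:

interface Row { _id?: string; name: string }
const rows: Row[] = [
  { _id: "ro_1", name: "a" },
  { _id: "ro_2", name: "b" },
  { _id: "ro_3", name: "c" },
]
// Callers fetch pageSize + 1 items so the helper can tell whether another page exists.
const page = pagination(rows, 2, { paginate: true, property: "_id" })
// page.data -> the first two rows, page.hasNextPage -> true,
// page.nextPage -> "ro_3", the key the next request should start from.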
8 changes: 6 additions & 2 deletions packages/backend-core/src/docIds/ids.ts
@@ -70,12 +70,16 @@ export function generateUserMetadataID(globalId: string) {
/**
* Breaks up the ID to get the global ID.
*/
export function getGlobalIDFromUserMetadataID(id: string) {
export function getGlobalIDFromUserMetadataID(id: string): string {
const prefix = `${DocumentType.ROW}${SEPARATOR}${InternalTable.USER_METADATA}${SEPARATOR}`
if (!id || !id.includes(prefix)) {
return id
}
return id.split(prefix)[1]
const split = id.split(prefix)
if (!split[1]) {
return id
}
return split[1]
}

/**
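As a rough illustration of the behaviour this hunk preserves, assuming the usual prefix constants (DocumentType.ROW = "ro", InternalTable.USER_METADATA = "ta_users", SEPARATOR = "_"):

getGlobalIDFromUserMetadataID("ro_ta_users_us_abc123") // -> "us_abc123"
getGlobalIDFromUserMetadataID("us_abc123") // -> "us_abc123", no prefix so it is returned unchanged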
3 changes: 1 addition & 2 deletions packages/backend-core/src/events/publishers/table.ts
@@ -26,9 +26,8 @@ async function updated(oldTable: Table, newTable: Table) {
let defaultValues, aiColumn

// check that new fields have been added
for (const key in newTable.schema) {
for (const [key, newColumn] of Object.entries(newTable.schema)) {
if (!oldTable.schema[key]) {
const newColumn = newTable.schema[key]
if ("default" in newColumn && newColumn.default != null) {
defaultValues = true
}
8 changes: 5 additions & 3 deletions packages/backend-core/src/features/features.ts
@@ -108,6 +108,9 @@ export function parseEnvFlags(flags: string): EnvFlagEntry[] {
const split = flags.split(",").map(x => x.split(":"))
const result: EnvFlagEntry[] = []
for (const [tenantId, ...features] of split) {
if (!tenantId) {
continue
}
for (let feature of features) {
let value = true
if (feature.startsWith("!")) {
@@ -131,9 +134,8 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
}

defaults(): FlagValues<T> {
return Object.keys(this.flagSchema).reduce((acc, key) => {
const typedKey = key as keyof T
acc[typedKey] = this.flagSchema[key].defaultValue
return Object.entries(this.flagSchema).reduce((acc, [key, flag]) => {
acc[key as keyof T] = flag.defaultValue
return acc
}, {} as FlagValues<T>)
}
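A small sketch of the malformed input the new tenantId guard skips; the flag string is made up for illustration:

// "tenant1:featureA,".split(",") gives ["tenant1:featureA", ""], and "".split(":") gives [""],
// so the trailing entry destructures to an empty tenantId and is now skipped
// rather than producing a flag entry with no tenant.
parseEnvFlags("tenant1:featureA,")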
2 changes: 1 addition & 1 deletion packages/backend-core/src/logging/system.ts
@@ -24,7 +24,7 @@ export function getSingleFileMaxSizeInfo(totalMaxSize: string) {
return undefined
}

const size = +match[1]
const size = +match[1]!
const unit = match[2]
if (size === 1) {
switch (unit) {
@@ -7,7 +7,7 @@ describe("plugins", () => {
const plugin = structures.plugins.plugin()

function getEnrichedPluginUrls() {
const enriched = plugins.enrichPluginURLs([plugin])[0]
const enriched = plugins.enrichPluginURLs([plugin])[0]!
return {
jsUrl: enriched.jsUrl!,
iconUrl: enriched.iconUrl!,
20 changes: 11 additions & 9 deletions packages/backend-core/src/objectStore/objectStore.ts
@@ -377,11 +377,10 @@ export async function retrieveDirectory(bucketName: string, path: string) {
let streams = await Promise.all(
objects.map(obj => getReadStream(bucketName, obj.Key!))
)
let count = 0
const writePromises: Promise<Error>[] = []
for (let obj of objects) {
for (const [i, obj] of objects.entries()) {
const filename = obj.Key!
const stream = streams[count++]
const stream = streams[i]!
const possiblePath = filename.split("/")
const dirs = possiblePath.slice(0, possiblePath.length - 1)
const possibleDir = join(writePath, ...dirs)
@@ -562,6 +561,10 @@ export async function getObjectMetadata(
}
}

const bucketPathRegex = new RegExp(
`^${SIGNED_FILE_PREFIX}/(?<bucket>[^/]+)/(?<path>.+)$`
)

/*
Given a signed url like '/files/signed/tmp-files-attachments/app_123456/myfile.txt' extract
the bucket and the path from it
@@ -570,13 +573,12 @@ export function extractBucketAndPath(
url: string
): { bucket: string; path: string } | null {
const baseUrl = url.split("?")[0]
if (!baseUrl) {
return null
}

const regex = new RegExp(
`^${SIGNED_FILE_PREFIX}/(?<bucket>[^/]+)/(?<path>.+)$`
)
const match = baseUrl.match(regex)

if (match && match.groups) {
const match = baseUrl.match(bucketPathRegex)
if (match && match.groups && match.groups.bucket && match.groups.path) {
const { bucket, path } = match.groups
return { bucket, path }
}
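To make the hoisted regex concrete, a sketch using the URL from the comment above and assuming SIGNED_FILE_PREFIX is "/files/signed", as that example implies:

extractBucketAndPath("/files/signed/tmp-files-attachments/app_123456/myfile.txt?v=1")
// -> { bucket: "tmp-files-attachments", path: "app_123456/myfile.txt" }
extractBucketAndPath("/files/other/app_123456/myfile.txt")
// -> null: the prefix and named groups do not match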
57 changes: 29 additions & 28 deletions packages/backend-core/src/redis/redis.ts
@@ -37,7 +37,11 @@ if (env.MOCK_REDIS) {
}

function pickClient(selectDb: number) {
return CLIENTS[selectDb]
const client = CLIENTS[selectDb]
if (!client) {
throw new Error(`No Redis client for db: ${selectDb}`)
}
return client
}

function connectionError(timeout: NodeJS.Timeout, err: Error | string) {
@@ -63,9 +67,8 @@ function init(selectDb = DEFAULT_SELECT_DB) {
const RedisCore = env.MOCK_REDIS && MockRedis ? MockRedis : Redis
let timeout: NodeJS.Timeout
CLOSED = false
let client = pickClient(selectDb)
// already connected, ignore
if (client && CONNECTED) {
if (CLIENTS[selectDb] && CONNECTED) {
return
}
// testing uses a single in memory client
@@ -79,13 +82,10 @@ function init(selectDb = DEFAULT_SELECT_DB) {
}
}, STARTUP_TIMEOUT_MS)

// disconnect any lingering client
if (client) {
client.disconnect()
}
const { host, port } = getRedisConnectionDetails()
const opts = getRedisOptions()

let client: Redis
if (CLUSTERED) {
client = new RedisCore.Cluster([{ host, port }], opts)
} else {
@@ -205,8 +205,11 @@ class RedisWrapper {
key = `${db}${SEPARATOR}${key}`
let stream
if (CLUSTERED) {
let node = (this.getClient() as never as Cluster).nodes("master")
stream = node[0].scanStream({ match: key + "*", count: 100 })
let node = (this.getClient() as never as Cluster).nodes("master")[0]
if (!node) {
throw new Error("No master node found in Redis cluster")
}
stream = node.scanStream({ match: key + "*", count: 100 })
} else {
stream = (this.getClient() as Redis).scanStream({
match: key + "*",
@@ -244,31 +247,29 @@
}

async bulkGet<T>(keys: string[]) {
const db = this._db
if (keys.length === 0) {
return {}
}
const prefixedKeys = keys.map(key => addDbPrefix(db, key))

const prefixedKeys = keys.map(key => addDbPrefix(this._db, key))
let response = await this.getClient().mget(prefixedKeys)
if (Array.isArray(response)) {
let final: Record<string, T> = {}
let count = 0
for (let result of response) {
if (result) {
let parsed
try {
parsed = JSON.parse(result)
} catch (err) {
parsed = result
}
final[keys[count]] = parsed
}
count++
}
return final
} else {
if (!Array.isArray(response)) {
throw new Error(`Invalid response: ${response}`)
}

let final: Record<string, T> = {}
for (const [i, result] of response.entries()) {
if (!result) {
continue
}
const key = keys[i]!
try {
final[key] = JSON.parse(result)
} catch (err) {
final[key] = result as T
}
}
return final
}

async store(key: string, value: any, expirySeconds: number | null = null) {
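For the reworked bulkGet, a sketch of the expected behaviour; `cache` and the keys are illustrative, not from this PR:

// mget preserves input order, so response[i] lines up with keys[i]; null results
// (missing keys) are skipped, JSON values are parsed, anything else is kept as the raw string.
const values = await cache.bulkGet<{ name: string }>(["user:1", "user:2"])
// -> e.g. { "user:1": { name: "alice" } } when "user:2" has no value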
50 changes: 15 additions & 35 deletions packages/backend-core/src/redis/utils.ts
@@ -61,52 +61,32 @@ export enum SelectableDatabase {
UNUSED_14 = 15,
}

export function getRedisConnectionDetails() {
let password = env.REDIS_PASSWORD
let url: string[] | string = env.REDIS_URL.split("//")
// get rid of the protocol
url = url.length > 1 ? url[1] : url[0]
// check for a password etc
url = url.split("@")
if (url.length > 1) {
// get the password
password = url[0].split(":")[1]
url = url[1]
} else {
url = url[0]
}
const [host, port] = url.split(":")

const portNumber = parseInt(port)
return {
host,
password,
// assume default port for redis if invalid found
port: isNaN(portNumber) ? 6379 : portNumber,
}
export function getRedisConnectionDetails(): Redis.RedisOptions {
// The URL class will return most things as the empty string if not present,
// so we add `|| undefined` to not confuse the Redis client.
const url = new URL(env.REDIS_URL)
const host = url.hostname || undefined
const password = url.password || env.REDIS_PASSWORD
const username = url.username || undefined
const port = parseInt(url.port) || 6379 // NaN is falsey, parseInt('') is NaN
return { host, password, port, username }
}

export function getRedisOptions() {
const { host, password, port } = getRedisConnectionDetails()
let redisOpts: Redis.RedisOptions = {
const opts: Redis.RedisOptions = {
...getRedisConnectionDetails(),
connectTimeout: CONNECT_TIMEOUT_MS,
port: port,
host,
password,
}
let opts: Redis.ClusterOptions | Redis.RedisOptions = redisOpts
if (env.REDIS_CLUSTERED) {
opts = {
return {
connectTimeout: CONNECT_TIMEOUT_MS,
redisOptions: {
...redisOpts,
tls: {},
},
redisOptions: { ...opts, tls: {} },
slotsRefreshTimeout: SLOT_REFRESH_MS,
dnsLookup: (address: string, callback: any) => callback(null, address),
} as Redis.ClusterOptions
} else {
return opts
}
return opts
}

export function addDbPrefix(db: string, key: string) {
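A rough sketch of how the URL-based parsing behaves for a typical connection string; the values are illustrative:

const url = new URL("redis://:secret@redis.internal:6380")
// url.hostname === "redis.internal", url.password === "secret", url.port === "6380"
// With "redis://localhost" the port is "", parseInt("") is NaN, so the helper falls back to 6379,
// and the empty username/password become undefined so the client does not try to AUTH with them.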
3 changes: 3 additions & 0 deletions packages/backend-core/src/security/encryption.ts
@@ -58,6 +58,9 @@ export function decrypt(
secretOption: SecretOption = SecretOption.API
) {
const [salt, encrypted] = input.split(SEPARATOR)
if (!salt || !encrypted) {
throw new Error("Invalid input to decrypt")
}
const saltBuffer = Buffer.from(salt, "hex")
const stretched = stretchString(getSecret(secretOption), saltBuffer)
const decipher = crypto.createDecipheriv(ALGO, stretched, saltBuffer)
3 changes: 3 additions & 0 deletions packages/backend-core/src/security/roles.ts
@@ -514,6 +514,9 @@ export async function getAllRoles(appId?: string): Promise<RoleDoc[]> {
// need to combine builtin with any DB record of them (for sake of permissions)
for (let builtinRoleId of externalBuiltinRoles) {
const builtinRole = builtinRoles[builtinRoleId]
if (!builtinRole) {
throw new Error(`Role ${builtinRoleId} not found`)
}
const dbBuiltin = roles.filter(dbRole =>
roleIDsAreEqual(dbRole._id!, builtinRoleId)
)[0]