3.0: Always use dbname #361
Merged (12 commits, Mar 19, 2024)
lib/db/clickhouse.js (8 changes: 4 additions & 4 deletions)
@@ -1319,7 +1319,7 @@ const samplesReadTable = {
   }
   logger.info('checking last timestamp')
   const v1EndTime = await axios.post(`${getClickhouseUrl()}/?database=${UTILS.DATABASE_NAME()}`,
-    `SELECT max(timestamp_ns) as ts FROM ${tableName} format JSON`)
+    `SELECT max(timestamp_ns) as ts FROM ${UTILS.DATABASE_NAME()}.${tableName} format JSON`)
   if (!v1EndTime.data.rows) {
     samplesReadTable.v1 = false
     return
@@ -1339,7 +1339,7 @@ const samplesReadTable = {
   settingsVersions: async function () {
     const versions = await rawRequest(
       `SELECT argMax(name, inserted_at) as _name, argMax(value, inserted_at) as _value
-      FROM settings${dist} WHERE type == 'update' GROUP BY fingerprint HAVING _name != '' FORMAT JSON`,
+      FROM ${UTILS.DATABASE_NAME()}.settings${dist} WHERE type == 'update' GROUP BY fingerprint HAVING _name != '' FORMAT JSON`,
       null,
       UTILS.DATABASE_NAME()
     )
@@ -1385,7 +1385,7 @@ const getSettings = async (names, database) => {
     'short-hash'))
   const settings = await rawRequest(`SELECT argMax(name, inserted_at) as _name,
     argMax(value, inserted_at) as _value
-    FROM settings${dist} WHERE fingerprint IN (${fps.join(',')}) GROUP BY fingerprint HAVING _name != '' FORMAT JSON`,
+    FROM ${database}.settings${dist} WHERE fingerprint IN (${fps.join(',')}) GROUP BY fingerprint HAVING _name != '' FORMAT JSON`,
   null, database)
   return settings.data.data.reduce((sum, cur) => {
     sum[cur._name] = cur._value
@@ -1403,7 +1403,7 @@ const getSettings = async (names, database) => {
  */
 const addSetting = async (type, name, value, database) => {
   const fp = UTILS.fingerPrint(JSON.stringify({ type: type, name: name }), false, 'short-hash')
-  return rawRequest('INSERT INTO settings (fingerprint, type, name, value, inserted_at) FORMAT JSONEachRow',
+  return rawRequest(`INSERT INTO ${UTILS.DATABASE_NAME()}.settings (fingerprint, type, name, value, inserted_at) FORMAT JSONEachRow`,
     JSON.stringify({
       fingerprint: fp,
       type: type,
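The change across lib/db/clickhouse.js is uniform: every query now spells out the database instead of relying on the connection default. A minimal sketch of the resulting pattern, reusing names that appear in the diff (getClickhouseUrl, UTILS.DATABASE_NAME); the helper function itself is hypothetical:

// Hypothetical helper illustrating the pattern; qryn's real code differs in detail.
const axios = require('axios')
const UTILS = require('../utils')
const { getClickhouseUrl } = require('./clickhouse')

async function lastSampleTimestamp (tableName) {
  const db = UTILS.DATABASE_NAME()
  // The URL already scopes the session with ?database=..., and the table is
  // qualified as well, so the statement keeps working even if it is ever
  // executed without that parameter.
  const res = await axios.post(
    `${getClickhouseUrl()}/?database=${db}`,
    `SELECT max(timestamp_ns) as ts FROM ${db}.${tableName} format JSON`)
  return res.data.rows ? res.data.data[0].ts : null
}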
lib/db/clickhouse_alerting.js (24 changes: 13 additions & 11 deletions)
@@ -5,7 +5,7 @@
 const axios = require('axios')
 const { DATABASE_NAME } = require('../utils')
 const UTILS = require('../utils')
-const { getClickhouseUrl } = require('./clickhouse')
+const { getClickhouseUrl, rawRequest } = require('./clickhouse')
 const Sql = require('@cloki/clickhouse-sql')
 const { clusterName } = require('../../common')
 const onCluster = clusterName ? `ON CLUSTER ${clusterName}` : ''
@@ -19,14 +19,13 @@ const dist = clusterName ? '_dist' : ''
 module.exports.getAlertRule = async (ns, group, name) => {
   const fp = getRuleFP(ns, group, name)
   const mark = Math.random()
-  const res = await axios.post(getClickhouseUrl(),
+  const res = await rawRequest(
     'SELECT fingerprint, argMax(name, inserted_at) as name, argMax(value, inserted_at) as value ' +
     `FROM ${DATABASE_NAME()}.settings${dist} ` +
     `WHERE fingerprint = ${fp} AND ${mark} == ${mark} ` +
     'GROUP BY fingerprint ' +
     'HAVING name != \'\' ' +
-    'FORMAT JSON'
-  )
+    'FORMAT JSON', null, DATABASE_NAME())
   if (!res.data.data.length) {
     return undefined
   }
@@ -49,10 +48,11 @@ module.exports.putAlertRule = async (namespace, group, rule) => {
   const groupName = JSON.stringify({ type: 'alert_group', ns: namespace, group: group.name })
   const groupFp = getGroupFp(namespace, group.name)
   const groupVal = JSON.stringify({ name: group.name, interval: group.interval })
-  await axios.post(getClickhouseUrl(),
-    `INSERT INTO ${DATABASE_NAME()}.settings${dist} (fingerprint, type, name, value, inserted_at) FORMAT JSONEachRow \n` +
+  await rawRequest(
+    `INSERT INTO ${DATABASE_NAME()}.settings${dist} (fingerprint, type, name, value, inserted_at) FORMAT JSONEachRow`,
     JSON.stringify({ fingerprint: ruleFp, type: 'alert_rule', name: ruleName, value: JSON.stringify(ruleVal), inserted_at: Date.now() * 1000000 }) + '\n' +
-    JSON.stringify({ fingerprint: groupFp, type: 'alert_group', name: groupName, value: groupVal, inserted_at: Date.now() * 1000000 })
+    JSON.stringify({ fingerprint: groupFp, type: 'alert_group', name: groupName, value: groupVal, inserted_at: Date.now() * 1000000 }),
+    DATABASE_NAME()
   )
 }

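The rawRequest call above separates the INSERT statement from its payload: the SQL (ending in FORMAT JSONEachRow) goes first, the newline-joined rows second, and the target database third. A hedged sketch of batching more rows the same way — the row values here are made up, but the call shape matches the diff:

// Sketch only: fingerprints, names and values below are illustrative.
const rows = [
  { fingerprint: 101, type: 'alert_rule', name: 'r1', value: '{}', inserted_at: Date.now() * 1000000 },
  { fingerprint: 102, type: 'alert_group', name: 'g1', value: '{}', inserted_at: Date.now() * 1000000 }
]
// One HTTP round trip for any number of rows.
await rawRequest(
  `INSERT INTO ${DATABASE_NAME()}.settings${dist} (fingerprint, type, name, value, inserted_at) FORMAT JSONEachRow`,
  rows.map(r => JSON.stringify(r)).join('\n'),
  DATABASE_NAME()
)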
@@ -66,8 +66,9 @@ module.exports.putAlertRule = async (namespace, group, rule) => {
 module.exports.getLastCheck = async (ns, group, rule, id) => {
   const fp = getRuleFP(ns, group, rule)
   id = id || 0
-  const resp = await axios.post(getClickhouseUrl(),
-    `SELECT max(mark) as maxmark FROM ${DATABASE_NAME()}._alert_view_${fp}_mark WHERE id = ${id} FORMAT JSON`
+  const resp = await rawRequest(
+    `SELECT max(mark) as maxmark FROM ${DATABASE_NAME()}._alert_view_${fp}_mark WHERE id = ${id} FORMAT JSON`,
+    null, DATABASE_NAME()
   )
   if (!resp.data.data[0]) {
     return 0
@@ -100,11 +101,12 @@ module.exports.getAlertRules = async (limit, offset) => {
   const _limit = limit ? `LIMIT ${limit}` : ''
   const _offset = offset ? `OFFSET ${offset}` : ''
   const mark = Math.random()
-  const res = await axios.post(getClickhouseUrl(),
+  const res = await rawRequest(
     'SELECT fingerprint, argMax(name, inserted_at) as name, argMax(value, inserted_at) as value ' +
     `FROM ${DATABASE_NAME()}.settings${dist} ` +
     `WHERE type == 'alert_rule' AND ${mark} == ${mark} ` +
-    `GROUP BY fingerprint HAVING name != '' ORDER BY name ${_limit} ${_offset} FORMAT JSON`)
+    `GROUP BY fingerprint HAVING name != '' ORDER BY name ${_limit} ${_offset} FORMAT JSON`,
+    null, DATABASE_NAME())
   return res.data.data.map(e => {
     return { rule: JSON.parse(e.value), name: JSON.parse(e.name) }
   })
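Every call in this file moves from axios.post(getClickhouseUrl(), ...) to rawRequest(sql, body, database). The diff never shows rawRequest itself; a plausible shape, sketched only to illustrate why the third argument matters, might look like this:

// Assumed sketch; the real implementation lives in lib/db/clickhouse.js and may differ.
const axios = require('axios')
async function rawRequest (query, data, database) {
  // Scoping the HTTP call with ?database=... means any name the SQL leaves
  // unqualified still resolves inside the intended database.
  const url = `${getClickhouseUrl()}/?database=${encodeURIComponent(database)}` +
    `&query=${encodeURIComponent(query)}`
  // SELECTs pass data = null and send the query alone; INSERT ... FORMAT
  // JSONEachRow sends the newline-joined rows as the POST body.
  return axios.post(url, data || '')
}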
lib/db/maintain/index.js (48 changes: 24 additions & 24 deletions)
@@ -110,15 +110,15 @@ module.exports.rotate = async (opts) => {
       return upgradeRequest({ db: db.db, useDefaultDB: true }, req)
     }
     if (db.samples_days + '' !== settings.v3_samples_days) {
-      const alterTable = 'ALTER TABLE samples_v3 {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      const rotateTable = `ALTER TABLE samples_v3 {{{OnCluster}}} MODIFY TTL toDateTime(timestamp_ns / 1000000000) + INTERVAL ${db.samples_days} DAY`
+      const alterTable = 'ALTER TABLE {{DB}}.samples_v3 {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      const rotateTable = `ALTER TABLE {{DB}}.samples_v3 {{{OnCluster}}} MODIFY TTL toDateTime(timestamp_ns / 1000000000) + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
       await client.addSetting('rotate', 'v3_samples_days', db.samples_days + '', db.db)
     }
     if (db.time_series_days + '' !== settings.v3_time_series_days) {
-      const alterTable = 'ALTER TABLE time_series {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      const rotateTable = `ALTER TABLE time_series {{{OnCluster}}} MODIFY TTL "date" + INTERVAL ${db.time_series_days} DAY`
+      const alterTable = 'ALTER TABLE {{DB}}.time_series {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      const rotateTable = `ALTER TABLE {{DB}}.time_series {{{OnCluster}}} MODIFY TTL "date" + INTERVAL ${db.time_series_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
       const alterView = 'ALTER TABLE time_series_gin {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
@@ -129,54 +129,54 @@ module.exports.rotate = async (opts) => {
     }
     if (db.storage_policy && db.storage_policy !== settings.v3_storage_policy) {
       logger.debug(`Altering storage policy: ${db.storage_policy}`)
-      const alterTs = `ALTER TABLE time_series {{{OnCluster}}} MODIFY SETTING storage_policy='${db.storage_policy}'`
-      const alterTsVw = `ALTER TABLE time_series_gin {{{OnCluster}}} MODIFY SETTING storage_policy='${db.storage_policy}'`
-      const alterSm = `ALTER TABLE samples_v3 {{{OnCluster}}} MODIFY SETTING storage_policy='${db.storage_policy}'`
+      const alterTs = `ALTER TABLE {{DB}}.time_series {{{OnCluster}}} MODIFY SETTING storage_policy='${db.storage_policy}'`
+      const alterTsVw = `ALTER TABLE {{DB}}.time_series_gin {{{OnCluster}}} MODIFY SETTING storage_policy='${db.storage_policy}'`
+      const alterSm = `ALTER TABLE {{DB}}.samples_v3 {{{OnCluster}}} MODIFY SETTING storage_policy='${db.storage_policy}'`
       await _update(alterTs, null, db.db)
       await _update(alterTsVw, null, db.db)
       await _update(alterSm, null, db.db)
       await client.addSetting('rotate', 'v3_storage_policy', db.storage_policy, db.db)
     }
     if (db.samples_days + '' !== settings.v1_traces_days) {
-      let alterTable = 'ALTER TABLE tempo_traces {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      let rotateTable = `ALTER TABLE tempo_traces {{{OnCluster}}} MODIFY TTL toDateTime(timestamp_ns / 1000000000) + INTERVAL ${db.samples_days} DAY`
+      let alterTable = 'ALTER TABLE {{DB}}.tempo_traces {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      let rotateTable = `ALTER TABLE {{DB}}.tempo_traces {{{OnCluster}}} MODIFY TTL toDateTime(timestamp_ns / 1000000000) + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
-      alterTable = 'ALTER TABLE tempo_traces_attrs_gin {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      rotateTable = `ALTER TABLE tempo_traces_attrs_gin {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
+      alterTable = 'ALTER TABLE {{DB}}.tempo_traces_attrs_gin {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      rotateTable = `ALTER TABLE {{DB}}.tempo_traces_attrs_gin {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
-      alterTable = 'ALTER TABLE tempo_traces_kv {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      rotateTable = `ALTER TABLE tempo_traces_kv {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
+      alterTable = 'ALTER TABLE {{DB}}.tempo_traces_kv {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      rotateTable = `ALTER TABLE {{DB}}.tempo_traces_kv {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
       await client.addSetting('rotate', 'v1_traces_days', db.samples_days + '', db.db)
     }
     if (db.storage_policy && db.storage_policy !== settings.v1_traces_storage_policy) {
       logger.debug(`Altering storage policy: ${db.storage_policy}`)
-      const alterTs = `ALTER TABLE tempo_traces MODIFY SETTING storage_policy='${db.storage_policy}'`
-      const alterTsVw = `ALTER TABLE tempo_traces_attrs_gin MODIFY SETTING storage_policy='${db.storage_policy}'`
-      const alterSm = `ALTER TABLE tempo_traces_kv MODIFY SETTING storage_policy='${db.storage_policy}'`
+      const alterTs = `ALTER TABLE {{DB}}.tempo_traces MODIFY SETTING storage_policy='${db.storage_policy}'`
+      const alterTsVw = `ALTER TABLE {{DB}}.tempo_traces_attrs_gin MODIFY SETTING storage_policy='${db.storage_policy}'`
+      const alterSm = `ALTER TABLE {{DB}}.tempo_traces_kv MODIFY SETTING storage_policy='${db.storage_policy}'`
       await _update(alterTs, null, db.db)
       await _update(alterTsVw, null, db.db)
       await _update(alterSm, null, db.db)
       await client.addSetting('rotate', 'v1_traces_storage_policy', db.storage_policy, db.db)
     }
     if (db.samples_days + '' !== settings.v1_profiles_days) {
-      let alterTable = 'ALTER TABLE profiles {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      let rotateTable = `ALTER TABLE profiles {{{OnCluster}}} MODIFY TTL toDateTime(timestamp_ns / 1000000000) + INTERVAL ${db.samples_days} DAY`
+      let alterTable = 'ALTER TABLE {{DB}}.profiles {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      let rotateTable = `ALTER TABLE {{DB}}.profiles {{{OnCluster}}} MODIFY TTL toDateTime(timestamp_ns / 1000000000) + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
-      alterTable = 'ALTER TABLE profiles_series {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      rotateTable = `ALTER TABLE profiles_series {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
+      alterTable = 'ALTER TABLE {{DB}}.profiles_series {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      rotateTable = `ALTER TABLE {{DB}}.profiles_series {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
-      alterTable = 'ALTER TABLE profiles_series_gin {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      rotateTable = `ALTER TABLE profiles_series_gin {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
+      alterTable = 'ALTER TABLE {{DB}}.profiles_series_gin {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      rotateTable = `ALTER TABLE {{DB}}.profiles_series_gin {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
-      alterTable = 'ALTER TABLE profiles_series_keys {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
-      rotateTable = `ALTER TABLE profiles_series_keys {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
+      alterTable = 'ALTER TABLE {{DB}}.profiles_series_keys {{{OnCluster}}} MODIFY SETTING ttl_only_drop_parts = 1, merge_with_ttl_timeout = 3600, index_granularity = 8192'
+      rotateTable = `ALTER TABLE {{DB}}.profiles_series_keys {{{OnCluster}}} MODIFY TTL date + INTERVAL ${db.samples_days} DAY`
       await _update(alterTable, null, db.db)
       await _update(rotateTable, null, db.db)
       await client.addSetting('rotate', 'v1_profiles_days', db.samples_days + '', db.db)
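The {{DB}} and {{{OnCluster}}} placeholders are resolved before the ALTER statements reach ClickHouse; the substitution happens inside _update/upgradeRequest, which this diff does not show. A hypothetical rendering step consistent with the templates above:

// Hypothetical expansion of the handlebars-style placeholders used in this file;
// the real templating in qryn's maintain module may differ.
function renderStatement (tpl, db, clusterName) {
  return tpl
    .replace(/\{\{DB\}\}/g, db)
    .replace(/\{\{\{OnCluster\}\}\}/g, clusterName ? `ON CLUSTER ${clusterName}` : '')
}

// renderStatement('ALTER TABLE {{DB}}.samples_v3 {{{OnCluster}}} MODIFY TTL ...', 'qryn', 'c1')
//   -> 'ALTER TABLE qryn.samples_v3 ON CLUSTER c1 MODIFY TTL ...'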