repo_name
stringlengths
1
62
dataset
stringclasses
1 value
lang
stringclasses
11 values
pr_id
int64
1
20.1k
owner
stringlengths
2
34
reviewer
stringlengths
2
39
diff_hunk
stringlengths
15
262k
code_review_comment
stringlengths
1
99.6k
hyperdx
github_2023
typescript
200
hyperdxio
wrn14897
@@ -247,17 +245,26 @@ export type VectorMetric = { v: number; // value }; -abstract class ParsingInterface<T> { - abstract _parse( - log: T, - ...args: any[] - ): Promise<LogStreamModel | MetricModel | RrwebEventModel>; +const convertToStringMap = (blob: JSONBlob) => { + const output: Record<string, string> = {}; + for (const [keyPath, value] of traverseJson(blob)) { + const stringifiedValue = tryJSONStringify(value); + if (!stringifiedValue) {
I think we should just check if `stringifiedValue !== null`. To me, this is easier to understand: ``` if (stringifiedValue !== null) { output[keyPath.join('.')] = stringifiedValue; } ```
hyperdx
github_2023
typescript
200
hyperdxio
wrn14897
@@ -247,17 +245,26 @@ export type VectorMetric = { v: number; // value }; -abstract class ParsingInterface<T> { - abstract _parse( - log: T, - ...args: any[] - ): Promise<LogStreamModel | MetricModel | RrwebEventModel>; +const convertToStringMap = (blob: JSONBlob) => { + const output: Record<string, string> = {}; + for (const [keyPath, value] of traverseJson(blob)) { + const stringifiedValue = tryJSONStringify(value); + if (!stringifiedValue) { + continue; + } + output[keyPath.join('.')] = stringifiedValue; + } + return output; +}; + +abstract class ParsingInterface<T, S> { + abstract _parse(log: T): Promise<S>; - async parse(logs: T[], ...args: any[]) { - const parsedLogs: any[] = []; + async parse(logs: T[]) { + const parsedLogs: S[] = []; for (const log of logs) { try { - parsedLogs.push(await this._parse(log, ...args)); + parsedLogs.push(await this._parse(log));
no need for async/await
hyperdx
github_2023
typescript
200
hyperdxio
wrn14897
@@ -229,17 +227,14 @@ export type VectorMetric = { v: number; // value }; -abstract class ParsingInterface<T> { - abstract _parse( - log: T, - ...args: any[] - ): LogStreamModel | MetricModel | RrwebEventModel; +abstract class ParsingInterface<T, S> { + abstract _parse(log: T): S; - parse(logs: T[], ...args: any[]) {
this is critical. we can't remove this `...args`
hyperdx
github_2023
typescript
200
hyperdxio
wrn14897
@@ -282,10 +277,30 @@ class VectorLogParser extends ParsingInterface<VectorLog> { } } -class VectorMetricParser extends ParsingInterface<VectorMetric> { +export const convertToStringMap = (obj: JSONBlob) => { + const mapped = mapObjectToKeyValuePairs(obj); + const converted_string_attrs: Record<string, string> = {}; + for (let i = 0; i < mapped['string.names'].length; i++) { + converted_string_attrs[mapped['string.names'][i]] = + mapped['string.values'][i]; + } + // at least nominally metrics should not have bool or number attributes, but we will + // handle and append them here just in case + for (let i = 0; i < mapped['number.names'].length; i++) { + converted_string_attrs[mapped['number.names'][i]] = + mapped['number.values'][i].toString(); + } + for (let i = 0; i < mapped['bool.names'].length; i++) { + converted_string_attrs[mapped['bool.names'][i]] = + mapped['bool.values'][i].toString(); + } + return converted_string_attrs; +}; + +class VectorMetricParser extends ParsingInterface<VectorMetric, MetricModel> { _parse(metric: VectorMetric): MetricModel { return { - _string_attributes: metric.b, + _string_attributes: convertToStringMap(metric.b),
can we rollback this for now ? I'd prefer this to be applied in following PR. ideally, this PR should only update types
hyperdx
github_2023
typescript
200
hyperdxio
wrn14897
@@ -282,10 +277,10 @@ class VectorLogParser extends ParsingInterface<VectorLog> { } } -class VectorMetricParser extends ParsingInterface<VectorMetric> { +class VectorMetricParser extends ParsingInterface<VectorMetric, MetricModel> { _parse(metric: VectorMetric): MetricModel { return { - _string_attributes: metric.b, + _string_attributes: metric.b as any,
nit: we can leave a TODO note here if you want to
hyperdx
github_2023
typescript
217
hyperdxio
MikeShi42
@@ -18,6 +19,79 @@ import styles from '../styles/LogSidePanel.module.scss'; const CHART_HEIGHT = 300; const defaultTimeRange = parseTimeQuery('Past 1h', false); +const PodDetailsProperty = React.memo( + ({ label, value }: { label: string; value?: string }) => { + if (!value) return null; + return ( + <div className="pe-4"> + <Text size="xs" color="gray.6"> + {label} + </Text> + <Text size="sm" color="gray.3"> + {value} + </Text> + </div> + ); + }, +); + +const PodDetails = ({ + podName, + dateRange, +}: { + podName: string; + dateRange: [Date, Date]; +}) => { + const { data } = api.useMultiSeriesChart({
hrmm this might be a bit expensive to pull this data as this will cause us to scan all rows in the db over the time range. We can probably use `useLogBatch` with limit 1 and extra fields set to those properties, which will just scan for 1 row of data and return immediately.
hyperdx
github_2023
others
212
hyperdxio
jaggederest
@@ -1,7 +1,18 @@ +[sinks.go_parser] +type = "http" +uri = "http://go-parser:7777" +inputs = ["go_spans"] # only send spans for now +compression = "gzip" +encoding.codec = "json" +batch.max_bytes = 10485760 # 10MB, required for rrweb payloads
Do we want rrweb spans? They should never have a db.statement right? Just double checking
hyperdx
github_2023
go
212
hyperdxio
jaggederest
@@ -0,0 +1,167 @@ +package main + +import ( + "bytes" + "compress/gzip" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "slices" + "time" + + "github.com/DataDog/go-sqllexer" + "github.com/gin-gonic/gin" + "github.com/hashicorp/go-retryablehttp" + "github.com/xwb1989/sqlparser" +) + +var ( + VERSION = "0.0.1" + PORT = os.Getenv("PORT") + AGGREGATOR_URL = os.Getenv("AGGREGATOR_API_URL") + // https://opentelemetry.io/docs/specs/semconv/database/database-spans/#:~:text=db.system%20has%20the%20following%20list%20of%20well%2Dknown%20values + NON_SQL_DB_SYSTEMS = []string{ + "adabas", + "filemaker", + "coldfusion", + "cassandra",
I think Cassandra is close enough to parse with sql-lexer, for now.
hyperdx
github_2023
go
212
hyperdxio
jaggederest
@@ -0,0 +1,167 @@ +package main + +import ( + "bytes" + "compress/gzip" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "slices" + "time" + + "github.com/DataDog/go-sqllexer" + "github.com/gin-gonic/gin" + "github.com/hashicorp/go-retryablehttp" + "github.com/xwb1989/sqlparser" +) + +var ( + VERSION = "0.0.1" + PORT = os.Getenv("PORT") + AGGREGATOR_URL = os.Getenv("AGGREGATOR_API_URL") + // https://opentelemetry.io/docs/specs/semconv/database/database-spans/#:~:text=db.system%20has%20the%20following%20list%20of%20well%2Dknown%20values + NON_SQL_DB_SYSTEMS = []string{ + "adabas", + "filemaker", + "coldfusion", + "cassandra", + "hbase", + "mongodb", + "redis", + "couchbase",
Couchbase is sql-like
hyperdx
github_2023
typescript
193
hyperdxio
wrn14897
@@ -1516,6 +1516,112 @@ export const getMultiSeriesChartLegacyFormat = async ({ }; }; +// This query needs to be generalized and replaced once use-case matures +export const getSpanPerformanceChart = async ({ + parentSpanWhere, + childrenSpanWhere, + teamId, + tableVersion, + maxNumGroups, + propertyTypeMappingsModel, + startTime, + endTime, +}: { + parentSpanWhere: string; + childrenSpanWhere: string; + tableVersion: number | undefined; + teamId: string; + maxNumGroups: number; + endTime: number; // unix in ms, + startTime: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; +}) => { + const tableName = getLogStreamTableName(tableVersion, teamId); + + const parentSpanWhereCondition = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: parentSpanWhere, + startTime, + }); + + const childrenSpanWhereCondition = await buildSearchQueryWhereCondition({
nit: can do this concurrently ?
hyperdx
github_2023
typescript
198
hyperdxio
MikeShi42
@@ -1130,8 +1138,14 @@ const buildEventSeriesQuery = async ({ const label = SqlString.escape(`${aggFn}(${field})`); const selectClause = [ - isCountFn + aggFn === AggFn.Count ? 'toFloat64(count()) as data' + : aggFn === AggFn.CountPerSec + ? "divide(count(), age('ss', MIN(timestamp), MAX(timestamp))) as data"
shouldn't it be divided by the diff of the time range given by the user? (I noticed this issue from the test values showing 0.75 for the rate instead of what I'd calculate by hand to be 0.6)
hyperdx
github_2023
typescript
198
hyperdxio
jaggederest
@@ -1130,8 +1138,35 @@ const buildEventSeriesQuery = async ({ const label = SqlString.escape(`${aggFn}(${field})`); const selectClause = [ - isCountFn + aggFn === AggFn.Count ? 'toFloat64(count()) as data' + : aggFn === AggFn.CountPerSec + ? granularity + ? SqlString.format('divide(count(), ?) as data', [
Fine for now, but I tend to think these kind of duplicates are safer as `CountPer('second')` or equivalent. Maybe a TODO here
hyperdx
github_2023
typescript
194
hyperdxio
wrn14897
@@ -233,13 +251,13 @@ abstract class ParsingInterface<T> { abstract _parse( log: T, ...args: any[] - ): LogStreamModel | MetricModel | RrwebEventModel; + ): Promise<LogStreamModel | MetricModel | RrwebEventModel>; - parse(logs: T[], ...args: any[]) { + async parse(logs: T[], ...args: any[]) { const parsedLogs: any[] = [];
Can we type this `parsedLogs` here ? I think that's the reason why ts didn't pick it up. Also its better to double check all any types in this file
hyperdx
github_2023
typescript
188
hyperdxio
MikeShi42
@@ -170,6 +171,14 @@ export const mapObjectToKeyValuePairs = ( } } + if (output['string.names'].includes('db.statement')) { + const index = output['string.names'].indexOf('db.statement'); + const value = output['string.values'][index]; + const obfuscated = await sqlObfuscator(value); + output['string.names'].push('db.statement.obfuscated');
I feel a bit iffy attaching the property to `db.statement` since `db.statement` itself is a string, so this object wouldn't be JSON-serializable anymore. I wonder if we should just call it like `db.sql.normalized` (not otel as the client themselves will attach the normalized query to `db.statement`, but seems sensible?)
hyperdx
github_2023
typescript
188
hyperdxio
MikeShi42
@@ -170,6 +171,14 @@ export const mapObjectToKeyValuePairs = ( } } + if (output['string.names'].includes('db.statement')) {
If it's useful, we can also check `db.system` https://opentelemetry.io/docs/specs/semconv/database/database-spans/#:~:text=db.system%20has%20the%20following%20list%20of%20well%2Dknown%20values.
hyperdx
github_2023
typescript
189
hyperdxio
jaggederest
@@ -269,7 +288,230 @@ export default function ServiceDashboardPage() { </Grid.Col> </Grid> </Tabs.Panel> - <Tabs.Panel value="http">HTTP Service</Tabs.Panel> + <Tabs.Panel value="http"> + <Grid> + <Grid.Col span={6}> + <Card p="md"> + <Card.Section p="md" py="xs" withBorder> + Request Error Rate + </Card.Section> + <Card.Section p="md" py="sm" h={CHART_HEIGHT}> + <HDXMultiSeriesLineChart + config={{ + dateRange, + granularity: convertDateRangeToGranularityString( + dateRange, + 60, + ), + series: [ + { + displayName: 'Error Rate %', + table: 'logs', + type: 'time', + aggFn: 'count', + where: scopeWhereQuery( + 'span.kind:"server" level:"error"', + ), + groupBy: [], + numberFormat: + ERROR_RATE_PERCENTAGE_NUMBER_FORMAT, + }, + { + table: 'logs', + type: 'time', + aggFn: 'count', + field: '', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: [], + numberFormat: + ERROR_RATE_PERCENTAGE_NUMBER_FORMAT, + }, + ], + seriesReturnType: 'ratio', + }} + /> + </Card.Section> + </Card> + </Grid.Col> + <Grid.Col span={6}> + <Card p="md"> + <Card.Section p="md" py="xs" withBorder> + Request Throughput + </Card.Section> + <Card.Section p="md" py="sm" h={CHART_HEIGHT}> + <HDXMultiSeriesLineChart + config={{ + dateRange, + granularity: convertDateRangeToGranularityString( + dateRange, + 60, + ), + series: [ + { + displayName: 'Requests', + table: 'logs', + type: 'time', + aggFn: 'count', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: [], + numberFormat: { + ...INTEGER_NUMBER_FORMAT, + unit: 'requests', + }, + }, + ], + seriesReturnType: 'column', + }} + /> + </Card.Section> + </Card> + </Grid.Col> + <Grid.Col span={6}> + <Card p="md"> + <Card.Section p="md" py="xs" withBorder> + 20 Top Most Time Consuming Endpoints + </Card.Section> + <Card.Section p="md" py="sm" h={CHART_HEIGHT}> + <HDXListBarChart + config={{ + dateRange, + granularity: convertDateRangeToGranularityString( + dateRange, + 60, + ), + series: [ + { + table: 'logs', + type: 'time', + aggFn: 'sum', 
+ field: 'duration', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: ['span_name'], + numberFormat: MS_NUMBER_FORMAT, + }, + ], + }} + /> + </Card.Section> + </Card> + </Grid.Col> + <Grid.Col span={6}> + <EndpointLatencyTile + dateRange={dateRange} + scopeWhereQuery={scopeWhereQuery} + /> + </Grid.Col> + <Grid.Col span={12}> + <Card p="md"> + <Card.Section p="md" py="xs" withBorder> + Endpoints + </Card.Section> + <Card.Section p="md" py="sm" h={CHART_HEIGHT}> + <HDXMultiSeriesTableChart + config={{ + groupColumnName: 'Endpoint', + dateRange, + granularity: convertDateRangeToGranularityString( + dateRange, + 60, + ), + series: [ + { + displayName: 'Throughput', + table: 'logs', + type: 'table', + aggFn: 'count', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: ['span_name'], + }, + { + displayName: 'P95', + table: 'logs', + type: 'table', + aggFn: 'p95', + field: 'duration', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: ['span_name'], + numberFormat: { + factor: 1, + output: 'number', + mantissa: 2, + thousandSeparated: true, + average: false, + decimalBytes: false, + unit: 'ms', + }, + }, + { + displayName: 'Median', + table: 'logs', + type: 'table', + aggFn: 'p50', + field: 'duration', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: ['span_name'], + numberFormat: { + factor: 1, + output: 'number', + mantissa: 2, + thousandSeparated: true, + average: false, + decimalBytes: false, + unit: 'ms', + }, + }, + { + displayName: 'Total', + table: 'logs', + type: 'table', + aggFn: 'sum', + field: 'duration', + where: scopeWhereQuery('span.kind:"server"'), + groupBy: ['span_name'], + sortOrder: 'desc', + }, + { + displayName: 'Errors', + table: 'logs', + type: 'table', + aggFn: 'count', + field: '', + where: scopeWhereQuery( + 'span.kind:"server" level:"error"', + ), + groupBy: ['span_name'], + }, + ], + seriesReturnType: 'column', + }} + /> + </Card.Section> + </Card> + </Grid.Col> + <Grid.Col span={12}> + <Card p="md"> + 
<Card.Section p="md" py="xs" withBorder> + Debug
Should this Debug section still be here?
hyperdx
github_2023
typescript
184
hyperdxio
MikeShi42
@@ -1053,7 +1054,7 @@ const buildEventSeriesQuery = async ({ endTime, field, granularity, - groupBy, + groupBys,
We should continue to call this `groupBy`, since everywhere else assumes a groupBy is an array
hyperdx
github_2023
typescript
184
hyperdxio
MikeShi42
@@ -1096,14 +1097,39 @@ const buildEventSeriesQuery = async ({ ? buildSearchColumnName(propertyTypeMappingsModel.get(field), field) : ''; - const hasGroupBy = groupBy != '' && groupBy != null; const isCountFn = aggFn === AggFn.Count; - const groupByField = - hasGroupBy && - buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + const groupByFields: string[] = [];
mega nit: probably call this groupByColumnNames, fields to me usually means the user-facing field I think
hyperdx
github_2023
typescript
184
hyperdxio
MikeShi42
@@ -1096,14 +1097,36 @@ const buildEventSeriesQuery = async ({ ? buildSearchColumnName(propertyTypeMappingsModel.get(field), field) : ''; - const hasGroupBy = groupBy != '' && groupBy != null; const isCountFn = aggFn === AggFn.Count; - const groupByField = - hasGroupBy && - buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + const groupByFields: string[] = []; + for (const groupBy of groupBys) {
this feels like we should just do a `.map` and throw if the column name can't be generated (otherwise we'd get a non-sensical group value right?)
hyperdx
github_2023
typescript
182
hyperdxio
wrn14897
@@ -1299,12 +1297,6 @@ export const queryMultiSeriesChart = async ({ const rows = await client.query({ query, format: 'JSON', - clickhouse_settings: {
nit: I think we can still keep this to be consistent with other methods
hyperdx
github_2023
typescript
181
hyperdxio
MikeShi42
@@ -897,6 +899,9 @@ export default function AppNav({ fixed = false }: { fixed?: boolean }) { </Link> </div> </div> + <div className="d-flex justify-content-end align-items-end">
nit: since this isn't super important, we can probably slap a `fs-7` in the className to make it smaller
hyperdx
github_2023
typescript
40
hyperdxio
wrn14897
@@ -248,3 +248,36 @@ export function S3Icon({ style, width }: IconProps) { </svg> ); } + +export function Eye({ style, width }: IconProps) {
I think we can use bootstrap icons instead https://icons.getbootstrap.com/?q=eye
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -0,0 +1,142 @@ +import * as clickhouse from '@/clickhouse';
nit: can move this to `fixtures.ts`
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics
Not sure I fully understand this. So if Mas/Min aggs are the same for both, why do we need extra aggFn checking here ?
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -1,4 +1,14 @@ +import _ from 'lodash';
Great tests!! not required in this PR, but I'd write tests for `buildEventSeriesQuery` and `queryMultiSeriesChart` to make sure queries are properly generated
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column'; + queries: { query: string; hasGroupBy: boolean }[]; +}) => { + // For now only supports same-table series with the same groupBy + + const seriesCTEs = SqlString.raw( + 'WITH ' + queries.map((q, i) => `series_${i} AS (${q.query})`).join(',\n'), + ); + + // Only join on group bys if all queries have group bys + // TODO: This will not work for an array of group by fields + const allQueiesHaveGroupBy = queries.every(q => q.hasGroupBy); + + let leftJoin = ''; + // Join every series after the first one + for (let i = 1; i < queries.length; i++) { + leftJoin += `LEFT JOIN series_${i} AS series_${i} ON series_${i}.ts_bucket=series_0.ts_bucket${ + allQueiesHaveGroupBy ? ` AND series_${i}.group = series_0.group` : '' + }\n`; + } + + const select = + seriesReturnType === 'column' + ? queries + .map((_, i) => { + return `series_${i}.data as "series_${i}.data"`; + }) + .join(',\n') + : 'series_0.data / series_1.data as "series_0.data"'; + + // Return each series data as a separate column + const query = SqlString.format( + `? + ,raw_groups AS ( + SELECT + ?, + series_0.ts_bucket as ts_bucket, + series_0.group as group + FROM series_0 AS series_0 + ? + ), groups AS ( + SELECT *, MAX(${ + seriesReturnType === 'column' + ? 
`greatest(${queries + .map((_, i) => `series_${i}.data`) + .join(', ')})` + : 'series_0.data' + }) OVER (PARTITION BY group) as rank_order_by_value + FROM raw_groups + ), final AS ( + SELECT *, DENSE_RANK() OVER (ORDER BY rank_order_by_value DESC) as rank + FROM groups + ) + SELECT * + FROM final + WHERE rank <= ? + ORDER BY ts_bucket ASC + `, + [seriesCTEs, SqlString.raw(select), SqlString.raw(leftJoin), maxNumGroups], + ); + + const rows = await client.query({ + query, + format: 'JSON', + clickhouse_settings: { + additional_table_filters: buildLogStreamAdditionalFilters(
hmm....this is critical and tricky. How do we apply filters separately for log and metric tables ? for now at least we need to execute two queries and merge them afterward
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column'; + queries: { query: string; hasGroupBy: boolean }[]; +}) => { + // For now only supports same-table series with the same groupBy + + const seriesCTEs = SqlString.raw( + 'WITH ' + queries.map((q, i) => `series_${i} AS (${q.query})`).join(',\n'), + ); + + // Only join on group bys if all queries have group bys + // TODO: This will not work for an array of group by fields + const allQueiesHaveGroupBy = queries.every(q => q.hasGroupBy); + + let leftJoin = ''; + // Join every series after the first one + for (let i = 1; i < queries.length; i++) { + leftJoin += `LEFT JOIN series_${i} AS series_${i} ON series_${i}.ts_bucket=series_0.ts_bucket${ + allQueiesHaveGroupBy ? ` AND series_${i}.group = series_0.group` : '' + }\n`; + } + + const select = + seriesReturnType === 'column' + ? queries + .map((_, i) => { + return `series_${i}.data as "series_${i}.data"`; + }) + .join(',\n') + : 'series_0.data / series_1.data as "series_0.data"'; + + // Return each series data as a separate column + const query = SqlString.format( + `? + ,raw_groups AS ( + SELECT + ?, + series_0.ts_bucket as ts_bucket, + series_0.group as group + FROM series_0 AS series_0 + ? + ), groups AS ( + SELECT *, MAX(${ + seriesReturnType === 'column' + ? 
`greatest(${queries + .map((_, i) => `series_${i}.data`) + .join(', ')})` + : 'series_0.data' + }) OVER (PARTITION BY group) as rank_order_by_value + FROM raw_groups + ), final AS ( + SELECT *, DENSE_RANK() OVER (ORDER BY rank_order_by_value DESC) as rank + FROM groups + ) + SELECT * + FROM final + WHERE rank <= ? + ORDER BY ts_bucket ASC + `, + [seriesCTEs, SqlString.raw(select), SqlString.raw(leftJoin), maxNumGroups], + ); + + const rows = await client.query({ + query, + format: 'JSON', + clickhouse_settings: { + additional_table_filters: buildLogStreamAdditionalFilters( + tableVersion, + teamId, + ), + }, + }); + + const result = await rows.json< + ResponseJSON<{ + ts_bucket: number; + group: string; + [series_data: `series_${number}.data`]: number; + }> + >(); + return result; +}; + +export const getMultiSeriesChart = async ({ + series, + endTime, + granularity, + maxNumGroups, + propertyTypeMappingsModel, + startTime, + tableVersion, + teamId, + seriesReturnType = SeriesReturnType.Column, +}: { + series: z.infer<typeof chartSeriesSchema>[]; + endTime: number; // unix in ms, + startTime: number; // unix in ms + granularity: string | undefined; // can be undefined in the number chart + maxNumGroups: number; + propertyTypeMappingsModel?: LogsPropertyTypeMappingsModel; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: SeriesReturnType; +}) => { + let queries: { query: string; hasGroupBy: boolean }[] = []; + if ('table' in series[0] && series[0].table === 'logs') {
This assumes all series have the same table ?
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column'; + queries: { query: string; hasGroupBy: boolean }[]; +}) => { + // For now only supports same-table series with the same groupBy + + const seriesCTEs = SqlString.raw( + 'WITH ' + queries.map((q, i) => `series_${i} AS (${q.query})`).join(',\n'), + ); + + // Only join on group bys if all queries have group bys + // TODO: This will not work for an array of group by fields + const allQueiesHaveGroupBy = queries.every(q => q.hasGroupBy); + + let leftJoin = ''; + // Join every series after the first one + for (let i = 1; i < queries.length; i++) { + leftJoin += `LEFT JOIN series_${i} AS series_${i} ON series_${i}.ts_bucket=series_0.ts_bucket${ + allQueiesHaveGroupBy ? ` AND series_${i}.group = series_0.group` : '' + }\n`; + } + + const select = + seriesReturnType === 'column' + ? queries + .map((_, i) => { + return `series_${i}.data as "series_${i}.data"`; + }) + .join(',\n') + : 'series_0.data / series_1.data as "series_0.data"'; + + // Return each series data as a separate column + const query = SqlString.format( + `? + ,raw_groups AS ( + SELECT + ?, + series_0.ts_bucket as ts_bucket, + series_0.group as group + FROM series_0 AS series_0 + ? + ), groups AS ( + SELECT *, MAX(${ + seriesReturnType === 'column' + ? 
`greatest(${queries + .map((_, i) => `series_${i}.data`) + .join(', ')})` + : 'series_0.data' + }) OVER (PARTITION BY group) as rank_order_by_value + FROM raw_groups + ), final AS ( + SELECT *, DENSE_RANK() OVER (ORDER BY rank_order_by_value DESC) as rank + FROM groups + ) + SELECT * + FROM final + WHERE rank <= ? + ORDER BY ts_bucket ASC + `, + [seriesCTEs, SqlString.raw(select), SqlString.raw(leftJoin), maxNumGroups], + ); + + const rows = await client.query({ + query, + format: 'JSON', + clickhouse_settings: { + additional_table_filters: buildLogStreamAdditionalFilters( + tableVersion, + teamId, + ), + }, + }); + + const result = await rows.json< + ResponseJSON<{ + ts_bucket: number; + group: string; + [series_data: `series_${number}.data`]: number; + }> + >(); + return result; +}; + +export const getMultiSeriesChart = async ({ + series, + endTime, + granularity, + maxNumGroups, + propertyTypeMappingsModel, + startTime, + tableVersion, + teamId, + seriesReturnType = SeriesReturnType.Column, +}: { + series: z.infer<typeof chartSeriesSchema>[]; + endTime: number; // unix in ms, + startTime: number; // unix in ms + granularity: string | undefined; // can be undefined in the number chart + maxNumGroups: number; + propertyTypeMappingsModel?: LogsPropertyTypeMappingsModel; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: SeriesReturnType; +}) => { + let queries: { query: string; hasGroupBy: boolean }[] = []; + if ('table' in series[0] && series[0].table === 'logs') { + if (propertyTypeMappingsModel == null) { + throw new Error('propertyTypeMappingsModel is required for logs chart'); + } + + queries = await Promise.all( + series.map(s => { + if (s.type != 'time' && s.type != 'table') { + throw new Error(`Unsupported series type: ${s.type}`); + } + + return buildEventSeriesQuery({ + aggFn: s.aggFn, + endTime, + field: s.field, + granularity, + groupBy: s.groupBy[0], + maxNumGroups, + propertyTypeMappingsModel, + q: s.where, + sortOrder: s.type 
=== 'table' ? s.sortOrder : undefined, + startTime, + tableVersion, + teamId, + }); + }), + ); + } else if ('table' in series[0] && series[0].table === 'metrics') { + queries = await Promise.all( + series.map(s => { + if (s.type != 'time' && s.type != 'table') { + throw new Error(`Unsupported series type: ${s.type}`); + } + if (s.field == null) { + throw new Error('Metric name is required'); + } + if (s.metricDataType == null) { + throw new Error('Metric data type is required'); + } + + return buildMetricSeriesQuery({ + aggFn: s.aggFn, + endTime, + name: s.field, + granularity, + groupBy: s.groupBy[0], + // maxNumGroups, + q: s.where, + // sortOrder: s.sortOrder, + startTime, + teamId, + dataType: s.metricDataType, + }); + }), + ); + } + + return queryMultiSeriesChart({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType, + queries, + }); +}; + +export const getMultiSeriesChartLegacyFormat = async ({
I think we need to test this guy to make sure it outputs the same data as the original chart method
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -0,0 +1,115 @@ +import type { Row } from '@clickhouse/client'; +import opentelemetry, { SpanStatusCode } from '@opentelemetry/api'; +import express from 'express'; +import { isNumber, omit, parseInt } from 'lodash'; +import ms from 'ms'; +import { serializeError } from 'serialize-error'; +import { z } from 'zod'; +import { validateRequest } from 'zod-express-middleware'; + +import * as clickhouse from '@/clickhouse'; +import { customColumnMapType } from '@/clickhouse/searchQueryParser'; +import { getTeam } from '@/controllers/team'; +import logger from '@/utils/logger'; +import { getLogsPatterns } from '@/utils/miner'; +import { LimitedSizeQueue } from '@/utils/queue'; +import { chartSeriesSchema } from '@/utils/zod'; + +const router = express.Router(); + +router.post( + '/series', + validateRequest({ + query: z.object({ + endTime: z.string(), + granularity: z.nativeEnum(clickhouse.Granularity).optional(), + startTime: z.string(), + seriesReturnType: z.optional(z.nativeEnum(clickhouse.SeriesReturnType)), + }),
nit: can move this to `body` later
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -336,8 +336,8 @@ export const processAlert = async (now: Date, alert: AlertDocument) => { // Logs Source let checksData: | Awaited<ReturnType<typeof clickhouse.checkAlert>> - | Awaited<ReturnType<typeof clickhouse.getLogsChart>> | Awaited<ReturnType<typeof clickhouse.getMetricsChart>>
can also move `getMetricsChart` return type
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({
I can't really read this one and it hurts my brain. I think a simple test helps (at least I can see what the output query looks like). What I can tell so far is it concats a bunch of CTE and merges them afterward
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data'
do we want to do this for other counts ?
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column';
nit: `SeriesReturnType`
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column'; + queries: { query: string; hasGroupBy: boolean }[]; +}) => { + // For now only supports same-table series with the same groupBy + + const seriesCTEs = SqlString.raw( + 'WITH ' + queries.map((q, i) => `series_${i} AS (${q.query})`).join(',\n'), + ); + + // Only join on group bys if all queries have group bys + // TODO: This will not work for an array of group by fields + const allQueiesHaveGroupBy = queries.every(q => q.hasGroupBy); + + let leftJoin = ''; + // Join every series after the first one + for (let i = 1; i < queries.length; i++) { + leftJoin += `LEFT JOIN series_${i} AS series_${i} ON series_${i}.ts_bucket=series_0.ts_bucket${ + allQueiesHaveGroupBy ? ` AND series_${i}.group = series_0.group` : '' + }\n`; + } + + const select = + seriesReturnType === 'column' + ? queries + .map((_, i) => { + return `series_${i}.data as "series_${i}.data"`; + }) + .join(',\n') + : 'series_0.data / series_1.data as "series_0.data"'; + + // Return each series data as a separate column + const query = SqlString.format( + `? + ,raw_groups AS ( + SELECT + ?, + series_0.ts_bucket as ts_bucket, + series_0.group as group + FROM series_0 AS series_0 + ? + ), groups AS ( + SELECT *, MAX(${ + seriesReturnType === 'column' + ? `greatest(${queries + .map((_, i) => `series_${i}.data`) + .join(', ')})`
style suggestion: any chance we can move this to the format 2nd arg ? (to improve readability)
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column'; + queries: { query: string; hasGroupBy: boolean }[]; +}) => { + // For now only supports same-table series with the same groupBy + + const seriesCTEs = SqlString.raw( + 'WITH ' + queries.map((q, i) => `series_${i} AS (${q.query})`).join(',\n'),
style suggestion: I'd move 'WITH' to the final query so its clear that we are building a bunch of CTEs
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,588 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate && granularity != null; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? startTime - ms(granularity) + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data' + : aggFn === AggFn.Sum + ? `SUM(value) as data` + : aggFn === AggFn.Avg + ? `AVG(value) as data` + : aggFn === AggFn.Max + ? `MAX(value) as data` + : aggFn === AggFn.Min + ? `MIN(value) as data` + : aggFn === AggFn.SumRate + ? `SUM(rate) as data` + : aggFn === AggFn.AvgRate + ? `AVG(rate) as data` + : aggFn === AggFn.MaxRate + ? 
`MAX(rate) as data` + : aggFn === AggFn.MinRate + ? `MIN(rate) as data` + : `quantile(${ + aggFn === AggFn.P50 || aggFn === AggFn.P50Rate + ? '0.5' + : aggFn === AggFn.P90 || aggFn === AggFn.P90Rate + ? '0.90' + : aggFn === AggFn.P95 || aggFn === AggFn.P95Rate + ? '0.95' + : '0.99' + })(${isRate ? 'rate' : 'value'}) as data`, + ); + } else { + logger.error(`Unsupported data type: ${dataType}`); + } + + // used to sum/avg/percentile Sum metrics + // max/min don't require pre-bucketing the Sum timeseries + const sumMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + min(value) as value, + _string_attributes, + name + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY + name, + _string_attributes, + timestamp + ORDER BY + _string_attributes, + timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const rateMetricSource = SqlString.format( + ` + SELECT + if( + runningDifference(value) < 0 + OR neighbor(_string_attributes, -1, _string_attributes) != _string_attributes, + nan, + runningDifference(value) + ) AS rate, + timestamp, + _string_attributes, + name + FROM (?) + WHERE isNaN(rate) = 0 + ${shouldModifyStartTime ? 'AND timestamp >= fromUnixTimestamp(?)' : ''} + `.trim(), + [ + SqlString.raw(sumMetricSource), + ...(shouldModifyStartTime ? [startTime / 1000] : []), + ], + ); + + const gaugeMetricSource = SqlString.format( + ` + SELECT + toStartOfInterval(timestamp, INTERVAL ?) as timestamp, + name, + last_value(value) as value, + _string_attributes + FROM ?? + WHERE name = ? + AND data_type = ? + AND (?) + GROUP BY name, _string_attributes, timestamp + ORDER BY timestamp ASC + `.trim(), + [granularity, tableName, name, dataType, SqlString.raw(whereClause)], + ); + + const query = SqlString.format( + ` + WITH metrics AS (?) + SELECT ? + FROM metrics + GROUP BY group, ts_bucket + ORDER BY ts_bucket ASC + ${ + granularity != null + ? 
`WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + } + `, + [ + SqlString.raw( + isRate + ? rateMetricSource + : // Max/Min aggs are the same for both Sum and Gauge metrics + dataType === 'Sum' && aggFn != AggFn.Max && aggFn != AggFn.Min + ? sumMetricSource + : gaugeMetricSource, + ), + SqlString.raw(selectClause.join(',')), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +const buildEventSeriesQuery = async ({ + aggFn, + endTime, + field, + granularity, + groupBy, + maxNumGroups, + propertyTypeMappingsModel, + q, + sortOrder, + startTime, + tableVersion, + teamId, +}: { + aggFn: AggFn; + endTime: number; // unix in ms, + field?: string; + granularity: string | undefined; // can be undefined in the number chart + groupBy: string; + maxNumGroups: number; + propertyTypeMappingsModel: LogsPropertyTypeMappingsModel; + q: string; + sortOrder?: 'asc' | 'desc'; + startTime: number; // unix in ms + tableVersion: number | undefined; + teamId: string; +}) => { + if (isRateAggFn(aggFn)) { + throw new Error('Rate is not supported in logs chart'); + } + + const tableName = getLogStreamTableName(tableVersion, teamId); + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime, + }); + + if (field == null && aggFn !== AggFn.Count) { + throw new Error( + 'Field is required for all aggregation functions except Count', + ); + } + + const selectField = + field != null + ? 
buildSearchColumnName(propertyTypeMappingsModel.get(field), field) + : ''; + + const hasGroupBy = groupBy != '' && groupBy != null; + const isCountFn = aggFn === AggFn.Count; + const groupByField = + hasGroupBy && + buildSearchColumnName(propertyTypeMappingsModel.get(groupBy), groupBy); + + const serializer = new SQLSerializer(propertyTypeMappingsModel); + + const label = SqlString.escape(`${aggFn}(${field})`); + + const selectClause = [ + isCountFn + ? 'toFloat64(count()) as data' + : aggFn === AggFn.Sum + ? `sum(${selectField}) as data` + : aggFn === AggFn.Avg + ? `avg(${selectField}) as data` + : aggFn === AggFn.Max + ? `max(${selectField}) as data` + : aggFn === AggFn.Min + ? `min(${selectField}) as data` + : aggFn === AggFn.CountDistinct + ? `count(distinct ${selectField}) as data` + : `quantile(${ + aggFn === AggFn.P50 + ? '0.5' + : aggFn === AggFn.P90 + ? '0.90' + : aggFn === AggFn.P95 + ? '0.95' + : '0.99' + })(${selectField}) as data`, + granularity != null + ? `toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ${granularity})) as ts_bucket` + : "'0' as ts_bucket", + groupByField ? `${groupByField} as group` : `'' as group`, // FIXME: should we fallback to use aggFn as group + `${label} as label`, + ].join(','); + + const groupByClause = `ts_bucket ${groupByField ? `, ${groupByField}` : ''}`; + + const query = SqlString.format( + ` + SELECT ? + FROM ?? + WHERE ? AND (?) ? ? + GROUP BY ? + ORDER BY ts_bucket ASC + ${ + granularity != null + ? `WITH FILL + FROM toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + TO toUnixTimestamp(toStartOfInterval(toDateTime(?), INTERVAL ?)) + STEP ?` + : '' + }${ + sortOrder === 'asc' || sortOrder === 'desc' ? `, data ${sortOrder}` : '' + } + `, + [ + SqlString.raw(selectClause), + tableName, + buildTeamLogStreamWhereCondition(tableVersion, teamId), + SqlString.raw(whereClause), + SqlString.raw( + !isCountFn && field != null + ? 
` AND (${await serializer.isNotNull(field, false)})` + : '', + ), + SqlString.raw( + hasGroupBy + ? ` AND (${await serializer.isNotNull(groupBy, false)})` + : '', + ), + SqlString.raw(groupByClause), + ...(granularity != null + ? [ + startTime / 1000, + granularity, + endTime / 1000, + granularity, + ms(granularity) / 1000, + ] + : []), + ], + ); + + return { + query, + hasGroupBy, + }; +}; + +export const queryMultiSeriesChart = async ({ + maxNumGroups, + tableVersion, + teamId, + seriesReturnType = 'column', + queries, +}: { + maxNumGroups: number; + tableVersion: number | undefined; + teamId: string; + seriesReturnType?: 'ratio' | 'column'; + queries: { query: string; hasGroupBy: boolean }[]; +}) => { + // For now only supports same-table series with the same groupBy + + const seriesCTEs = SqlString.raw( + 'WITH ' + queries.map((q, i) => `series_${i} AS (${q.query})`).join(',\n'), + ); + + // Only join on group bys if all queries have group bys + // TODO: This will not work for an array of group by fields + const allQueiesHaveGroupBy = queries.every(q => q.hasGroupBy); + + let leftJoin = ''; + // Join every series after the first one + for (let i = 1; i < queries.length; i++) { + leftJoin += `LEFT JOIN series_${i} AS series_${i} ON series_${i}.ts_bucket=series_0.ts_bucket${
curious why do we need 'AS' here ?
hyperdx
github_2023
typescript
171
hyperdxio
wrn14897
@@ -904,6 +912,626 @@ export const getMetricsChart = async ({ return result; }; +export const buildMetricSeriesQuery = async ({ + aggFn, + dataType, + endTime, + granularity, + groupBy, + name, + q, + startTime, + teamId, + sortOrder, +}: { + aggFn: AggFn; + dataType: MetricsDataType; + endTime: number; // unix in ms, + granularity?: Granularity | string; + groupBy?: string; + name: string; + q: string; + startTime: number; // unix in ms + teamId: string; + sortOrder?: 'asc' | 'desc'; +}) => { + const tableName = `default.${TableName.Metric}`; + const propertyTypeMappingsModel = await buildMetricsPropertyTypeMappingsModel( + undefined, // default version + teamId, + ); + + const isRate = isRateAggFn(aggFn); + + const shouldModifyStartTime = isRate; + + // If it's a rate function, then we'll need to look 1 window back to calculate + // the initial rate value. + // We'll filter this extra bucket out later + const modifiedStartTime = shouldModifyStartTime + ? // If granularity is not defined (tables), we'll just look behind 5min + startTime - ms(granularity ?? '5 minute') + : startTime; + + const whereClause = await buildSearchQueryWhereCondition({ + endTime, + propertyTypeMappingsModel, + query: q, + startTime: modifiedStartTime, + }); + const selectClause = [ + granularity != null + ? SqlString.format( + 'toUnixTimestamp(toStartOfInterval(timestamp, INTERVAL ?)) AS ts_bucket', + [granularity], + ) + : "'0' as ts_bucket", + groupBy + ? SqlString.format(`_string_attributes[?] AS group`, [groupBy]) + : "'' AS group", + ]; + + const hasGroupBy = groupBy != '' && groupBy != null; + + if (dataType === MetricsDataType.Gauge || dataType === MetricsDataType.Sum) { + selectClause.push( + aggFn === AggFn.Count + ? 'COUNT(value) as data'
nit: I wonder if we also want to do `toFloat64` here
hyperdx
github_2023
typescript
172
hyperdxio
wrn14897
@@ -54,114 +90,78 @@ function disableAlert(alertId?: string) { // TODO do some lovely disabling of the alert here } -function AlertDetails({ - alert, - history, -}: { - alert: AlertData; - history: AlertHistory[]; -}) { +function AlertDetails({ alert }: { alert: AlertData }) { // TODO enable once disable handler is implemented above const showDisableButton = false; + return ( - <> - <div className="text-end"> + <div className={styles.alertRow}> + <Group> {alert.state === AlertState.ALERT && ( - <div className="badge bg-danger">ALERT</div> - )} - {alert.state === AlertState.OK && ( - <div className="badge bg-success">OK</div> + <Badge color="red" size="sm"> + Alert + </Badge> )} + {alert.state === AlertState.OK && <Badge size="sm">Ok</Badge>} {alert.state === AlertState.DISABLED && ( - <div className="badge bg-secondary">DISABLED</div> - )}{' '} + <Badge color="gray" size="sm"> + Disabled + </Badge> + )} + + <Stack spacing={2}> + <div> + {alert.source === 'CHART' && alert.dashboard ? ( + <Link href={`/dashboards/${alert.dashboard._id}`}> + <a className={styles.alertLink} title="Dashboard"> + <i className="bi bi-graph-up text-slate-400 me-2 fs-8" /> + {alert.dashboard.name}
I think we have information to pull out the chart name, right ? (dashboard record + chartId)
hyperdx
github_2023
typescript
164
hyperdxio
MikeShi42
@@ -208,6 +210,14 @@ const MemoChart = memo(function MemoChart({ {isClickActive != null ? ( <ReferenceLine x={isClickActive.activeLabel} stroke="#ccc" /> ) : null} + {logReferenceTimestamp != null ? ( + <ReferenceLine + x={logReferenceTimestamp} + stroke="#ff5d5b" + strokeDasharray="3 3" + label="Log"
```suggestion label="Event" ``` we should call it an event since it could be a span or log
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +72,52 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + // may want to restrict this to reasonable time bounds + const alertHistories = await AlertHistory.find({ team: teamId }); + res.json({ + data: { + alerts: alerts, + alertHistories: alertHistories, + }, + }); + } catch (e) { + next(e); + } +}); + +router.get('/:id', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + const { id } = req.params; + if (teamId == null) { + return res.sendStatus(403); + } + if (!id) { + return res.sendStatus(400); + }
can use `zod` for the validation
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +72,52 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + // may want to restrict this to reasonable time bounds + const alertHistories = await AlertHistory.find({ team: teamId });
Any reason we want to fetch this ?
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +72,52 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId });
I wonder if we want to populate associated dashboard or chart record for the chart type alerts. It really depends on what we show on the frontend side
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +72,52 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + // may want to restrict this to reasonable time bounds + const alertHistories = await AlertHistory.find({ team: teamId }); + res.json({ + data: { + alerts: alerts, + alertHistories: alertHistories, + }, + }); + } catch (e) { + next(e); + } +}); + +router.get('/:id', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + const { id } = req.params; + if (teamId == null) { + return res.sendStatus(403); + } + if (!id) { + return res.sendStatus(400); + } + const alert = (await Alert.find({ _id: id, team: teamId })).pop();
can use `findOne` instead
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +72,52 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + // may want to restrict this to reasonable time bounds + const alertHistories = await AlertHistory.find({ team: teamId }); + res.json({ + data: { + alerts: alerts, + alertHistories: alertHistories, + }, + }); + } catch (e) { + next(e); + } +}); + +router.get('/:id', isUserAuthenticated, async (req, res, next) => { + try { + const teamId = req.user?.team; + const { id } = req.params; + if (teamId == null) { + return res.sendStatus(403); + } + if (!id) { + return res.sendStatus(400); + } + const alert = (await Alert.find({ _id: id, team: teamId })).pop(); + const alertHistories = await AlertHistory.find({ alert: id, team: teamId }); + if (!alert) { + return res.sendStatus(404); + }
better check this before fetching histories
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +74,57 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + const alertHistories: any = {}; + for (const alert of alerts) { + const histories = await AlertHistory.find({ alert: alert._id }) + .sort({ createdAt: -1 }) + .limit(20); + alertHistories[alert._id.toString()] = histories; + } + res.json({ + data: { + alerts: alerts, + histories: alertHistories, + }, + }); + } catch (e) { + next(e); + } +}); + +router.get('/:id', validateGet, async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const { id } = req.params; + const alert = await Alert.findOne({ _id: id, team: teamId }); + if (!alert) { + return res.sendStatus(404); + } + const alertHistories = await AlertHistory.find({ + alert: id, + team: teamId, + }).limit(20);
should we sort here ?
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +74,57 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + const alertHistories: any = {};
nit: I would suggest to type this if possible
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +74,57 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + const alertHistories: any = {}; + for (const alert of alerts) { + const histories = await AlertHistory.find({ alert: alert._id }) + .sort({ createdAt: -1 }) + .limit(20); + alertHistories[alert._id.toString()] = histories; + }
this chunk of code can be optimized using promise all to fetch histories concurrently
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -43,6 +46,13 @@ const zAlert = z const zAlertInput = zAlert; +const getHistory = async (alertId: string, teamId: string) => { + const histories = await AlertHistory.find({ alert: alertId, team: teamId }) + .sort({ createdAt: -1 }) + .limit(20);
I wonder if we want to pull history by time frame (as an option). that's something we can think about once the frontend draft is out
hyperdx
github_2023
typescript
154
hyperdxio
MikeShi42
@@ -71,6 +81,55 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + const alertsWithHistory = await Promise.all( + alerts.map(async alert => ({ + ...alert, + history: await getHistory(alert._id.toString(), teamId.toString()),
nit: This shouldn't matter too much now, but perf could be improved with an `$in` query instead right?
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -43,6 +48,29 @@ const zAlert = z const zAlertInput = zAlert; +const getHistory = async (alert: IAlert, teamId: string) => { + const histories = await AlertHistory.find({ alert: alert._id, team: teamId }) + .sort({ createdAt: -1 }) + .limit(20); + return histories; +}; + +const getDashboard = async (alert: IAlert, teamId: string) => { + const dashboard = await Dashboard.findOne({ + _id: alert.dashboardId, + team: teamId, + }); + return dashboard; +}; + +const getLogView = async (alert: IAlert, teamId: string) => { + const logView = await LogView.findOne({ + _id: alert.logView, + team: teamId, + }); + return logView; +};
No need to add these methods. You can use `populate` method. check out the example https://github.com/hyperdxio/hyperdx/blob/ce70319186eee0f8981c49e0fe03f40e3c96780e/packages/api/src/tasks/checkAlerts.ts#L29-L37
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -497,7 +497,11 @@ export const processAlert = async (now: Date, alert: AlertDocument) => { windowSizeInMins, }); history.counts += 1; + // will overwrite if multiple alerts fire + history.lastValue = totalCount; } + // only write one lastValue if there are no alerts at all? + history.lastValue ??= totalCount;
This code is equal to one line `history.lastValue = totalCount;` outside of if condition. Now when I think about it, I think we probably want to put `{ bucketStart, totalCount }` in an array since history only represent the most recent checking point instead of bucket by bucket if that makes sense
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -0,0 +1,286 @@ +import Head from 'next/head'; +import Link from 'next/link'; +import { formatRelative } from 'date-fns'; + +import api from './api'; +import AppNav from './AppNav'; + +// stolen directly from the api alert model for now +export type AlertType = 'presence' | 'absence'; + +export enum AlertState { + ALERT = 'ALERT', + DISABLED = 'DISABLED', + INSUFFICIENT_DATA = 'INSUFFICIENT_DATA', + OK = 'OK', +} + +// follow 'ms' pkg formats +export type AlertInterval = + | '1m' + | '5m' + | '15m' + | '30m' + | '1h' + | '6h' + | '12h' + | '1d'; + +export type AlertChannel = { + type: 'webhook'; + webhookId: string; +}; + +export type AlertSource = 'LOG' | 'CHART';
Please check out `types.ts` file https://github.com/hyperdxio/hyperdx/blob/ce70319186eee0f8981c49e0fe03f40e3c96780e/packages/app/src/types.ts#L46
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +99,45 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + const alertsWithHistory = await Promise.all( + alerts.map(async alert => { + const history = (await getHistory(alert, teamId.toString())) ?? []; + if (!alert.source) throw new Error('Alert source is undefined'); + if (alert.source === 'LOG') { + const logView = await getLogView(alert, teamId.toString()); + // had to rename because logView is an ObjectID + return { + logViewObj: logView, + history, + ...alert.toObject(), + }; + } else {
I think its better to check the source here as well and throw at the end if necessary
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -71,6 +99,45 @@ const validateGroupBy = async (req, res, next) => { }; // Routes +router.get('/', async (req, res, next) => { + try { + const teamId = req.user?.team; + if (teamId == null) { + return res.sendStatus(403); + } + const alerts = await Alert.find({ team: teamId }); + const alertsWithHistory = await Promise.all( + alerts.map(async alert => { + const history = (await getHistory(alert, teamId.toString())) ?? []; + if (!alert.source) throw new Error('Alert source is undefined'); + if (alert.source === 'LOG') { + const logView = await getLogView(alert, teamId.toString()); + // had to rename because logView is an ObjectID + return { + logViewObj: logView,
I assume `logView` is the redundant info. why don't we overwrite `logView` with object here ?
hyperdx
github_2023
typescript
154
hyperdxio
wrn14897
@@ -89,7 +89,7 @@ const AlertSchema = new Schema<IAlert>( // Log alerts logView: { type: mongoose.Schema.Types.ObjectId, - ref: 'Alert', + ref: 'LogView',
Found this bug...that explains why logView couldn't be populated
hyperdx
github_2023
typescript
163
hyperdxio
wrn14897
@@ -1978,6 +1978,27 @@ function SidePanelHeader({ parsedProperties?.['process.tag.rum.sessionId'] ?? sessionId; + const headerEventTags = useMemo(() => { + return [ + ['service', logData._service], + ['host', logData._host], + ['k8s.node', parsedProperties['k8s.node.name']], + ['k8s.pod', parsedProperties['k8s.pod.name']], + ['k8s.statefulset', parsedProperties['k8s.statefulset.name']], + ['k8s.container', parsedProperties['k8s.container.name']],
maybe we want to keep '.name' so its consistent with other tags ?
hyperdx
github_2023
typescript
147
hyperdxio
wrn14897
@@ -421,7 +421,9 @@ export const processAlert = async (now: Date, alert: AlertDocument) => { series.field ) { targetDashboard = dashboard; - const startTimeMs = fns.getTime(checkStartTime); + const startTimeMs = fns.getTime( + fns.subMinutes(checkStartTime, windowSizeInMins), + );
I think we want this fix to be applied only to Sum type (rate) metrics
hyperdx
github_2023
others
147
hyperdxio
wrn14897
@@ -48,6 +48,18 @@ dev-unit: ci-unit: npx nx run-many -t ci:unit +.PHONY: dev-alerts +dev-alerts: + docker compose exec api yarn dev:task check-alerts + +.PHONY: dev-clickhouse +dev-clickhouse: + docker compose exec ch-server clickhouse-client + +.PHONY: dev-clear-alert-history +dev-clear-alert-history: + docker compose exec db mongo --eval "db.getSiblingDB('hyperdx').alerthistories.deleteMany({})" +
Can we rollback these for now ? I'd suggest to add these as alias on local. We need to specify compose file path or its gonna read the `docker-compose.yml` one which is not used for dev
hyperdx
github_2023
typescript
150
hyperdxio
MikeShi42
@@ -1342,10 +1336,149 @@ function PropertySubpanel({ }, {} as any); }, [displayedParsedProperties, propertySearchValue, search]); - const events: any[] | undefined = parsedProperties?.__events; + let events: any[] | undefined; + if (parsedProperties?.__events) { + try { + events = JSON.parse(parsedProperties?.__events); + } catch (e) { + console.warn(e); + } + } const searchInputRef = useRef<HTMLInputElement>(null); + const [jsonOptions, setJsonOptions] = useLocalStorage( + 'logviewer.jsonviewer.options', + { + normallyExpanded: true, + tabulate: true, + lineWrap: true, + useLegacyViewer: false, + }, + ); + + const getLineActions = useCallback<GetLineActions>( + ({ keyPath, value }) => { + const actions: LineAction[] = []; + + if (onPropertyAddClick != null && typeof value !== 'object') { + actions.push({ + key: 'add-to-search', + label: <i className="bi bi-plus-circle" />, + title: 'Add to Search', + onClick: () => { + onPropertyAddClick(`${keyPath.join('.')}`, value); + }, + }); + } + + if (typeof value !== 'object') { + actions.push({ + key: 'search', + label: <i className="bi bi-search" />, + title: 'Search for this value only', + onClick: () => { + router.push( + generateSearchUrl( + `${keyPath.join('.')}:${ + typeof value === 'string' ? `"${value}"` : value + }`, + ), + ); + }, + }); + } + + /* TODO: Handle bools properly (they show up as number...) */ + if (typeof value === 'number') { + actions.push({ + key: 'chart', + label: <i className="bi bi-graph-up" />, + title: 'Chart', + onClick: () => { + router.push( + generateChartUrl({ + aggFn: 'avg', + field: `${keyPath.join('.')}`, + groupBy: [], + table: 'logs', + }), + ); + }, + }); + } + + if (toggleColumn && typeof value !== 'object') { + const keyPathString = keyPath.join('.'); + actions.push({ + key: 'toggle-column', + label: <i className="bi bi-table" />, + title: displayedColumns?.includes(keyPathString) + ? 
`Remove ${keyPathString} column from results table` + : `Add ${keyPathString} column to results table`, + onClick: () => toggleColumn(keyPathString), + }); + } + + const handleCopyObject = () => { + const shouldCopyParent = !isNestedView; + const parentKeyPath = keyPath.slice(0, -1); + const copiedObj = shouldCopyParent + ? parentKeyPath.length === 0 + ? nestedProperties + : get(nestedProperties, parentKeyPath) + : keyPath.length === 0 + ? nestedProperties + : get(nestedProperties, keyPath); + window.navigator.clipboard.writeText( + JSON.stringify(copiedObj, null, 2), + ); + toast.success( + `Copied ${shouldCopyParent ? 'parent' : 'object'} to clipboard`, + ); + }; + + if (typeof value === 'object') { + actions.push( + isNestedView + ? { + key: 'copy-object', + label: 'Copy Object', + onClick: handleCopyObject, + } + : { + key: 'copy-parent',
It seems like there's no way to actually ask to copy a parent any more in the flat view. On the old viewer this was accomplishable by clicking the key of the object, in the flat view. Iiirc the use case was that users woud like to find a specific key/value via searching the flat view, and then copy the parent object for further use. I think we can alternatively allow users to search the nested view and copy objects there which might be cleaner.
hyperdx
github_2023
typescript
151
hyperdxio
MikeShi42
@@ -0,0 +1,39 @@ +import express from 'express'; + +import { Api404Error } from '@/utils/errors'; +import { getTeam } from '@/controllers/team'; +import { isUserAuthenticated } from '@/middleware/auth'; + +const router = express.Router(); + +router.get('/', isUserAuthenticated, async (req, res, next) => {
we can remove the auth middleware right?
hyperdx
github_2023
typescript
145
hyperdxio
jaggederest
@@ -439,7 +464,9 @@ export const processAlert = async (now: Date, alert: AlertDocument) => { let alertState = AlertState.OK; if (checksData?.rows && checksData?.rows > 0) { for (const checkData of checksData.data) { - const totalCount = parseInt(checkData.data); + const totalCount = isString(checkData.data) + ? parseInt(checkData.data)
Nit: Should always provide a radix argument for parseInt
hyperdx
github_2023
typescript
121
hyperdxio
wrn14897
@@ -1,4 +1,5 @@ import * as clickhouse from '..'; +import { describe, beforeEach, jest, it, expect } from '@jest/globals';
I don't think we need to import these from globals. did you hit any issues on local ?
hyperdx
github_2023
typescript
121
hyperdxio
wrn14897
@@ -704,6 +704,7 @@ export const getMetricsChart = async ({ startTime: number; // unix in ms teamId: string; }) => { + await redisClient.connect();
no need for this. redis client should be connected when server starts
hyperdx
github_2023
typescript
121
hyperdxio
MikeShi42
@@ -42,4 +43,29 @@ describe('clickhouse', () => { expect(clickhouse.client.insert).toHaveBeenCalledTimes(2); expect.assertions(2); }); + + it('getMetricsChart', async () => {
would be awesome to have a bit more descriptive of a test case name
hyperdx
github_2023
typescript
121
hyperdxio
MikeShi42
@@ -42,4 +43,29 @@ describe('clickhouse', () => { expect(clickhouse.client.insert).toHaveBeenCalledTimes(2); expect.assertions(2); }); + + it('getMetricsChart', async () => { + jest + .spyOn(clickhouse.client, 'query') + .mockResolvedValueOnce({ json: () => Promise.resolve({}) } as any); + + await clickhouse.getMetricsChart({ + aggFn: clickhouse.AggFn.AvgRate, + dataType: 'Sum', + endTime: Date.now(), + granularity: clickhouse.Granularity.OneHour, + name: 'test', + q: '', + startTime: Date.now() - 1000 * 60 * 60 * 24, + teamId: 'test', + }); + + expect(clickhouse.client.query).toHaveBeenCalledTimes(1); + expect(clickhouse.client.query).toHaveBeenCalledWith( + expect.objectContaining({
we can also use an snapshot/inline snapshot if that's better. Though depending on the intent I imagine this is good enough
hyperdx
github_2023
typescript
136
hyperdxio
MikeShi42
@@ -633,6 +633,8 @@ const getMetricsTagsUncached = async (teamId: string) => { SELECT format('{} - {}', name, data_type) as name, data_type, + MAX(flags) as flags,
Mega Nit: I'm wondering if [`any`](https://clickhouse.com/docs/en/sql-reference/aggregate-functions/reference/any) has a real perf improvement over `max`, in theory it should allow clickhouse to just scan one granule
hyperdx
github_2023
typescript
63
hyperdxio
MikeShi42
@@ -16,10 +16,13 @@ import pick from 'lodash/pick'; import api from './api'; import { AggFn, Granularity, convertGranularityToSeconds } from './ChartUtils'; +import useUserPreferences, { TimeFormat } from './useUserPreferences'; +import { TIME_TOKENS } from './utils'; import { semanticKeyedColor, truncateMiddle } from './utils'; import Link from 'next/link'; +
nit: extra new line
hyperdx
github_2023
typescript
63
hyperdxio
MikeShi42
@@ -37,7 +37,7 @@ export default function SearchTimeRangePicker({ setInputValue, onSearch, showLive = false, - timeFormat = '24h', + timeFormat = '12h',
minor thing - it looks like we end up overwriting this in the UI via https://github.com/hyperdxio/hyperdx/blob/f231d1f65f3e431cc72f2d6c152109c350d19c59/packages/app/src/timeQuery.ts#L18 (so after you hit search for the time range, we end up showing 24h time again instead of 12h) That being said, I don't think it's a huge deal, and something we can probably clean up once https://github.com/hyperdxio/hyperdx/pull/75 lands since it might conflict anyways.
hyperdx
github_2023
typescript
63
hyperdxio
MikeShi42
@@ -421,6 +428,17 @@ export default function TeamPage() { </Modal.Body> </Modal> </div> + <div> + <h2 className="mt-5">Time Format</h2>
I know we don't have a global personal setting place, could we maybe for now just put some subtext describing that this setting is a local setting and won't propagate across the entire team? Since this settings page is technically supposed to be the team page. Something like: ``` <div className="text-muted my-2"> Note: Only affects your own view and does not propagate to other team members. </div> ``` Don't want to block this PR on how we should be treating personal vs team settings :)
hyperdx
github_2023
others
131
hyperdxio
MikeShi42
@@ -0,0 +1,6 @@ +--- +'@hyperdx/api': minor +'@hyperdx/app': minor +--- + +refactor + feat: split metrics chart endpoint name query param + add validator
Might be worth being more explicit that we're changing an (internal) API shape
hyperdx
github_2023
typescript
126
hyperdxio
wrn14897
@@ -28,7 +28,12 @@ function formatTs({ const seconds = Math.floor((value / 1000) % 60); return `${minutes}:${seconds < 10 ? '0' : ''}${seconds}`; } else { - return format(new Date(ts), 'hh:mm:ss a'); + try { + return format(new Date(ts), 'hh:mm:ss a');
nit: we can also use `isValid` (https://date-fns.org/v2.16.1/docs/isValid) to verify date object
hyperdx
github_2023
others
119
hyperdxio
MikeShi42
@@ -1,5 +1,7 @@ @import './variables'; @import '~bootstrap/scss/bootstrap'; +@import url('https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.0/font/bootstrap-icons.css');
curious why the switch?
hyperdx
github_2023
javascript
119
hyperdxio
MikeShi42
@@ -20,5 +20,6 @@ module.exports = { 'react/display-name': 'off', '@typescript-eslint/no-explicit-any': 'off', '@typescript-eslint/no-empty-function': 'off', + 'import/no-anonymous-default-export': 'off',
☹️
hyperdx
github_2023
typescript
122
hyperdxio
MikeShi42
@@ -2135,7 +2209,10 @@ export default function LogSidePanel({ : []), ]} activeItem={displayedTab} - onClick={(v: any) => setTab(v)} + onClick={(v: any) => { + setTab(v); + throw new Error('For Kolechia');
might want to clean this one up 😄
hyperdx
github_2023
others
122
hyperdxio
MikeShi42
@@ -756,3 +756,7 @@ div.react-datepicker { opacity: 0.7; } } + +.no-underline {
You can also use `text-decoration-none` from bs
hyperdx
github_2023
others
112
hyperdxio
MikeShi42
@@ -0,0 +1,69 @@ +@import '../../styles/variables'; + +.tableWrapper { + table { + width: 100%; + + th, + td { + &:first-child { + padding-left: 24px; + } + &:last-child { + padding-right: 24px; + } + } + + th { + color: $slate-300; + font-family: Inter; + text-transform: uppercase; + font-size: 9px; + font-weight: 500; + letter-spacing: 1px; + border-bottom: 1px solid $slate-950; + padding: 6px 12px; + } + + tr { + td { + padding: 6px 12px; + border-bottom: 1px solid $slate-950;
thoughts on leaving out the border for some of the network panel stuff? it feels a bit cleaner/less busy <img width="532" alt="image" src="https://github.com/hyperdxio/hyperdx/assets/2781687/0d40177a-1fec-45e0-b9fc-d7089802f7f7">
hyperdx
github_2023
typescript
112
hyperdxio
MikeShi42
@@ -0,0 +1,333 @@ +import * as React from 'react'; +import { format } from 'date-fns'; +import { JSONTree } from 'react-json-tree'; +import type { StacktraceFrame, StacktraceBreadcrumb } from './types'; +import styles from '../styles/LogSidePanel.module.scss'; +import { CloseButton } from 'react-bootstrap'; +import { useLocalStorage } from './utils'; +import { ColumnDef, Row } from '@tanstack/react-table'; +import { TableCellButton } from './components/Table'; + +import { UNDEFINED_WIDTH } from './tableUtils'; + +export const CollapsibleSection = ({ + title, + children, + initiallyCollapsed, +}: { + title: string; + children: React.ReactNode; + initiallyCollapsed?: boolean; +}) => { + const [collapsed, setCollapsed] = React.useState(initiallyCollapsed ?? false); + + return ( + <div className="my-3"> + <div + className={`d-flex align-items-center mb-1 text-white-hover`} + role="button" + onClick={() => setCollapsed(!collapsed)} + > + <i className={`bi bi-chevron-${collapsed ? 'right' : 'down'} me-2`}></i> + <div className="fs-7 text-slate-200">{title}</div> + </div> + {collapsed ? null : <div className="mb-4">{children}</div>} + </div> + ); +}; + +export const SectionWrapper: React.FC<{ title?: React.ReactNode }> = ({ + children, + title, +}) => ( + <div className={styles.panelSectionWrapper}> + {title && <div className={styles.panelSectionWrapperTitle}>{title}</div>} + {children} + </div> +); + +/** + * Stacktrace elements + */ +export const StacktraceValue = ({ + label, + value, +}: { + label: React.ReactNode; + value: React.ReactNode; +}) => { + return ( + <div + style={{ + paddingRight: 20, + marginRight: 12, + borderRight: '1px solid #ffffff20', + }} + > + <div className="text-slate-400">{label}</div> + <div className="fs-7">{value}</div> + </div> + ); +}; + +const StacktraceRowExpandButton = ({ + onClick, + isOpen, +}: { + onClick: VoidFunction; + isOpen: boolean; +}) => { + return ( + <TableCellButton + label={isOpen ? 
'Hide context' : 'Show context'} + biIcon={isOpen ? 'chevron-up' : 'chevron-down'} + onClick={onClick} + /> + ); +}; + +export const StacktraceRow = ({ row }: { row: Row<StacktraceFrame> }) => { + const [lineContextOpen, setLineContextOpen] = React.useState(false);
I think it's a bit more intuitive to have the line context open (as it feels like the most helpful part!)
hyperdx
github_2023
typescript
112
hyperdxio
MikeShi42
@@ -1975,95 +1854,68 @@ const ExceptionSubpanel = ({ // TODO: show all frames (stackable) return ( <div> - <CollapsibleSection title="Stack Trace" initiallyCollapsed={false}> - {firstException ? ( - <div> - <div className="fw-bold fs-8">{firstException.type}</div> - <div className="text-muted">{firstException.value}</div> - <div className="text-muted"> - <span>mechanism: {firstException.mechanism?.type}</span> - <span className="ms-2"> - handled:{' '} - {firstException.mechanism?.handled ? ( - <span className="text-success">true</span> - ) : ( - <span className="text-danger">false</span> - )} - </span> - </div> - {firstException.stacktrace?.frames?.reverse().map((frame, i) => ( - <div key={frame.filename + frame.lineno} className="mt-3"> - <div className="fw-bold fs-8"> - {frame.filename} in {frame.function} at line {frame.lineno}: - {frame.colno} - </div> - <pre className="mt-3"> - {frame.pre_context?.map((line, i) => ( - <div key={line} className="text-muted"> - {(frame.lineno ?? 0) - - (frame.pre_context?.length ?? 0) + - i}{' '} - {line} - </div> - ))} - {frame.context_line && ( - <div - className="fw-bold" - style={{ backgroundColor: '#1f2429' }} - > - {frame.lineno} {frame.context_line} - </div> - )} - {frame.post_context?.map((line, i) => ( - <div key={line} className="text-muted"> - {frame.lineno + i + 1} {line} - </div> - ))} - </pre> + <CollapsibleSection title="Stack Trace"> + <SectionWrapper + title={ + <> + <div className="pb-3"> + <div className="fw-bold fs-8">{firstException.type}</div> + <div className="text-muted">{firstException.value}</div> </div> - ))} - </div> - ) : ( - <div className="text-muted">No Stack Trace Found</div> - )} + <div className="d-flex gap-2 flex-wrap"> + <StacktraceValue + label="mechanism" + value={firstException.mechanism?.type} + /> + <StacktraceValue + label="handled" + value={ + firstException.mechanism?.handled ? 
( + <span className="text-success">true</span> + ) : ( + <span className="text-danger">false</span> + ) + } + /> + {firstException.mechanism?.data?.function ? ( + <StacktraceValue + label="function" + value={firstException.mechanism.data.function} + /> + ) : null} + {firstException.mechanism?.data?.handler ? ( + <StacktraceValue + label="handler" + value={firstException.mechanism.data.handler} + /> + ) : null} + {firstException.mechanism?.data?.target ? ( + <StacktraceValue + label="target" + value={firstException.mechanism.data.target} + /> + ) : null} + </div> + </> + } + > + <Table + hideHeader + columns={stacktraceColumns} + data={firstException.stacktrace?.frames ?? []}
I wonder if we should reverse the frames here so that the more specific and likely user-land code is on top? <img width="1063" alt="image" src="https://github.com/hyperdxio/hyperdx/assets/2781687/3d349c7d-8966-4b09-822c-c3145e018481">
hyperdx
github_2023
typescript
112
hyperdxio
MikeShi42
@@ -1975,95 +1854,68 @@ const ExceptionSubpanel = ({ // TODO: show all frames (stackable) return ( <div> - <CollapsibleSection title="Stack Trace" initiallyCollapsed={false}> - {firstException ? ( - <div> - <div className="fw-bold fs-8">{firstException.type}</div> - <div className="text-muted">{firstException.value}</div> - <div className="text-muted"> - <span>mechanism: {firstException.mechanism?.type}</span> - <span className="ms-2"> - handled:{' '} - {firstException.mechanism?.handled ? ( - <span className="text-success">true</span> - ) : ( - <span className="text-danger">false</span> - )} - </span> - </div> - {firstException.stacktrace?.frames?.reverse().map((frame, i) => ( - <div key={frame.filename + frame.lineno} className="mt-3"> - <div className="fw-bold fs-8"> - {frame.filename} in {frame.function} at line {frame.lineno}: - {frame.colno} - </div> - <pre className="mt-3"> - {frame.pre_context?.map((line, i) => ( - <div key={line} className="text-muted"> - {(frame.lineno ?? 0) - - (frame.pre_context?.length ?? 0) + - i}{' '} - {line} - </div> - ))} - {frame.context_line && ( - <div - className="fw-bold" - style={{ backgroundColor: '#1f2429' }} - > - {frame.lineno} {frame.context_line} - </div> - )} - {frame.post_context?.map((line, i) => ( - <div key={line} className="text-muted"> - {frame.lineno + i + 1} {line} - </div> - ))} - </pre> + <CollapsibleSection title="Stack Trace"> + <SectionWrapper + title={ + <> + <div className="pb-3"> + <div className="fw-bold fs-8">{firstException.type}</div> + <div className="text-muted">{firstException.value}</div> </div> - ))} - </div> - ) : ( - <div className="text-muted">No Stack Trace Found</div> - )} + <div className="d-flex gap-2 flex-wrap"> + <StacktraceValue + label="mechanism" + value={firstException.mechanism?.type} + /> + <StacktraceValue + label="handled" + value={ + firstException.mechanism?.handled ? 
( + <span className="text-success">true</span> + ) : ( + <span className="text-danger">false</span> + ) + } + /> + {firstException.mechanism?.data?.function ? ( + <StacktraceValue + label="function" + value={firstException.mechanism.data.function} + /> + ) : null} + {firstException.mechanism?.data?.handler ? ( + <StacktraceValue + label="handler" + value={firstException.mechanism.data.handler} + /> + ) : null} + {firstException.mechanism?.data?.target ? ( + <StacktraceValue + label="target" + value={firstException.mechanism.data.target} + /> + ) : null} + </div> + </> + } + > + <Table + hideHeader + columns={stacktraceColumns} + data={firstException.stacktrace?.frames ?? []} + emptyMessage="No stack trace found" + /> + </SectionWrapper> </CollapsibleSection> + <CollapsibleSection title="Breadcrumbs" initiallyCollapsed>
I also think it'd be nice if this was default open
hyperdx
github_2023
typescript
112
hyperdxio
MikeShi42
@@ -0,0 +1,333 @@ +import * as React from 'react'; +import { format } from 'date-fns'; +import { JSONTree } from 'react-json-tree'; +import type { StacktraceFrame, StacktraceBreadcrumb } from './types'; +import styles from '../styles/LogSidePanel.module.scss'; +import { CloseButton } from 'react-bootstrap'; +import { useLocalStorage } from './utils'; +import { ColumnDef, Row } from '@tanstack/react-table'; +import { TableCellButton } from './components/Table'; + +import { UNDEFINED_WIDTH } from './tableUtils'; + +export const CollapsibleSection = ({ + title, + children, + initiallyCollapsed, +}: { + title: string; + children: React.ReactNode; + initiallyCollapsed?: boolean; +}) => { + const [collapsed, setCollapsed] = React.useState(initiallyCollapsed ?? false); + + return ( + <div className="my-3"> + <div + className={`d-flex align-items-center mb-1 text-white-hover`} + role="button" + onClick={() => setCollapsed(!collapsed)} + > + <i className={`bi bi-chevron-${collapsed ? 'right' : 'down'} me-2`}></i> + <div className="fs-7 text-slate-200">{title}</div> + </div> + {collapsed ? null : <div className="mb-4">{children}</div>} + </div> + ); +}; + +export const SectionWrapper: React.FC<{ title?: React.ReactNode }> = ({ + children, + title, +}) => ( + <div className={styles.panelSectionWrapper}> + {title && <div className={styles.panelSectionWrapperTitle}>{title}</div>} + {children} + </div> +); + +/** + * Stacktrace elements + */ +export const StacktraceValue = ({ + label, + value, +}: { + label: React.ReactNode; + value: React.ReactNode; +}) => { + return ( + <div + style={{ + paddingRight: 20, + marginRight: 12, + borderRight: '1px solid #ffffff20', + }} + > + <div className="text-slate-400">{label}</div> + <div className="fs-7">{value}</div> + </div> + ); +}; + +const StacktraceRowExpandButton = ({ + onClick, + isOpen, +}: { + onClick: VoidFunction; + isOpen: boolean; +}) => { + return ( + <TableCellButton + label={isOpen ? 
'Hide context' : 'Show context'} + biIcon={isOpen ? 'chevron-up' : 'chevron-down'} + onClick={onClick} + /> + ); +}; + +export const StacktraceRow = ({ row }: { row: Row<StacktraceFrame> }) => { + const [lineContextOpen, setLineContextOpen] = React.useState(false); + + const frame = row.original; + const hasContext = !!frame.context_line; + + const handleToggleContext = React.useCallback(() => { + setLineContextOpen(!lineContextOpen); + }, [lineContextOpen]); + + return ( + <> + <div className="w-100 d-flex justify-content-between align-items-center"> + <div> + {frame.filename} + <span className="text-slate-400">{' in '}</span> + {frame.function} + {frame.lineno || frame.colno ? ( + <> + <span className="text-slate-400">{' at line '}</span> + <span className="text-slate-300"> + {frame.lineno}:{frame.colno} + </span> + </> + ) : null} + </div> + {hasContext && ( + <StacktraceRowExpandButton + onClick={handleToggleContext} + isOpen={lineContextOpen} + /> + )} + </div> + + {lineContextOpen && ( + <pre className={styles.lineContext}> + {frame.pre_context?.map((line, i) => ( + <div key={line}> + <span className={styles.lineContextLineNo}> + {(frame.lineno ?? 0) - (frame.pre_context?.length ?? 
0) + i} + </span> + {line} + </div> + ))} + {frame.context_line && ( + <div className={styles.lineContextCurrentLine}> + <span className={styles.lineContextLineNo}>{frame.lineno}</span> + {frame.context_line} + </div> + )} + {frame.post_context?.map((line, i) => ( + <div key={line}> + <span className={styles.lineContextLineNo}> + {frame.lineno + i + 1} + </span> + {line} + </div> + ))} + </pre> + )} + </> + ); +}; + +export const stacktraceColumns: ColumnDef<StacktraceFrame>[] = [ + { + accessorKey: 'filename', + cell: StacktraceRow, + }, +]; + +export const breadcrumbColumns: ColumnDef<StacktraceBreadcrumb>[] = [ + { + accessorKey: 'category', + header: 'Category', + cell: ({ row }) => ( + <span className="text-slate-300">{row.original.category}</span> + ), + }, + { + accessorKey: 'message', + header: 'Message', + size: UNDEFINED_WIDTH, + cell: ({ row }) => + row.original.message || <span className="text-slate-500">Empty</span>,
I think it'd be nice here if can also make use of the fetch/xhr categories, as they currently show up as empty (maybe a follow up PR?) Here's a few examples if helpful of the data structure I see: ``` { "category": "fetch", "data": { "method": "GET", "status_code": 200, "url": "http://localhost:8080/__nextjs_original-stack-frame" }, "timestamp": 1700379487.918, "type": "http" } ``` ``` { "category": "xhr", "data": { "method": "POST", "status_code": 200, "url": "http://localhost:4318/v1/traces" }, "timestamp": 1700379491.015, "type": "http" } ```
hyperdx
github_2023
typescript
112
hyperdxio
MikeShi42
@@ -0,0 +1,333 @@ +import * as React from 'react';
I'm curious how do we differentiate what should belong here vs LogSidePanel? Is this like a generic util elements for the LogSidePanel?
hyperdx
github_2023
typescript
112
hyperdxio
MikeShi42
@@ -655,18 +667,24 @@ function TraceSubpanel({ )} > <ExceptionSubpanel - breadcrumbs={JSON.parse( - selectedLogData?.['string.values']?.[ - selectedLogData?.['string.names']?.indexOf('breadcrumbs') - ], - )} - exceptionValues={JSON.parse( - selectedLogData?.['string.values']?.[ - selectedLogData?.['string.names']?.indexOf( - 'exception.values', - ) - ], - )} + breadcrumbs={ + JSON.parse( + selectedLogData?.['string.values']?.[ + selectedLogData?.['string.names']?.indexOf( + 'breadcrumbs', + ) + ], + ) ?? []
Actually I believe in this case we need to do ``` JSON.parse( selectedLogData?.['string.values']?.[ selectedLogData?.['string.names']?.indexOf( 'breadcrumbs', ) ] ?? '[]') ``` as the undefined value into `JSON.parse` will throw a parse error
hyperdx
github_2023
typescript
117
hyperdxio
wrn14897
@@ -200,14 +200,22 @@ const Tile = forwardRef( <div className="fs-7 text-muted">{chart.name}</div> <i className="bi bi-grip-horizontal text-muted" /> <div className="fs-7 text-muted d-flex gap-2 align-items-center"> - {hasAlert && ( - <div - className="rounded px-1 text-muted bg-grey opacity-90 cursor-default" - title="Has alert" - > - <span className="bi bi-bell" /> + {alert && ( + <div className="rounded px-1 text-muted bg-grey opacity-90 cursor-default"> + {alert.state === 'ALERT' ? ( + <i + className="bi bi-bell text-danger effect-pulse"
Super cool effect. It would be awesome to add this to sidebar bell as well (saved logs alert)
hyperdx
github_2023
others
113
hyperdxio
wrn14897
@@ -205,6 +205,7 @@ services: environment: NEXT_PUBLIC_API_SERVER_URL: 'http://localhost:8000' # need to be localhost (CORS) NEXT_PUBLIC_HDX_API_KEY: ${HYPERDX_API_KEY} + HYPERDX_API_KEY: ${HYPERDX_API_KEY}
Ah can we also remove `NEXT_PUBLIC_HDX_API_KEY` here since its not used ?
hyperdx
github_2023
typescript
103
hyperdxio
svc-shorpo
@@ -0,0 +1,101 @@ +import { useEffect, useState } from 'react'; + +const checkLength = (password: string) => password.length >= 12; +const checkOneUpper = (password: string) => /[A-Z]+/.test(password); +const checkOneLower = (password: string) => /[a-z]+/.test(password); +const checkOneNumber = (password: string) => /\d+/.test(password); +const checkOneSpecial = (password: string) => /\W+/.test(password); + +export const PasswordCheck = (password: string | null) => { + password = password ?? ''; + return ( + <ul> + <li> + <CheckOrX handler={checkLength} password={password}> + minimum 12 characters + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneUpper} password={password}> + at least 1 uppercase + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneLower} password={password}> + at least 1 lowercase + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneNumber} password={password}> + at least 1 number + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneSpecial} password={password}> + at least 1 special character + </CheckOrX> + </li> + </ul> + ); +}; + +export const CheckOrX = ({ + handler, + password, + children, +}: { + handler: (password: string) => boolean; + password: string; + children: React.ReactNode; +}) => { + const [isValid, setIsValid] = useState(false); + useEffect(() => { + const actualPass = (password['password'] as string) ?? password; + setIsValid(handler(actualPass)); + }, [handler, password]); + return ( + <span className={isValid ? 'text-success' : 'text-danger'}> + {isValid ? <Check /> : <XShape />} {children} + </span> + ); +}; + +const Check = () => (
alternatively you should be able to use bootstrap-icons here: https://icons.getbootstrap.com ``` <span class="bi bi-check" /> ```
hyperdx
github_2023
typescript
103
hyperdxio
svc-shorpo
@@ -0,0 +1,101 @@ +import { useEffect, useState } from 'react'; + +const checkLength = (password: string) => password.length >= 12; +const checkOneUpper = (password: string) => /[A-Z]+/.test(password); +const checkOneLower = (password: string) => /[a-z]+/.test(password); +const checkOneNumber = (password: string) => /\d+/.test(password); +const checkOneSpecial = (password: string) => /\W+/.test(password); + +export const PasswordCheck = (password: string | null) => { + password = password ?? ''; + return ( + <ul> + <li> + <CheckOrX handler={checkLength} password={password}> + minimum 12 characters + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneUpper} password={password}> + at least 1 uppercase + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneLower} password={password}> + at least 1 lowercase + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneNumber} password={password}> + at least 1 number + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneSpecial} password={password}> + at least 1 special character + </CheckOrX> + </li> + </ul> + ); +}; + +export const CheckOrX = ({ + handler, + password, + children, +}: { + handler: (password: string) => boolean; + password: string; + children: React.ReactNode; +}) => { + const [isValid, setIsValid] = useState(false); + useEffect(() => {
you can also consider having a computed var with `useMemo` instead of updating state with useEffect: ```ts const isValid = useMemo(() => handler(password), [handler, password]) ```
hyperdx
github_2023
typescript
103
hyperdxio
svc-shorpo
@@ -0,0 +1,101 @@ +import { useEffect, useState } from 'react'; + +const checkLength = (password: string) => password.length >= 12; +const checkOneUpper = (password: string) => /[A-Z]+/.test(password); +const checkOneLower = (password: string) => /[a-z]+/.test(password); +const checkOneNumber = (password: string) => /\d+/.test(password); +const checkOneSpecial = (password: string) => /\W+/.test(password); + +export const PasswordCheck = (password: string | null) => { + password = password ?? ''; + return ( + <ul> + <li> + <CheckOrX handler={checkLength} password={password}> + minimum 12 characters + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneUpper} password={password}> + at least 1 uppercase + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneLower} password={password}> + at least 1 lowercase + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneNumber} password={password}> + at least 1 number + </CheckOrX> + </li> + <li> + <CheckOrX handler={checkOneSpecial} password={password}> + at least 1 special character + </CheckOrX> + </li> + </ul> + ); +}; + +export const CheckOrX = ({ + handler, + password, + children, +}: { + handler: (password: string) => boolean; + password: string; + children: React.ReactNode; +}) => { + const [isValid, setIsValid] = useState(false); + useEffect(() => { + const actualPass = (password['password'] as string) ?? password;
interesting, maybe type checking isn't working correctly upstream 🤔
hyperdx
github_2023
typescript
103
hyperdxio
wrn14897
@@ -45,6 +46,26 @@ export default function AuthPage({ action }: { action: 'register' | 'login' }) { } }, [installation, isRegister, router]); + const [currentPassword, setCurrentPassword] = useState<string>(''); + const [confirmPassword, setConfirmPassword] = useState<string>(''); + + const updateCurrentPassword = () => { + const val = (document.getElementById('password') as HTMLInputElement).value; + console.log(val); + setCurrentPassword(val); + }; + + const updateConfirmPassword = () => { + const val = (document.getElementById('confirmPassword') as HTMLInputElement) + .value; + console.log(val);
can remove this console log ?
hyperdx
github_2023
typescript
103
hyperdxio
wrn14897
@@ -45,6 +46,26 @@ export default function AuthPage({ action }: { action: 'register' | 'login' }) { } }, [installation, isRegister, router]); + const [currentPassword, setCurrentPassword] = useState<string>(''); + const [confirmPassword, setConfirmPassword] = useState<string>(''); + + const updateCurrentPassword = () => { + const val = (document.getElementById('password') as HTMLInputElement).value; + console.log(val); + setCurrentPassword(val); + }; + + const updateConfirmPassword = () => {
can we get target object from the arg ? (so no need to call document.getElementById)
hyperdx
github_2023
typescript
103
hyperdxio
wrn14897
@@ -63,39 +68,25 @@ export const CheckOrX = ({ const Check = () => ( <svg xmlns="http://www.w3.org/2000/svg" - width="1em" - height="1em" + width="16"
I wonder if we can use bootstrap icon directly without using this svg component (for example: https://github.com/hyperdxio/hyperdx/blob/2fcd167540f596f800b042c9403dfbcc3072fffe/packages/app/src/InstallInstructionsModal.tsx#L33-L38)
hyperdx
github_2023
typescript
103
hyperdxio
MikeShi42
@@ -45,6 +48,23 @@ export default function AuthPage({ action }: { action: 'register' | 'login' }) { } }, [installation, isRegister, router]); + const [currentPassword, setCurrentPassword] = useState<string>('');
sorry to pile on late on this PR... we're a bit inconsistent in the project but we're trying to move frontend forms to be more based off of `react-hook-form` when possible. In this case we're already using the hook on line 30 above here, and the hook allows for listening to values in the form via [`watch`](https://react-hook-form.com/docs/useform/watch) so you don't have to manage the state/updating yourself here. Instead you should be able to do something like: `const currentPassword = watch('password');`
hyperdx
github_2023
typescript
103
hyperdxio
MikeShi42
@@ -0,0 +1,70 @@ +import { useMemo } from 'react'; + +const checkLength = (password: string) => password.length >= 12; +const checkOneUpper = (password: string) => /[A-Z]+/.test(password); +const checkOneLower = (password: string) => /[a-z]+/.test(password); +const checkOneNumber = (password: string) => /\d+/.test(password); +const checkOneSpecial = (password: string) => /\W+/.test(password); + +export const PasswordCheck = (password: string | null) => { + password = password ?? ''; + return ( + <ul> + <li>
personally not a fan of the li's here, can probably do without the bullet points and just have them as just divs to make it nice and clean <img width="497" alt="image" src="https://github.com/hyperdxio/hyperdx/assets/2781687/5b4e4c05-556b-49c5-bc67-9110ab0ba5fc">
hyperdx
github_2023
typescript
103
hyperdxio
MikeShi42
@@ -153,15 +174,22 @@ export default function AuthPage({ action }: { action: 'register' | 'login' }) { htmlFor="confirmPassword" className="text-start text-muted fs-7.5 mb-1" > - Confirm Password + <CheckOrX + handler={confirmPass} + password={currentPassword} + > + Confirm Password + </CheckOrX> </Form.Label> <Form.Control data-test-id="form-confirm-password" id="confirmPassword" type="password" className="border-0"
```suggestion className="border-0 mb-2" ``` just adds a bit of breathing room between the input <img width="510" alt="image" src="https://github.com/hyperdxio/hyperdx/assets/2781687/34288d16-be85-4222-8c21-b7fa54761dfc"> vs <img width="454" alt="image" src="https://github.com/hyperdxio/hyperdx/assets/2781687/554bb8ea-1327-4527-b6cb-fcecb1982fcf">
hyperdx
github_2023
typescript
92
hyperdxio
wrn14897
@@ -25,3 +25,8 @@ export const PORT = Number.parseInt(env.PORT as string); export const REDIS_URL = env.REDIS_URL as string; export const SERVER_URL = env.SERVER_URL as string; export const USAGE_STATS_ENABLED = env.USAGE_STATS_ENABLED !== 'false'; +export const CACHE_METRICS_TAGS = env.CACHE_METRICS_TAGS !== 'false'; +// deliberately using '||' instead of '??' to avoid empty/falsey values +// returning as a string since all env values are stringish (to be parsed using ms()) +export const CACHE_METRICS_EXPIRATION = + (env.CACHE_METRICS_EXPIRATION as string) || '600s';
we want this defaults to '600' instead of '600s', right ?
hyperdx
github_2023
typescript
92
hyperdxio
wrn14897
@@ -13,3 +13,5 @@ client.on('error', (err: any) => { }); export default client; + +export { client as redisClient };
I think this export is redundant since we already export client as default. So importing code is like ``` import redisClient from './utils/redis'; ```
hyperdx
github_2023
typescript
92
hyperdxio
wrn14897
@@ -25,3 +25,8 @@ export const PORT = Number.parseInt(env.PORT as string); export const REDIS_URL = env.REDIS_URL as string; export const SERVER_URL = env.SERVER_URL as string; export const USAGE_STATS_ENABLED = env.USAGE_STATS_ENABLED !== 'false'; +export const CACHE_METRICS_TAGS = env.CACHE_METRICS_TAGS !== 'false'; +// deliberately using '||' instead of '??' to avoid empty/falsey values +// returning as a string since all env values are stringish (to be parsed using ms()) +export const CACHE_METRICS_EXPIRATION =
nit: maybe be more explicit in naming like `CACHE_METRICS_EXPIRATION_IN_SEC`
hyperdx
github_2023
typescript
108
hyperdxio
MikeShi42
@@ -12,10 +14,10 @@ type Chart = { series: { table: string; type: 'time' | 'histogram' | 'search' | 'number' | 'table' | 'markdown'; - aggFn: string; - field?: string; - where?: string; - groupBy?: string[]; + aggFn: AggFn; + field: string; + where: string; + groupBy: string[];
this typing seems incorrect, the frontend has a more accurate version of types but we can't assume these fields are non-null for all chart types. I'm assuming we went with a less specific type here for a reason though as opposed to the specific union. https://github.com/hyperdxio/hyperdx/blob/fe41b150de8065634bf3e5f209e751441caa6513/packages/app/src/EditChartForm.tsx#L32
hyperdx
github_2023
typescript
108
hyperdxio
MikeShi42
@@ -58,25 +56,110 @@ export const buildLogSearchLink = ({ return url.toString(); }; -const buildEventSlackMessage = ({ +// TODO: should link to the chart instead +export const buildChartLink = ({ + dashboardId, + endTime, + granularity, + startTime, +}: { + dashboardId: string; + endTime: Date; + granularity: string; + startTime: Date; +}) => { + const url = new URL(`${config.FRONTEND_URL}/dashboards/${dashboardId}`); + const queryParams = new URLSearchParams({ + from: startTime.getTime().toString(), + granularity, + to: endTime.getTime().toString(), + }); + url.search = queryParams.toString(); + return url.toString(); +}; + +const buildChartEventSlackMessage = ({ + alert, + dashboard, + endTime, + granularity, + group, + startTime, + totalCount, +}: { + alert: AlertDocument; + endTime: Date; + dashboard: { + id: string; + name: string; + chart: { + id: string; + name: string; + series: IDashboard['charts'][0]['series'][0]; + }; + }; + granularity: string; + group?: string; + startTime: Date; + totalCount: number; +}) => { + const mrkdwn = [ + `*<${buildChartLink({ + dashboardId: dashboard.id, + endTime, + granularity, + startTime, + })} | Alert for "${dashboard.chart.name}" in "${dashboard.name}">*`, + ...(group != null ? [`Group: "${group}"`] : []), + `${totalCount} lines found, expected ${
```suggestion `${totalCount} ${ ``` "lines" doesn't make sense in this context
hyperdx
github_2023
typescript
108
hyperdxio
MikeShi42
@@ -58,25 +56,110 @@ export const buildLogSearchLink = ({ return url.toString(); }; -const buildEventSlackMessage = ({ +// TODO: should link to the chart instead +export const buildChartLink = ({ + dashboardId, + endTime, + granularity, + startTime, +}: { + dashboardId: string; + endTime: Date; + granularity: string; + startTime: Date; +}) => { + const url = new URL(`${config.FRONTEND_URL}/dashboards/${dashboardId}`); + const queryParams = new URLSearchParams({ + from: startTime.getTime().toString(), + granularity, + to: endTime.getTime().toString(), + }); + url.search = queryParams.toString(); + return url.toString(); +}; + +const buildChartEventSlackMessage = ({ + alert, + dashboard, + endTime, + granularity, + group, + startTime, + totalCount, +}: { + alert: AlertDocument; + endTime: Date; + dashboard: { + id: string; + name: string; + chart: { + id: string; + name: string; + series: IDashboard['charts'][0]['series'][0]; + }; + }; + granularity: string; + group?: string; + startTime: Date; + totalCount: number; +}) => { + const mrkdwn = [ + `*<${buildChartLink({ + dashboardId: dashboard.id, + endTime, + granularity, + startTime, + })} | Alert for "${dashboard.chart.name}" in "${dashboard.name}">*`, + ...(group != null ? [`Group: "${group}"`] : []), + `${totalCount} lines found, expected ${ + alert.type === 'presence' ? 'less than' : 'greater than' + } ${alert.threshold} lines`,
```suggestion } ${alert.threshold}`, ``` ![image](https://github.com/hyperdxio/hyperdx/assets/2781687/0ae0f969-84bb-4669-a08f-0ae708ed0daf)
hyperdx
github_2023
typescript
108
hyperdxio
MikeShi42
@@ -58,25 +56,110 @@ export const buildLogSearchLink = ({ return url.toString(); }; -const buildEventSlackMessage = ({ +// TODO: should link to the chart instead +export const buildChartLink = ({ + dashboardId, + endTime, + granularity, + startTime, +}: { + dashboardId: string; + endTime: Date; + granularity: string; + startTime: Date; +}) => { + const url = new URL(`${config.FRONTEND_URL}/dashboards/${dashboardId}`); + const queryParams = new URLSearchParams({ + from: startTime.getTime().toString(), + granularity, + to: endTime.getTime().toString(), + }); + url.search = queryParams.toString(); + return url.toString(); +}; + +const buildChartEventSlackMessage = ({ + alert, + dashboard, + endTime, + granularity, + group, + startTime, + totalCount, +}: { + alert: AlertDocument; + endTime: Date; + dashboard: { + id: string; + name: string; + chart: { + id: string; + name: string; + series: IDashboard['charts'][0]['series'][0]; + }; + }; + granularity: string; + group?: string; + startTime: Date; + totalCount: number; +}) => { + const mrkdwn = [ + `*<${buildChartLink({ + dashboardId: dashboard.id, + endTime, + granularity, + startTime, + })} | Alert for "${dashboard.chart.name}" in "${dashboard.name}">*`, + ...(group != null ? [`Group: "${group}"`] : []), + `${totalCount} lines found, expected ${ + alert.type === 'presence' ? 'less than' : 'greater than' + } ${alert.threshold} lines`, + ].join('\n'); + + return { + text: `Alert for "${dashboard.chart.name}" in "${dashboard.name}" - ${totalCount} lines found`,
```suggestion text: `Alert for "${dashboard.chart.name}" in "${dashboard.name}" - ${totalCount} ${... exceeds/falls below ${threshold} ...}`, ``` We should include the exceeds/falls below threshold message here too