code
stringlengths 1
1.05M
| repo_name
stringlengths 6
83
| path
stringlengths 3
242
| language
stringclasses 222
values | license
stringclasses 20
values | size
int64 1
1.05M
|
|---|---|---|---|---|---|
import {
ArrowUpDown,
Combine,
Filter as FilterIcon,
GitBranch,
Indent,
Merge,
Pointer,
Sigma,
Table as TableIcon,
TextCursorInput,
XSquareIcon,
} from 'lucide-vue-next'
import { copy } from '../helpers'
import { FIELDTYPES } from '../helpers/constants'
import dayjs from '../helpers/dayjs'
import {
Cast,
CastArgs,
Column,
CustomOperation,
CustomOperationArgs,
Expression,
Filter,
FilterArgs,
FilterGroup,
FilterGroupArgs,
FilterOperator,
FilterValue,
GranularityType,
Join,
JoinArgs,
Limit,
Measure,
Mutate,
MutateArgs,
OrderBy,
OrderByArgs,
PivotWider,
PivotWiderArgs,
QueryTableArgs,
Remove,
RemoveArgs,
Rename,
RenameArgs,
Select,
SelectArgs,
Source,
SourceArgs,
Summarize,
SummarizeArgs,
Table,
TableArgs,
Union,
UnionArgs,
} from '../types/query.types'
import { Query } from './query'
/** Build a `table` source descriptor; missing fields default to ''. */
export const table = (args: Partial<TableArgs>): Table => {
	return {
		type: 'table',
		table_name: args.table_name || '',
		data_source: args.data_source || '',
	}
}
/** Build a `query` source descriptor pointing at another workbook query. */
export const query_table = (args: Partial<QueryTableArgs>): Table => {
	return {
		type: 'query',
		workbook: args.workbook || '',
		query_name: args.query_name || '',
	}
}
/**
 * Build a column reference node. Extra `options` (e.g. granularity) are
 * merged in and may override the defaults.
 */
export const column = (column_name: string, options = {}): Column => {
	const base = { type: 'column', column_name }
	return { ...base, ...options } as Column
}
/** The built-in `count(*)` measure available on every query result. */
export const count = (): Measure => {
	return {
		column_name: 'count',
		data_type: 'Integer',
		aggregation: 'count',
		measure_name: 'count(*)',
	}
}
// Identity helpers: they exist purely so call sites can tag literal
// arguments with a precise type (FilterOperator / FilterValue).
export const operator = (operator: FilterOperator): FilterOperator => operator
export const value = (value: FilterValue): FilterValue => value
// Wrap a raw expression string into an Expression node.
export const expression = (expression: string): Expression => ({
	type: 'expression',
	expression,
})
// export const window_operation = (options: WindowOperationArgs): WindowOperation => ({
// type: 'window_operation',
// operation: options.operation,
// column: options.column,
// partition_by: options.partition_by,
// order_by: options.order_by,
// })
/**
 * Return the query's result rows with date columns rendered according to
 * the granularity of the dimension they were grouped by (taken from the
 * last summarize / pivot_wider step in the operation list).
 */
export function getFormattedRows(query: Query) {
	const result = query.result
	if (!result.rows?.length || !result.columns?.length) return []

	const rows = copy(result.rows)
	const columns = copy(result.columns)
	// Reverse a copy ONCE so both lookups see the same (reversed) order and
	// each `find` returns the LAST matching step. The previous code called
	// `.reverse()` (in-place) twice, which restored the original order and
	// made the pivot lookup return the FIRST pivot_wider step instead.
	const reversedOperations = copy(query.doc.operations).reverse()
	const summarize_step = reversedOperations.find((op) => op.type === 'summarize')
	const pivot_step = reversedOperations.find((op) => op.type === 'pivot_wider')

	// Granularity of a grouped date dimension, or null when not grouped.
	const getGranularity = (column_name: string) => {
		const dim =
			summarize_step?.dimensions.find((dim) => dim.column_name === column_name) ||
			pivot_step?.rows.find((dim) => dim.column_name === column_name)
		return dim ? dim.granularity : null
	}

	return rows.map((row) => {
		const formattedRow = { ...row }
		columns.forEach((column) => {
			if (FIELDTYPES.DATE.includes(column.type) && getGranularity(column.name)) {
				const granularity = getGranularity(column.name) as GranularityType
				formattedRow[column.name] = getFormattedDate(row[column.name], granularity)
			}
		})
		return formattedRow
	})
}
/**
 * Format a date string for display at the given granularity.
 * Unknown granularities and empty dates pass through unchanged / as ''.
 */
export function getFormattedDate(date: string, granularity: GranularityType) {
	if (!date) return ''
	const formats: Record<string, string> = {
		minute: 'MMMM D, YYYY h:mm A',
		hour: 'MMMM D, YYYY h:00 A',
		day: 'MMMM D, YYYY',
		week: 'MMM Do, YYYY',
		month: 'MMMM, YYYY',
		year: 'YYYY',
		quarter: '[Q]Q, YYYY',
	}
	const format = formats[granularity]
	// No display format registered for this granularity: return raw value.
	if (!format) return date
	return dayjs(date).format(format)
}
// Registry of every query operation type. Each entry bundles the UI label,
// icon, styling, a factory (`init`) that builds the typed operation node,
// and a `getDescription` used to render a short summary in the pipeline UI.
export const query_operation_types = {
	source: {
		label: 'Source',
		type: 'source',
		icon: TableIcon,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: SourceArgs): Source => ({ type: 'source', ...args }),
		getDescription: (op: Source) => {
			// Show the table name for DB tables, the query name for query sources.
			return op.table.type == 'table' ? `${op.table.table_name}` : `${op.table.query_name}`
		},
	},
	join: {
		label: 'Merge',
		type: 'join',
		icon: Merge,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: JoinArgs): Join => ({ type: 'join', ...args }),
		getDescription: (op: Join) => {
			return op.table.type == 'table' ? `${op.table.table_name}` : `${op.table.query_name}`
		},
	},
	union: {
		label: 'Union',
		type: 'union',
		// NOTE(review): reuses the `Merge` icon (same as join) — confirm intended.
		icon: Merge,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: UnionArgs): Union => ({ type: 'union', ...args }),
		getDescription: (op: Union) => {
			return op.table.type == 'table' ? `${op.table.table_name}` : `${op.table.query_name}`
		},
	},
	select: {
		label: 'Select',
		type: 'select',
		icon: Pointer,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: SelectArgs): Select => ({ type: 'select', ...args }),
		getDescription: (op: Select) => {
			return `${op.column_names.length} columns`
		},
	},
	remove: {
		label: 'Remove',
		type: 'remove',
		icon: XSquareIcon,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: RemoveArgs): Remove => ({ type: 'remove', ...args }),
		getDescription: (op: Remove) => {
			// List names when short, otherwise just show a count.
			if (op.column_names.length < 3) {
				return `${op.column_names.join(', ')}`
			}
			return `${op.column_names.length} columns`
		},
	},
	rename: {
		label: 'Rename',
		type: 'rename',
		icon: TextCursorInput,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: RenameArgs): Rename => ({ type: 'rename', ...args }),
		getDescription: (op: Rename) => {
			return `${op.column.column_name} -> ${op.new_name}`
		},
	},
	cast: {
		label: 'Cast',
		type: 'cast',
		icon: TextCursorInput,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: CastArgs): Cast => ({ type: 'cast', ...args }),
		getDescription: (op: Cast) => {
			return `${op.column.column_name} -> ${op.data_type}`
		},
	},
	filter: {
		label: 'Filter',
		type: 'filter',
		icon: FilterIcon,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: FilterArgs): Filter => ({ type: 'filter', ...args }),
		getDescription: (op: Filter) => {
			// Filter is a union of expression-based and column-based shapes;
			// the @ts-ignore's paper over access before narrowing.
			// @ts-ignore
			if (op.expression) return `custom expression`
			// @ts-ignore
			return `${op.column.column_name}`
		},
	},
	filter_group: {
		label: 'Filter Group',
		type: 'filter_group',
		icon: FilterIcon,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: FilterGroupArgs): FilterGroup => ({ type: 'filter_group', ...args }),
		getDescription: (op: FilterGroup) => {
			if (!op.filters.length) return 'empty'
			const columns = op.filters.map((f) => {
				if ('expression' in f) return 'custom expression'
				return f.column.column_name
			})
			// Show at most two column names, then "& N more".
			const more = columns.length - 2
			return `${columns.slice(0, 2).join(', ')}${more > 0 ? ` & ${more} more` : ''}`
		},
	},
	mutate: {
		label: 'Calculate',
		type: 'mutate',
		icon: Sigma,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: MutateArgs): Mutate => ({ type: 'mutate', ...args }),
		getDescription: (op: Mutate) => {
			return `${op.new_name}`
		},
	},
	summarize: {
		label: 'Summarize',
		type: 'summarize',
		icon: Combine,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: SummarizeArgs): Summarize => ({ type: 'summarize', ...args }),
		getDescription: (op: Summarize) => {
			const measures = op.measures.map((m) => m.measure_name).join(', ')
			const dimensions = op.dimensions.map((g) => g.column_name).join(', ')
			return `${measures} BY ${dimensions}`
		},
	},
	pivot_wider: {
		label: 'Pivot',
		type: 'pivot_wider',
		icon: GitBranch,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: PivotWiderArgs): PivotWider => ({ type: 'pivot_wider', ...args }),
		getDescription: (op: PivotWider) => {
			return 'Pivot Wider'
		},
	},
	order_by: {
		label: 'Sort',
		type: 'order_by',
		icon: ArrowUpDown,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: OrderByArgs): OrderBy => ({ type: 'order_by', ...args }),
		getDescription: (op: OrderBy) => {
			return `${op.column.column_name} ${op.direction}`
		},
	},
	limit: {
		label: 'Limit',
		type: 'limit',
		icon: Indent,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		// Unlike the others, limit's init takes a plain number.
		init: (limit: number): Limit => ({ type: 'limit', limit }),
		getDescription: (op: Limit) => {
			return `${op.limit}`
		},
	},
	custom_operation: {
		label: 'Custom Operation',
		type: 'custom_operation',
		icon: Sigma,
		color: 'gray',
		class: 'text-gray-600 bg-gray-100',
		init: (args: CustomOperationArgs): CustomOperation => ({ type: 'custom_operation', ...args }),
		getDescription: (op: CustomOperation) => {
			return `${op.expression.expression}`
		},
	},
}
// Convenience aliases: expose each operation's `init` factory at top level
// so callers can write e.g. `filter(...)` instead of going via the registry.
export const source = query_operation_types.source.init
export const join = query_operation_types.join.init
export const union = query_operation_types.union.init
export const select = query_operation_types.select.init
export const rename = query_operation_types.rename.init
export const remove = query_operation_types.remove.init
export const cast = query_operation_types.cast.init
export const filter = query_operation_types.filter.init
export const filter_group = query_operation_types.filter_group.init
export const mutate = query_operation_types.mutate.init
export const summarize = query_operation_types.summarize.init
export const pivot_wider = query_operation_types.pivot_wider.init
export const order_by = query_operation_types.order_by.init
export const limit = query_operation_types.limit.init
export const custom_operation = query_operation_types.custom_operation.init
|
2302_79757062/insights
|
frontend/src2/query/helpers.ts
|
TypeScript
|
agpl-3.0
| 9,298
|
import { useDebouncedRefHistory, UseRefHistoryReturn } from '@vueuse/core'
import { call } from 'frappe-ui'
import { computed, reactive } from 'vue'
import { copy, showErrorToast, wheneverChanges } from '../helpers'
import { confirmDialog } from '../helpers/confirm_dialog'
import { FIELDTYPES } from '../helpers/constants'
import { createToast } from '../helpers/toasts'
import {
ColumnDataType,
CustomOperationArgs,
Dimension,
DimensionDataType,
FilterGroupArgs,
JoinArgs,
Measure,
MeasureDataType,
MutateArgs,
Operation,
OrderByArgs,
PivotWiderArgs,
QueryResult,
Rename,
SelectArgs,
Source,
SourceArgs,
SummarizeArgs,
UnionArgs,
} from '../types/query.types'
import { WorkbookQuery } from '../types/workbook.types'
import {
cast,
column,
count,
custom_operation,
filter_group,
getFormattedRows,
join,
limit,
mutate,
order_by,
pivot_wider,
query_table,
remove,
rename,
select,
source,
summarize,
union,
} from './helpers'
// Module-level cache: one reactive Query instance per workbook query name.
const queries = new Map<string, Query>()
// Look up a previously created Query without creating one.
export function getCachedQuery(name: string): Query | undefined {
	return queries.get(name)
}
/**
 * Get (or lazily create and cache) the reactive Query for a workbook query.
 * Repeated calls with the same query name return the same instance.
 */
export default function useQuery(workbookQuery: WorkbookQuery) {
	const cached = queries.get(workbookQuery.name)
	if (cached) {
		return cached
	}
	const fresh = makeQuery(workbookQuery)
	queries.set(workbookQuery.name, fresh)
	return fresh
}
/**
 * Build a fresh reactive Query object around a WorkbookQuery document.
 * Bundles the doc, derived state (source, currentOperations, dimensions,
 * measures) and every mutation helper into a single reactive instance.
 *
 * NOTE(review): several fields are initialised with placeholder computeds
 * inside `reactive()` and then overwritten below via `@ts-ignore` — a
 * deliberate pattern so the real computeds can close over `query` itself.
 */
export function makeQuery(workbookQuery: WorkbookQuery) {
	const query = reactive({
		doc: workbookQuery,
		activeOperationIdx: -1,
		activeEditIndex: -1,
		source: computed(() => ({} as Source)),
		currentOperations: computed(() => [] as Operation[]),
		activeEditOperation: computed(() => ({} as Operation)),
		autoExecute: true,
		executing: false,
		result: { ...EMPTY_RESULT },
		getOperationsForExecution,
		execute,
		setOperations,
		setActiveOperation,
		setActiveEditIndex,
		removeOperation,
		setSource,
		addSource,
		addJoin,
		addUnion,
		addFilterGroup,
		addMutate,
		addSummarize,
		addOrderBy,
		removeOrderBy,
		addLimit,
		addPivotWider,
		selectColumns,
		renameColumn,
		removeColumn,
		changeColumnType,
		addCustomOperation,
		getDistinctColumnValues,
		getColumnsForSelection,
		downloadResults,
		dimensions: computed(() => ({} as Dimension[])),
		measures: computed(() => ({} as Measure[])),
		getDimension,
		getMeasure,
		addMeasure,
		updateMeasure,
		removeMeasure,
		reorderOperations,
		reset,
		history: {} as UseRefHistoryReturn<any, any>,
	})
	// Start with the full pipeline active (last operation selected).
	query.activeOperationIdx = query.doc.operations.length - 1
	// Dimensions = all non-measure result columns; date columns default to
	// month granularity.
	// @ts-ignore
	query.dimensions = computed(() => {
		if (!query.result.columns?.length) return []
		return query.result.columns
			.filter((column) => FIELDTYPES.DIMENSION.includes(column.type))
			.map((column) => {
				const isDate = FIELDTYPES.DATE.includes(column.type)
				return {
					column_name: column.name,
					data_type: column.type as DimensionDataType,
					granularity: isDate ? 'month' : undefined,
				}
			})
	})
	// Measures = count(*) + sum() over every numeric column + any
	// user-defined calculated measures stored on the doc.
	// @ts-ignore
	query.measures = computed(() => {
		if (!query.result.columns?.length) return []
		const count_measure = count()
		return [
			count_measure,
			...query.result.columns
				.filter((column) => FIELDTYPES.MEASURE.includes(column.type))
				.map((column) => {
					return {
						aggregation: 'sum',
						column_name: column.name,
						measure_name: `sum(${column.name})`,
						data_type: column.type as MeasureDataType,
					}
				}),
			...Object.values(query.doc.calculated_measures || {}),
		]
	})
	// The (single) source operation of this query, or {} when none exists.
	// @ts-ignore
	query.source = computed(() => {
		const sourceOp = query.doc.operations.find((op) => op.type === 'source')
		if (!sourceOp) return {} as Source
		return sourceOp as Source
	})
	// Operations up to and including the currently active one.
	// @ts-ignore
	query.currentOperations = computed(() => {
		const operations = [...query.doc.operations]
		if (query.activeOperationIdx >= 0) {
			operations.splice(query.activeOperationIdx + 1)
		}
		return operations
	})
	// The operation currently open in the edit panel, or {} when none.
	// @ts-ignore
	query.activeEditOperation = computed(() => {
		if (query.activeEditIndex === -1) return {}
		return query.doc.operations[query.activeEditIndex]
	})
	// Re-execute automatically whenever the visible pipeline changes.
	wheneverChanges(
		() => query.currentOperations,
		() => query.autoExecute && execute(),
		{ deep: true }
	)
	// Resolve the pipeline into a flat, executable operation list: inlines
	// query-based sources and query-based join/union tables (recursively,
	// via the query cache). Throws (after a toast) on unresolvable refs.
	function getOperationsForExecution(): Operation[] {
		if (!query.doc.operations.length) {
			return []
		}
		const sourceOp = query.doc.operations.find((op) => op.type === 'source')
		if (!sourceOp) {
			return []
		}
		if ('query' in sourceOp && sourceOp.query) {
			// move old structure to new structure
			// NOTE(review): this migration mutates the stored doc in place.
			sourceOp.table = query_table({
				query_name: sourceOp.query as string,
			})
			delete sourceOp.query
		}
		let _operations = [...query.currentOperations]
		if (sourceOp.table.type === 'query') {
			const sourceQuery = getCachedQuery(sourceOp.table.query_name)
			if (!sourceQuery) {
				const message = `Source query ${sourceOp.table.query_name} not found`
				createToast({
					variant: 'error',
					title: 'Error',
					message,
				})
				throw new Error(message)
			}
			// Splice the source query's resolved pipeline in place of our
			// source operation.
			const sourceQueryOperations = sourceQuery.getOperationsForExecution()
			const currentOperationsWithoutSource = query.currentOperations.slice(1)
			_operations = [...sourceQueryOperations, ...currentOperationsWithoutSource]
		}
		for (const op of _operations) {
			if (op.type !== 'join' && op.type !== 'union') continue
			if (op.table.type !== 'query') continue
			const queryTable = getCachedQuery(op.table.query_name)
			if (!queryTable) {
				const message = `Query ${op.table.query_name} not found`
				createToast({
					variant: 'error',
					title: 'Error',
					message,
				})
				throw new Error(message)
			}
			op.table.operations = queryTable.getOperationsForExecution()
		}
		return _operations
	}
	// Run the resolved pipeline server-side and store the result (rows,
	// columns, formatted rows, column options). Clears the result on error.
	async function execute() {
		if (!query.doc.operations.length) {
			query.result = { ...EMPTY_RESULT }
			return
		}
		query.executing = true
		return call('insights.api.workbooks.fetch_query_results', {
			use_live_connection: query.doc.use_live_connection,
			operations: query.getOperationsForExecution(),
		})
			.then((response: any) => {
				if (!response) return
				query.result.executedSQL = response.sql
				query.result.columns = response.columns
				query.result.rows = response.rows
				query.result.formattedRows = getFormattedRows(query)
				query.result.totalRowCount = response.total_row_count
				query.result.columnOptions = query.result.columns.map((column) => ({
					label: column.name,
					value: column.name,
					description: column.type,
					query: query.doc.name,
					data_type: column.type,
				}))
			})
			.catch((e: Error) => {
				query.result = { ...EMPTY_RESULT }
				showErrorToast(e)
			})
			.finally(() => {
				query.executing = false
			})
	}
	// Select an operation in the pipeline view (also closes any edit panel).
	function setActiveOperation(index: number) {
		query.activeOperationIdx = index
		query.activeEditIndex = -1
	}
	function setActiveEditIndex(index: number) {
		query.activeEditIndex = index
	}
	// Remove an operation; shift the active index left when the removed
	// operation was at or before it.
	function removeOperation(index: number) {
		query.doc.operations.splice(index, 1)
		if (index > query.activeOperationIdx) return
		query.activeOperationIdx--
		query.activeOperationIdx = Math.max(query.activeOperationIdx, -1)
	}
	// Replace the query's source. Changing an existing source clears all
	// operations, so a confirmation dialog guards that path.
	function setSource(args: SourceArgs) {
		const editingSource = query.activeEditOperation.type === 'source'
		const _setSource = () => {
			if (editingSource) {
				query.doc.operations[query.activeEditIndex] = source(args)
				query.setActiveEditIndex(-1)
			} else {
				query.setOperations([])
				query.addSource(args)
			}
		}
		if (!query.doc.operations.length || editingSource) {
			_setSource()
			return
		}
		confirmDialog({
			title: 'Change Source',
			message: 'Changing the source will clear the current operations. Please confirm.',
			onSuccess: _setSource,
		})
	}
	// Insert an operation right after the active one and advance the cursor.
	function addOperation(op: Operation) {
		query.doc.operations.splice(query.activeOperationIdx + 1, 0, op)
		query.activeOperationIdx++
	}
	function addSource(args: SourceArgs) {
		addOperation(source(args))
	}
	// The add* helpers below share a pattern: replace the operation being
	// edited when the edit panel is open for that type, otherwise append.
	function addJoin(args: JoinArgs) {
		const editingJoin = query.activeEditOperation.type === 'join'
		if (!editingJoin) {
			addOperation(join(args))
		} else {
			query.doc.operations[query.activeEditIndex] = join(args)
			query.setActiveEditIndex(-1)
		}
	}
	function addUnion(args: UnionArgs) {
		const editingUnion = query.activeEditOperation.type === 'union'
		if (!editingUnion) {
			addOperation(union(args))
		} else {
			query.doc.operations[query.activeEditIndex] = union(args)
			query.setActiveEditIndex(-1)
		}
	}
	function addFilterGroup(args: FilterGroupArgs) {
		const editingFilter =
			query.activeEditOperation.type === 'filter_group' ||
			query.activeEditOperation.type === 'filter'
		if (!editingFilter) {
			addOperation(filter_group(args))
		} else {
			query.doc.operations[query.activeEditIndex] = filter_group(args)
			query.setActiveEditIndex(-1)
		}
	}
	function addMutate(args: MutateArgs) {
		const editingMutate = query.activeEditOperation.type === 'mutate'
		if (!editingMutate) {
			addOperation(mutate(args))
		} else {
			query.doc.operations[query.activeEditIndex] = mutate(args)
			query.setActiveEditIndex(-1)
		}
	}
	function addSummarize(args: SummarizeArgs) {
		const editingSummarize = query.activeEditOperation.type === 'summarize'
		if (!editingSummarize) {
			addOperation(summarize(args))
		} else {
			query.doc.operations[query.activeEditIndex] = summarize(args)
			query.setActiveEditIndex(-1)
		}
	}
	// Add or update a sort. A duplicate (same column + direction) is a
	// no-op; same column with a new direction replaces the existing sort.
	function addOrderBy(args: OrderByArgs) {
		const existingOrderBy = query.currentOperations.find(
			(op) =>
				op.type === 'order_by' &&
				op.column.column_name === args.column.column_name &&
				op.direction === args.direction
		)
		if (existingOrderBy) return
		const existingOrderByIndex = query.currentOperations.findIndex(
			(op) => op.type === 'order_by' && op.column.column_name === args.column.column_name
		)
		if (existingOrderByIndex > -1) {
			query.currentOperations[existingOrderByIndex] = order_by(args)
		} else {
			addOperation(order_by(args))
		}
	}
	function removeOrderBy(column_name: string) {
		const index = query.doc.operations.findIndex(
			(op) => op.type === 'order_by' && op.column.column_name === column_name
		)
		if (index > -1) {
			query.doc.operations.splice(index, 1)
		}
	}
	function addLimit(args: number) {
		addOperation(limit(args))
	}
	function addPivotWider(args: PivotWiderArgs) {
		addOperation(pivot_wider(args))
	}
	function selectColumns(args: SelectArgs) {
		const editingSelect = query.activeEditOperation.type === 'select'
		if (!editingSelect) {
			addOperation(select(args))
		} else {
			query.doc.operations[query.activeEditIndex] = select(args)
			query.setActiveEditIndex(-1)
		}
	}
	function renameColumn(oldName: string, newName: string) {
		// first check if there's already a rename operation for the column
		const existingRenameIdx = query.currentOperations.findIndex(
			(op) => op.type === 'rename' && op.new_name === oldName
		)
		if (existingRenameIdx > -1) {
			const existingRename = query.currentOperations[existingRenameIdx] as Rename
			existingRename.new_name = newName
		}
		// if not, add a new rename operation
		else {
			addOperation(
				rename({
					column: column(oldName),
					new_name: newName,
				})
			)
		}
	}
	// Accepts a single name or a list; always records one remove operation.
	function removeColumn(column_names: string | string[]) {
		if (!Array.isArray(column_names)) column_names = [column_names]
		addOperation(remove({ column_names }))
	}
	function changeColumnType(column_name: string, newType: ColumnDataType) {
		addOperation(
			cast({
				column: column(column_name),
				data_type: newType,
			})
		)
	}
	function addCustomOperation(args: CustomOperationArgs) {
		const editingCustomOperation = query.activeEditOperation.type === 'custom_operation'
		if (!editingCustomOperation) {
			addOperation(custom_operation(args))
		} else {
			query.doc.operations[query.activeEditIndex] = custom_operation(args)
			query.setActiveEditIndex(-1)
		}
	}
	function setOperations(newOperations: Operation[]) {
		query.doc.operations = newOperations
		query.activeOperationIdx = newOperations.length - 1
	}
	// Rewrite the pipeline into canonical order (source, join, mutate,
	// filters, select, remove, cast, rename, order_by, limit), collapsing
	// repeated filter_group/select steps into one each.
	// NOTE(review): operation types not listed in `opsOrder` (summarize,
	// pivot_wider, union, custom_operation) are dropped by this rewrite —
	// confirm callers only invoke it on simple transform pipelines.
	function reorderOperations() {
		const sourceOp = query.doc.operations.find((op) => op.type === 'source')
		if (!sourceOp) return
		let newOperations: Operation[] = [sourceOp]
		const opsOrder = [
			'join',
			'mutate',
			'filter_group',
			'filter',
			'select',
			'remove',
			'cast',
			'rename',
			'order_by',
			'limit',
		]
		opsOrder.forEach((opType) => {
			newOperations.push(...query.doc.operations.filter((op) => op.type === opType))
		})
		// combine multiple filter_group & select operations into one
		const filterGroups: FilterGroupArgs[] = []
		const selects: SelectArgs[] = []
		newOperations.forEach((op) => {
			if (op.type === 'filter_group') {
				filterGroups.push(op as FilterGroupArgs)
			} else if (op.type === 'select') {
				selects.push(op as SelectArgs)
			}
		})
		if (filterGroups.length > 1) {
			const index = newOperations.findIndex((op) => op.type === 'filter_group')
			newOperations.splice(
				index,
				filterGroups.length,
				filter_group({
					logical_operator: 'And',
					filters: filterGroups.flatMap((fg) => fg.filters),
				})
			)
		}
		if (selects.length > 1) {
			const index = newOperations.findIndex((op) => op.type === 'select')
			newOperations.splice(
				index,
				selects.length,
				select({
					column_names: selects.flatMap((s) => s.column_names),
				})
			)
		}
		// append all mutated columns & joined columns to the select operation (if not already selected)
		const joinColumns = newOperations
			.filter((op) => op.type === 'join')
			.map((op) => op.select_columns.map((col) => col.column_name))
			.flat()
		const mutatedColumns = newOperations
			.filter((op) => op.type === 'mutate')
			.map((op) => op.new_name)
		const selectOp = newOperations.find((op) => op.type === 'select')
		if (selectOp) {
			const selectArgs = selectOp as SelectArgs
			joinColumns.forEach((column) => {
				if (!selectArgs.column_names.includes(column)) {
					selectArgs.column_names.push(column)
				}
			})
			mutatedColumns.forEach((column) => {
				if (!selectArgs.column_names.includes(column)) {
					selectArgs.column_names.push(column)
				}
			})
		}
		query.setOperations(newOperations)
	}
	// Fetch results as CSV and trigger a browser download via a temp anchor.
	function downloadResults() {
		return call('insights.api.workbooks.download_query_results', {
			use_live_connection: query.doc.use_live_connection,
			operations: query.getOperationsForExecution(),
		}).then((csv_data: string) => {
			const blob = new Blob([csv_data], { type: 'text/csv' })
			const url = window.URL.createObjectURL(blob)
			const a = document.createElement('a')
			a.setAttribute('hidden', '')
			a.setAttribute('href', url)
			a.setAttribute('download', `${query.doc.title || 'data'}.csv`)
			document.body.appendChild(a)
			a.click()
			document.body.removeChild(a)
		})
	}
	// Distinct values of a column, for filter-value pickers.
	// NOTE(review): slicing by activeEditIndex assumes doc operation indices
	// line up with the resolved execution list — not true when a query
	// source was inlined; confirm.
	function getDistinctColumnValues(column: string, search_term: string = '') {
		const operationsForExecution = query.getOperationsForExecution()
		const operations =
			query.activeEditIndex > -1
				? // when editing a filter, get distinct values from the operations before the filter
				  operationsForExecution.slice(0, query.activeEditIndex)
				: operationsForExecution
		return call('insights.api.workbooks.get_distinct_column_values', {
			use_live_connection: query.doc.use_live_connection,
			operations: operations,
			column_name: column,
			search_term,
		})
	}
	// Columns available to a select/summarize editor (excludes the step
	// being edited and everything after it).
	function getColumnsForSelection() {
		const operationsForExecution = query.getOperationsForExecution()
		const operations =
			query.activeEditOperation.type === 'select' || query.activeEditOperation.type === 'summarize'
				? operationsForExecution.slice(0, query.activeEditIndex)
				: operationsForExecution
		const method = 'insights.api.workbooks.get_columns_for_selection'
		return call(method, {
			use_live_connection: query.doc.use_live_connection,
			operations,
		})
	}
	function getDimension(column_name: string) {
		return query.dimensions.find((d) => d.column_name === column_name)
	}
	// NOTE(review): looks up measures by measure_name despite the parameter
	// being called column_name.
	function getMeasure(column_name: string) {
		return query.measures.find((m) => m.measure_name === column_name)
	}
	function addMeasure(measure: Measure) {
		query.doc.calculated_measures = {
			...query.doc.calculated_measures,
			[measure.measure_name]: measure,
		}
	}
	// Replace a calculated measure, allowing its name (key) to change.
	function updateMeasure(column_name: string, measure: Measure) {
		if (!query.doc.calculated_measures) query.doc.calculated_measures = {}
		delete query.doc.calculated_measures[column_name]
		query.doc.calculated_measures[measure.measure_name] = measure
	}
	function removeMeasure(column_name: string) {
		if (!query.doc.calculated_measures) return
		delete query.doc.calculated_measures[column_name]
	}
	// Snapshot of the doc as loaded, used by reset().
	const originalQuery = copy(workbookQuery)
	function reset() {
		query.doc = copy(originalQuery)
		query.activeOperationIdx = -1
		query.autoExecute = true
		query.executing = false
		query.result = {} as QueryResult
	}
	// Debounced undo/redo history over the doc + cursor state.
	query.history = useDebouncedRefHistory(
		// @ts-ignore
		computed({
			get() {
				return {
					doc: query.doc,
					activeOperationIdx: query.activeOperationIdx,
					activeEditIndex: query.activeEditIndex,
				}
			},
			set(value) {
				Object.assign(query.doc, value.doc)
				query.activeOperationIdx = value.activeOperationIdx
				query.activeEditIndex = value.activeEditIndex
			},
		}),
		{
			deep: true,
			max: 100,
			debounce: 500,
		}
	)
	return query
}
// Blank result used before the first execution and after a failed one.
const EMPTY_RESULT = {
	executedSQL: '',
	totalRowCount: 0,
	rows: [],
	formattedRows: [],
	columns: [],
	columnOptions: [],
} as QueryResult
// Public Query type: the reactive object returned by makeQuery.
export type Query = ReturnType<typeof makeQuery>
|
2302_79757062/insights
|
frontend/src2/query/query.ts
|
TypeScript
|
agpl-3.0
| 17,498
|
import { createRouter, createWebHistory, RouteLocation } from 'vue-router'
import session from './session.ts'
// Route table. `meta.isGuestView` marks pages reachable without a session;
// `meta.hideSidebar` suppresses the app chrome. All components lazy-load.
const routes = [
	{
		path: '/login',
		name: 'Login',
		component: () => import('./auth/Login.vue'),
		meta: { isGuestView: true, hideSidebar: true },
	},
	{
		path: '/',
		name: 'Home',
		redirect: '/workbook',
		component: () => import('./home/Home.vue'),
	},
	{
		path: '/dashboard',
		name: 'DashboardList',
		component: () => import('./dashboard/DashboardList.vue'),
	},
	{
		path: '/workbook',
		name: 'WorkbookList',
		component: () => import('./workbook/WorkbookList.vue'),
	},
	{
		props: true,
		name: 'Workbook',
		path: '/workbook/:name',
		component: () => import('./workbook/Workbook.vue'),
		// Opening a workbook jumps straight to its first query tab.
		redirect: (to: RouteLocation) => `/workbook/${to.params.name}/query/0`,
		meta: { hideSidebar: true },
		children: [
			{
				props: true,
				path: 'query/:index',
				name: 'WorkbookQuery',
				component: () => import('./workbook/WorkbookQuery.vue'),
			},
			{
				props: true,
				path: 'chart/:index',
				name: 'WorkbookChart',
				component: () => import('./workbook/WorkbookChart.vue'),
			},
			{
				props: true,
				path: 'dashboard/:index',
				name: 'WorkbookDashboard',
				component: () => import('./workbook/WorkbookDashboard.vue'),
			},
		],
	},
	{
		path: '/data-source',
		name: 'DataSourceList',
		component: () => import('./data_source/DataSourceList.vue'),
	},
	{
		props: true,
		path: '/data-source/:name',
		name: 'DataSourceTableList',
		component: () => import('./data_source/DataSourceTableList.vue'),
	},
	{
		path: '/users',
		name: 'UserList',
		component: () => import('./users/UserList.vue'),
	},
	{
		path: '/teams',
		name: 'TeamList',
		component: () => import('./teams/TeamList.vue'),
	},
	{
		// Catch-all: unknown paths render the NotFound page.
		path: '/:pathMatch(.*)*',
		component: () => import('./auth/NotFound.vue'),
		meta: { hideSidebar: true },
	},
]
// App router; all paths are served under the '/insights' base path.
let router = createRouter({
	history: createWebHistory('/insights'),
	// @ts-ignore
	routes,
})
// Global auth guard: bootstraps the session once, lets guest pages through,
// and routes everyone else according to their login state.
router.beforeEach(async (to, _, next) => {
	// Lazily initialize the session before the first navigation.
	if (!session.initialized) {
		await session.initialize()
	}

	// Guest-accessible pages (other than the login page itself) pass through.
	if (to.meta.isGuestView && !session.isLoggedIn && to.name !== 'Login') {
		return next()
	}

	if (!session.isLoggedIn) {
		// In dev mode, use the in-app login page.
		if (import.meta.env.DEV) {
			if (to.fullPath === '/login') return next()
			return next('/login')
		}
		// In production, hand off to the Frappe login page (oauth & signup).
		window.location.href = '/login'
		return next(false)
	}

	// Logged-in users should never land on the login page.
	if (to.path === '/login') {
		next('/')
	} else {
		next()
	}
})
// Intercept every fetch: a 403 while the cookies say "Guest" means the
// server-side session expired, so reset local state and go to login.
// Typed rest parameters replace the untyped `arguments` object (and the
// `@ts-ignore` it required) while preserving identical call behavior.
const _fetch = window.fetch
window.fetch = async function (...args: Parameters<typeof fetch>) {
	const res = await _fetch(...args)
	const looksLikeGuest = !document.cookie || document.cookie.includes('user_id=Guest')
	if (res.status === 403 && looksLikeGuest) {
		session.resetSession()
		router.push('/login')
	}
	return res
}

export default router
|
2302_79757062/insights
|
frontend/src2/router.ts
|
TypeScript
|
agpl-3.0
| 2,882
|
import { call } from 'frappe-ui'
import { computed, reactive } from 'vue'
// Shape of the logged-in user as assembled from cookies plus
// `insights.api.get_user_info`.
type SessionUser = {
	email: string
	first_name: string
	last_name: string
	full_name: string
	user_image: string
	is_admin: boolean
	is_user: boolean
	country: string
	locale: string
	// True when the account predates v3 (has v2 data).
	is_v2_user: boolean
	// Preferred UI version; '' means no preference recorded.
	default_version: 'v3' | 'v2' | ''
}
// Blank/guest user state; also used to reset the session.
const emptyUser: SessionUser = {
	email: '',
	first_name: '',
	last_name: '',
	full_name: '',
	user_image: '',
	is_admin: false,
	is_user: false,
	country: '',
	locale: 'en-US',
	is_v2_user: false,
	default_version: '',
}
// Reactive session singleton. The two computeds are declared with dummy
// values inside reactive() and then overwritten below (via @ts-ignore) so
// they can reference `session` itself — a deliberate bootstrap pattern.
const session = reactive({
	user: { ...emptyUser },
	initialized: false,
	isLoggedIn: computed(() => false),
	isAuthorized: computed(() => false),
	initialize,
	fetchSessionInfo,
	updateDefaultVersion,
	login,
	logout,
	resetSession,
})
// Logged in = a non-Guest email was found in cookies / user info.
// @ts-ignore
session.isLoggedIn = computed(() => session.user.email && session.user.email !== 'Guest')
// Authorized = has at least one of the Insights roles.
// @ts-ignore
session.isAuthorized = computed(() => session.user.is_admin || session.user.is_user)
/**
 * Bootstrap the session from cookies (and, when logged in, the server).
 * Idempotent unless `force` is true.
 */
async function initialize(force: boolean = false) {
	if (session.initialized && !force) return
	Object.assign(session.user, getSessionFromCookies())
	if (session.isLoggedIn) {
		await fetchSessionInfo()
	}
	session.initialized = true
}
/** Pull the full user record from the server and merge it into state. */
async function fetchSessionInfo() {
	if (!session.isLoggedIn) return
	const userInfo: SessionUser = await call('insights.api.get_user_info')
	// The backend sends 0/1 for these flags; coerce them to real booleans.
	const booleanFlags = {
		is_admin: Boolean(userInfo.is_admin),
		is_user: Boolean(userInfo.is_user),
		is_v2_user: Boolean(userInfo.is_v2_user),
	}
	Object.assign(session.user, { ...userInfo, ...booleanFlags })
}
// Optimistically update the preference locally, then persist it server-side.
function updateDefaultVersion(version: SessionUser['default_version']) {
	session.user.default_version = version
	return call('insights.api.update_default_version', { version })
}
// Log in with email/password, then hard-reload so the app boots with the
// fresh session cookies. A falsy response means login failed; no reload.
async function login(email: string, password: string) {
	resetSession()
	const userInfo = await call('login', { usr: email, pwd: password })
	if (!userInfo) return
	Object.assign(session.user, userInfo)
	window.location.reload()
}
// Clear local state, end the server session, and reload to the guest view.
async function logout() {
	resetSession()
	await call('logout')
	window.location.reload()
}
// Restore the in-memory user to the blank/guest state.
function resetSession() {
	Object.assign(session.user, { ...emptyUser })
}
/**
 * Parse document.cookie into a plain object; the `user_id` cookie is
 * exposed as `email` to match the SessionUser shape.
 */
function getSessionFromCookies() {
	return document.cookie
		.split('; ')
		.map((c) => {
			// Split on the FIRST '=' only: cookie values (base64 or
			// URL-encoded payloads) may themselves contain '=' characters,
			// which a plain split('=') would truncate.
			const idx = c.indexOf('=')
			if (idx === -1) return [c, '']
			return [c.slice(0, idx), c.slice(idx + 1)]
		})
		.reduce((acc, [key, value]) => {
			key = key === 'user_id' ? 'email' : key
			acc[key] = decodeURIComponent(value)
			return acc
		}, {} as any)
}
export default session
// Exported so consumers can type references to the session singleton.
export type Session = typeof session
|
2302_79757062/insights
|
frontend/src2/session.ts
|
TypeScript
|
agpl-3.0
| 2,424
|
import useDocumentResource from '../helpers/resource'
import { createToast } from '../helpers/toasts'
// Module-level singleton; created lazily on first access.
let settings: Settings | undefined = undefined

/** Return the cached settings resource, creating it on first use. */
export default function useSettings() {
	if (settings) {
		return settings
	}
	return makeSettings()
}
/**
 * Create the Insights Settings document resource, wire up a save toast,
 * and cache it as the module singleton.
 */
function makeSettings() {
	const doctype = 'Insights Settings'
	const initialDoc: InsightsSettings = {
		name: '',
		setup_complete: false,
		telegram_api_token: '',
		fiscal_year_start: '',
	}
	const _settings = useDocumentResource<InsightsSettings>(doctype, doctype, { initialDoc })
	// Confirm successful saves to the user.
	_settings.onAfterSave(() =>
		createToast({
			title: 'Settings Updated',
			message: 'Your settings have been updated successfully',
			variant: 'success',
		})
	)
	settings = _settings
	return _settings
}
// The resource wrapper type returned by makeSettings.
type Settings = ReturnType<typeof makeSettings>
// Fields of the 'Insights Settings' doctype used by the frontend.
type InsightsSettings = {
	name: string
	setup_complete: boolean
	telegram_api_token: string
	fiscal_year_start: string
}
|
2302_79757062/insights
|
frontend/src2/settings/settings.ts
|
TypeScript
|
agpl-3.0
| 886
|
import { io } from 'socket.io-client'
import { socketio_port } from '../../../../sites/common_site_config.json'
/**
 * Create the socket.io connection for realtime events.
 * Connects to `protocol://host[:socketio_port]/siteName`.
 */
export function initSocket() {
	const host = window.location.hostname
	// In dev the hostname doubles as the site name; in prod the backend
	// injects it on `window`.
	const siteName = import.meta.env.DEV ? host : window.site_name
	// A local port implies a dev http setup talking directly to the
	// socketio port; no port implies the https production proxy.
	const port = window.location.port ? `:${socketio_port}` : ''
	const protocol = port ? 'http' : 'https'
	const url = `${protocol}://${host}${port}/${siteName}`
	return io(url, {
		withCredentials: true,
		reconnectionAttempts: 5,
	})
}
|
2302_79757062/insights
|
frontend/src2/socket.ts
|
TypeScript
|
agpl-3.0
| 495
|
/* CodeMirror theme overrides for the Insights expression editor. */

/* The file previously declared `.cm-editor` twice; the rules are merged
   here. `padding: 0.5rem` from the second declaration was dead code —
   `padding: 0px !important` always took precedence. */
.cm-editor {
	height: 100%;
	width: 100%;
	border-radius: 0.375rem;
	user-select: text;
	padding: 0px !important;
	position: relative !important;
}
/* Duplicate `.cm-gutters` rules merged: the second was a strict subset of
   the first and contributed nothing. */
.cm-gutters {
	@apply !border-r !bg-transparent !px-1 !text-center !text-sm !leading-6 !text-gray-600;
}
.cm-foldGutter span {
	@apply !hidden !opacity-0;
}
.cm-gutterElement {
	@apply !text-center;
}
/* No active-line highlight; keep the editor visually flat. */
.cm-activeLine {
	@apply !bg-transparent;
}
.cm-activeLineGutter {
	@apply !bg-transparent text-gray-600;
}
.cm-placeholder {
	@apply !leading-6 !text-gray-500;
}
.cm-content {
	padding: 6px 0px !important;
}
.cm-scroller {
	@apply !font-mono !leading-6 !text-gray-600;
}
/* Matching brackets are underlined instead of boxed/highlighted. */
.cm-matchingBracket {
	font-weight: 500 !important;
	background: none !important;
	border-bottom: 1px solid #000 !important;
	outline: none !important;
}
.cm-focused {
	outline: none !important;
}
.cm-tooltip-autocomplete {
	border: 1px solid #fafafa !important;
	padding: 0.25rem;
	background-color: #fff !important;
	border-radius: 0.375rem;
	filter: drop-shadow(0 4px 3px rgb(0 0 0 / 0.07)) drop-shadow(0 2px 2px rgb(0 0 0 / 0.06));
}
.cm-tooltip-autocomplete > ul {
	font-family: 'Inter' !important;
}
.cm-tooltip-autocomplete ul li[aria-selected='true'] {
	@apply !rounded !bg-gray-200/80;
	color: #000 !important;
}
|
2302_79757062/insights
|
frontend/src2/styles/codemirror.css
|
CSS
|
agpl-3.0
| 1,382
|
<script setup lang="tsx">
import { Avatar, Breadcrumbs, ListView } from 'frappe-ui'
import { DatabaseIcon, PlusIcon, SearchIcon, Table2Icon } from 'lucide-vue-next'
import { computed, ref } from 'vue'
import UserSelector from '../components/UserSelector.vue'
import { copy } from '../helpers'
import session from '../session'
import useUserStore from '../users/users'
import TeamResourceSelector from './TeamResourceSelector.vue'
import useTeamStore, { Team, TeamPermission } from './teams'
const userStore = useUserStore()
const teamStore = useTeamStore()
teamStore.getTeams()
const searchQuery = ref('')
const filteredTeams = computed(() => {
	// Case-insensitive filter on team name; an empty query shows everything.
	const query = searchQuery.value.toLowerCase()
	if (!query) return teamStore.teams
	return teamStore.teams.filter((team) => team.team_name.toLowerCase().includes(query))
})
const listOptions = ref({
columns: [
{
label: 'Team',
key: 'team_name',
},
{
label: 'Owner',
key: 'owner',
getLabel(props: any) {
const team = props.row as Team
const user = userStore.getUser(team.owner)
return user?.full_name || team.owner
},
prefix(props: any) {
const team = props.row as Team
const imageUrl = userStore.getUser(team.owner)?.user_image
return <Avatar size="md" label={team.owner} image={imageUrl} />
},
},
{
label: 'Creation',
key: 'creation_from_now',
},
],
rows: filteredTeams,
rowKey: 'team_name',
options: {
showTooltip: false,
onRowClick: (team: Team) => {
editTeam.value = copy(team)
showEditTeamDialog.value = true
},
emptyState: {
title: 'No teams.',
description: 'No teams to display.',
button: session.user.is_admin
? {
label: 'Create Team',
variant: 'solid',
onClick: () => (showCreateTeamDialog.value = true),
}
: undefined,
},
},
})
const showCreateTeamDialog = ref(false)
const newTeamName = ref('')
const showEditTeamDialog = ref(false)
const editTeam = ref<Team | null>(null)
const teamModified = computed(() => {
	// Compare the edited copy against the store's version via JSON
	// serialization; false when nothing is being edited.
	if (!editTeam.value) return false
	const original = teamStore.teams.find((t) => t.name === editTeam.value?.name)
	return JSON.stringify(original) !== JSON.stringify(editTeam.value)
})
const newMemberEmail = ref<string>('')
function addMember() {
	// No-op unless a team is being edited and an email has been entered.
	const team = editTeam.value
	const email = newMemberEmail.value
	if (!team || !email) return
	team.team_members.push({ user: email })
	// Clear the input for the next entry.
	newMemberEmail.value = ''
}
function removeMember(userEmail: string) {
	// Drop the member with the given email from the team being edited.
	const team = editTeam.value
	if (!team) return
	team.team_members = team.team_members.filter((member) => member.user !== userEmail)
}
const newResources = ref<TeamPermission[]>([])
function removePermission(perm: TeamPermission) {
	// Remove the permission entry matching the given resource name from the
	// team being edited.
	const team = editTeam.value
	if (!team) return
	team.team_permissions = team.team_permissions.filter(
		(p) => p.resource_name !== perm.resource_name
	)
}
document.title = 'Teams | Insights'
</script>
<template>
<header class="mb-2 flex h-12 items-center justify-between border-b py-2.5 pl-5 pr-2">
<Breadcrumbs :items="[{ label: 'Teams', route: '/teams' }]" />
<div class="flex items-center gap-2">
<Button
v-if="session.user.is_admin"
label="Create Team"
variant="solid"
@click="showCreateTeamDialog = true"
>
<template #prefix>
<PlusIcon class="w-4" />
</template>
</Button>
</div>
</header>
<div class="mb-4 flex h-full flex-col gap-2 overflow-auto px-4">
<div class="flex gap-2 overflow-visible py-1">
<FormControl placeholder="Search" v-model="searchQuery" :debounce="300">
<template #prefix>
<SearchIcon class="h-4 w-4 text-gray-500" />
</template>
</FormControl>
</div>
<ListView class="h-full" v-bind="listOptions"> </ListView>
</div>
<Dialog
v-model="showCreateTeamDialog"
:options="{
title: 'Create Team',
actions: [
{
label: 'Create',
variant: 'solid',
disabled: !newTeamName || teamStore.creatingTeam,
loading: teamStore.creatingTeam,
onClick: () => {
teamStore.createTeam(newTeamName).then(() => {
showCreateTeamDialog = false
})
},
},
],
}"
>
<template #body-content>
<div class="flex flex-col gap-4">
<FormControl label="Team Name" v-model="newTeamName" autocomplete="off" />
</div>
</template>
</Dialog>
<Dialog
v-if="editTeam"
v-model="showEditTeamDialog"
:options="{
title: 'Manage Team',
actions: [
{
label: 'Done',
variant: 'solid',
disabled: !teamModified || teamStore.updatingTeam,
loading: teamStore.updatingTeam,
onClick: () => {
if (!editTeam) return
teamStore.updateTeam(editTeam).then(() => {
showEditTeamDialog = false
})
},
},
],
}"
>
<template #body-content>
<div class="-mb-5 flex flex-col gap-4 text-base">
<FormControl
label="Team Name"
v-model="editTeam.team_name"
:disabled="editTeam.name === 'Admin'"
autocomplete="off"
/>
<div class="flex flex-col gap-3">
<div class="space-y-1.5">
<label class="block text-xs text-gray-600">Members</label>
<div class="flex w-full gap-2">
<div class="flex-1">
<UserSelector
placeholder="Add members"
v-model="newMemberEmail"
:hide-users="editTeam.team_members.map((u) => u.user)"
/>
</div>
<Button
class="flex-shrink-0"
variant="solid"
label="Add"
:disabled="!newMemberEmail"
@click="addMember"
></Button>
</div>
</div>
<div class="flex max-h-[10rem] flex-col gap-1 overflow-y-auto">
<div
v-if="editTeam.team_members.length"
v-for="member in editTeam.team_members"
:key="member.user"
class="flex w-full items-center gap-2 py-1"
>
<Avatar
size="xl"
:label="userStore.getUser(member.user)?.full_name"
:image="userStore.getUser(member.user)?.user_image"
/>
<div class="flex flex-1 flex-col">
<div class="leading-5">
{{ userStore.getUser(member.user)?.full_name }}
</div>
<div class="text-xs text-gray-600">
{{ userStore.getUser(member.user)?.email }}
</div>
</div>
<Button
variant="ghost"
icon="x"
class="flex-shrink-0"
@click="removeMember(member.user)"
/>
</div>
<div
v-else
class="rounded border border-dashed border-gray-300 px-32 py-6 text-center text-sm text-gray-500"
>
This team does not have any members
</div>
</div>
</div>
<div class="flex flex-col gap-3">
<div class="space-y-1.5">
<label class="block text-xs text-gray-600">Access</label>
<p
v-if="editTeam.name == 'Admin'"
class="rounded bg-gray-50 p-2 text-sm leading-4 text-gray-600"
>
Admin team has access to all the data sources and tables. Members of
this team are allowed to manage teams, users, and other admin settings
</p>
<div v-else class="flex w-full gap-2">
<div class="flex-1">
<TeamResourceSelector v-model="newResources" :team="editTeam" />
</div>
</div>
</div>
<div
v-if="editTeam.name !== 'Admin'"
class="flex max-h-[10rem] flex-col divide-y overflow-y-auto"
>
<div
v-if="editTeam.team_permissions.length"
v-for="perm in editTeam.team_permissions"
:key="`${perm.resource_type}-${perm.resource_name}`"
class="flex w-full items-center gap-2 py-1"
>
<DatabaseIcon
v-if="perm.resource_type_label == 'Source'"
class="h-4 w-4 text-gray-700"
stroke-width="1.5"
/>
<Table2Icon
v-else-if="perm.resource_type_label == 'Table'"
class="h-4 w-4 text-gray-700"
stroke-width="1.5"
/>
<div class="flex flex-1 items-baseline gap-2">
<div class="">{{ perm.label }}</div>
<div class="text-xs text-gray-600">{{ perm.description }}</div>
</div>
<Badge size="md">{{ perm.resource_type_label }}</Badge>
<Button
variant="ghost"
icon="x"
class="flex-shrink-0"
@click="removePermission(perm)"
/>
</div>
<div
v-else
class="rounded border border-dashed border-gray-300 px-32 py-6 text-center text-sm text-gray-500"
>
This team does not have access to any data sources or tables
</div>
</div>
</div>
</div>
</template>
</Dialog>
</template>
|
2302_79757062/insights
|
frontend/src2/teams/TeamList.vue
|
Vue
|
agpl-3.0
| 8,507
|
<script setup lang="ts">
import { ref } from 'vue'
import { wheneverChanges } from '../helpers'
import useTeamStore, { ResourceOption, Team, TeamPermission } from './teams'
const props = defineProps<{ team: Team }>()
const newResources = defineModel<TeamPermission[]>({
required: true,
})
const teamStore = useTeamStore()
const groupedResourceOptions = ref([])
const resourceSearchQuery = ref('')
wheneverChanges(
() => [props.team.name, resourceSearchQuery.value],
() => {
if (!props.team.name) return
teamStore
.getResourceOptions(props.team.name, resourceSearchQuery.value)
.then((options: ResourceOption[]) => {
groupedResourceOptions.value = options.reduce(
(acc: any, option: ResourceOption) => {
if (option.resource_type_label === 'Source') {
acc[0].items.push(option)
}
if (option.resource_type_label === 'Table') {
acc[1].items.push(option)
}
return acc
},
[
{ group: 'Data Sources', items: [] },
{ group: 'Tables', items: [] },
]
)
})
},
{ deep: true, debounce: 500, immediate: true }
)
</script>
<template>
<Autocomplete
:multiple="true"
:hide-search="true"
:autofocus="false"
v-model="newResources"
:options="groupedResourceOptions"
>
<template #target="{ open }">
<FormControl
class="w-full"
type="text"
v-model="resourceSearchQuery"
placeholder="Add permissions"
autocomplete="off"
@update:modelValue="open"
@focus="open"
/>
</template>
<template #footer="{ togglePopover }">
<div class="flex items-center justify-between p-0.5">
<p class="px-3 text-sm text-gray-600">
{{ newResources.length }} resources selected
</p>
<div class="flex gap-1">
<Button
label="Reset"
:disabled="!newResources.length"
@click.prevent.stop="newResources = []"
>
</Button>
<Button
variant="solid"
label="Done"
:disabled="!newResources.length"
@click="
() => {
if (!team) return
team.team_permissions.push(...newResources)
newResources = []
togglePopover(false)
}
"
>
</Button>
</div>
</div>
</template>
</Autocomplete>
</template>
|
2302_79757062/insights
|
frontend/src2/teams/TeamResourceSelector.vue
|
Vue
|
agpl-3.0
| 2,234
|
import { useTimeAgo } from '@vueuse/core'
import { call } from 'frappe-ui'
import { reactive, ref } from 'vue'
import { showErrorToast } from '../helpers'
import { createToast } from '../helpers/toasts'
export type TeamMember = {
user: string
}
export type TeamPermission = {
resource_type: 'Insights Data Source v3' | 'Insights Table v3'
resource_type_label: 'Source' | 'Table'
resource_name: string
label: string
value: string
description: string
}
export type Team = {
name: string
team_name: string
owner: string
creation: string
creation_from_now: string
team_members: TeamMember[]
team_permissions: TeamPermission[]
}
const teams = ref<Team[]>([])
const loading = ref(false)
async function getTeams(search_term = '') {
	// Fetch teams from the backend and cache them in the module-level `teams`
	// ref. Each team gets a reactive `creation_from_now` time-ago value and a
	// UI-friendly `resource_type_label` on every permission entry.
	loading.value = true
	return call('insights.api.user.get_teams', { search_term })
		.then((res: Team[]) => {
			teams.value = res.map((t: any) => ({
				...t,
				creation_from_now: useTimeAgo(t.creation),
				team_permissions: t.team_permissions.map((p: any) => ({
					...p,
					resource_type_label: getResourceTypeLabel(p.resource_type),
				})),
			}))
			return teams.value
		})
		.finally(() => {
			// BUGFIX: `loading` was previously reset only on success, so a
			// failed request left the store stuck in the loading state.
			loading.value = false
		})
}
const creatingTeam = ref(false)
async function createTeam(team_name: string) {
	// Create a team on the server, refresh the cached list, and toast the
	// outcome. Errors are surfaced as a toast rather than rethrown.
	creatingTeam.value = true
	try {
		await call('insights.api.user.create_team', { team_name })
		getTeams()
		createToast({
			message: 'Team created',
			variant: 'success',
		})
	} catch (e) {
		showErrorToast(e as Error)
	} finally {
		creatingTeam.value = false
	}
}
const updatingTeam = ref(false)
async function updateTeam(team: Team) {
	// Persist changes to a team, refresh the cached list, and toast the
	// outcome. Errors are surfaced as a toast rather than rethrown.
	updatingTeam.value = true
	try {
		await call('insights.api.user.update_team', { team })
		getTeams()
		createToast({
			message: 'Team updated',
			variant: 'success',
		})
	} catch (e) {
		showErrorToast(e as Error)
	} finally {
		updatingTeam.value = false
	}
}
export type ResourceOption = TeamPermission
const fetchingResourceOptions = ref(false)
async function getResourceOptions(team_name: string, search_term = '') {
	// Fetch the resources (data sources / tables) that can be granted to
	// `team_name`, decorating each with a UI label derived from its doctype.
	// NOTE(review): on failure the error is toasted and the promise resolves
	// with `undefined` — callers should handle a missing result.
	fetchingResourceOptions.value = true
	return call('insights.api.user.get_resource_options', { team_name, search_term })
		.then((res: ResourceOption[]) =>
			res.map((p: any) => {
				return {
					...p,
					resource_type_label: getResourceTypeLabel(p.resource_type),
				}
			})
		)
		.catch((e: Error) => {
			showErrorToast(e)
		})
		.finally(() => {
			fetchingResourceOptions.value = false
		})
}
export default function useTeamStore() {
	// Lazily populate the shared cache on first use of the store.
	if (teams.value.length === 0) {
		getTeams()
	}
	// Expose the shared refs and actions as a single reactive store object.
	return reactive({
		teams,
		loading,
		getTeams,
		creatingTeam,
		createTeam,
		updatingTeam,
		updateTeam,
		fetchingResourceOptions,
		getResourceOptions,
	})
}
function getResourceTypeLabel(resource_type: string) {
	// Map Insights doctype names to short UI labels; unknown doctypes yield
	// undefined, matching the original fallthrough behavior.
	const labels: Record<string, 'Source' | 'Table'> = {
		'Insights Data Source v3': 'Source',
		'Insights Table v3': 'Table',
	}
	return labels[resource_type]
}
|
2302_79757062/insights
|
frontend/src2/teams/teams.ts
|
TypeScript
|
agpl-3.0
| 2,961
|
import { call } from 'frappe-ui'
import '../../../frappe/frappe/public/js/lib/posthog.js'
// No-op stub that is swapped for the real posthog client once the library
// loads (see the `loaded` callback below). Until then every call is a safe
// no-op, so callers may use `posthog.capture(...)` unconditionally.
const posthog = {
	init: (projectToken: string, options: any) => {},
	identify: (userId: string) => {},
	startSessionRecording: () => {},
	capture: (eventName: string, data?: any) => {},
}
// The bundled posthog script (imported above) attaches itself to `window`.
declare global {
	interface Window {
		posthog: typeof posthog
	}
}
// Shape of the payload returned by the telemetry settings API.
type PosthogSettings = {
	posthog_project_id: string
	posthog_host: string
	enable_telemetry: boolean
	telemetry_site_age: number
	record_session: boolean
	posthog_identifier: string
}
// Initialize telemetry on module load; bail out unless telemetry is enabled
// and a project id is configured.
call('insights.api.telemetry.get_posthog_settings').then((posthogSettings: PosthogSettings) => {
	if (!posthogSettings.enable_telemetry || !posthogSettings.posthog_project_id) {
		return
	}
	window.posthog.init(posthogSettings.posthog_project_id, {
		api_host: posthogSettings.posthog_host,
		person_profiles: 'identified_only',
		autocapture: false,
		capture_pageview: false,
		capture_pageleave: false,
		enable_heatmaps: false,
		disable_session_recording: true,
		loaded: (ph: typeof posthog) => {
			// Fall back to the site host when no explicit identifier is set.
			ph.identify(posthogSettings.posthog_identifier || window.location.host)
			// Replace the stub's no-op methods with the real client's.
			Object.assign(posthog, ph)
			if (posthogSettings.record_session) {
				ph.startSessionRecording()
			}
		},
	})
})
export { posthog }
|
2302_79757062/insights
|
frontend/src2/telemetry.ts
|
TypeScript
|
agpl-3.0
| 1,253
|
import { Dimension, Measure } from "./query.types"
// Chart types that plot series along an x axis.
export const AXIS_CHARTS = ['Bar', 'Line']
// NOTE(review): AXIS_CHARTS/CHARTS are not declared `as const`, so these
// indexed-access types widen to `string` rather than literal unions — confirm
// whether literal types were intended before tightening.
export type AxisChartType = (typeof AXIS_CHARTS)[number]
export const CHARTS = ['Number', ...AXIS_CHARTS, 'Donut', 'Table']
export type ChartType = (typeof CHARTS)[number]
// Shared configuration for axis-based charts (Bar, Line).
export type AxisChartConfig = {
	x_axis: Dimension
	y_axis: Measure[]
	// Optional secondary y-axis series and how to render them.
	y2_axis?: Measure[]
	y2_axis_type?: 'line' | 'bar'
	// Dimension used to split each series into multiple sub-series.
	split_by: Dimension
	show_data_labels?: boolean
}
export type BarChartConfig = AxisChartConfig & {
	stack?: boolean
	normalize?: boolean
	swap_axes?: boolean
}
export type LineChartConfig = AxisChartConfig & {
	smooth?: boolean
	show_data_points?: boolean
	show_area?: boolean
}
// Configuration for the "Number" (big value / KPI) chart.
export type NumberChartConfig = {
	number_columns: Measure[]
	comparison: boolean
	sparkline: boolean
	date_column?: Dimension
	shorten_numbers?: boolean
	decimal?: number
	prefix?: string
	suffix?: string
	negative_is_better?: boolean
}
// NOTE(review): "Dount" is a typo for "Donut", but renaming the exported type
// would break importers — fix in a coordinated rename.
export type DountChartConfig = {
	label_column: Dimension
	value_column: Measure
}
// Configuration for pivot-style table charts.
export type TableChartConfig = {
	rows: Dimension[]
	columns: Dimension[]
	values: Measure[]
}
export type ChartConfig = AxisChartConfig | NumberChartConfig | DountChartConfig | TableChartConfig
|
2302_79757062/insights
|
frontend/src2/types/chart.types.ts
|
TypeScript
|
agpl-3.0
| 1,191
|
export type TableArgs = { type: 'table'; data_source: string; table_name: string }
export type QueryTableArgs = {
type: 'query'
workbook: string
query_name: string
operations?: Operation[]
}
export type Table = TableArgs | QueryTableArgs
export type Column = {
type: 'column'
column_name: string
}
export type Measure = ColumnMeasure | ExpressionMeasure
export type ColumnMeasure = {
measure_name: string
column_name: string
data_type: MeasureDataType
aggregation: AggregationType
}
export type ExpressionMeasure = {
measure_name: string
expression: Expression
data_type: MeasureDataType
}
export type Dimension = {
column_name: string
data_type: DimensionDataType
granularity?: GranularityType
}
export type ColumnDataType =
| 'String'
| 'Integer'
| 'Decimal'
| 'Date'
| 'Datetime'
| 'Time'
| 'Text'
export type MeasureDataType = 'String' | 'Integer' | 'Decimal'
export type DimensionDataType = 'String' | 'Date' | 'Datetime' | 'Time'
export const aggregations = ['sum', 'count', 'avg', 'min', 'max', 'count_distinct']
export type AggregationType = (typeof aggregations)[number]
export type GranularityType = 'day' | 'week' | 'month' | 'quarter' | 'year'
export type DataFormat = 'currency' | 'percent'
export type FilterOperator =
| '='
| '!='
| '>'
| '>='
| '<'
| '<='
| 'in'
| 'not_in'
| 'between'
| 'within'
| 'contains'
| 'not_contains'
| 'starts_with'
| 'ends_with'
| 'is_set'
| 'is_not_set'
export type FilterValue = string | number | boolean | any[] | string[] | undefined
export type Expression = {
type: 'expression'
expression: string
}
export type SourceArgs = { table: Table }
export type Source = { type: 'source' } & SourceArgs
export type LogicalOperator = 'And' | 'Or'
export type FilterRule = {
column: Column
operator: FilterOperator
value: FilterValue | Column
}
export type FilterExpression = { expression: Expression }
export type FilterArgs = FilterRule | FilterExpression
export type Filter = { type: 'filter' } & FilterArgs
export type FilterGroupArgs = { logical_operator: LogicalOperator; filters: FilterArgs[] }
export type FilterGroup = { type: 'filter_group' } & FilterGroupArgs
export type SelectArgs = { column_names: string[] }
export type Select = { type: 'select' } & SelectArgs
export type RenameArgs = { column: Column; new_name: string }
export type Rename = { type: 'rename' } & RenameArgs
export type RemoveArgs = { column_names: string[] }
export type Remove = { type: 'remove' } & RemoveArgs
export type CastArgs = { column: Column; data_type: ColumnDataType }
export type Cast = { type: 'cast' } & CastArgs
export type JoinType = 'inner' | 'left' | 'right' | 'full'
export type JoinCondition =
| { left_column: Column; right_column: Column }
| { join_expression: Expression }
export type JoinArgs = {
join_type: JoinType
table: Table
select_columns: Column[]
join_condition: JoinCondition
}
export type Join = { type: 'join' } & JoinArgs
export type UnionArgs = { table: Table, distinct: boolean }
export type Union = { type: 'union' } & UnionArgs
export type MutateArgs = { new_name: string; data_type: ColumnDataType; expression: Expression }
export type Mutate = { type: 'mutate' } & MutateArgs
export type SummarizeArgs = { measures: Measure[]; dimensions: Dimension[] }
export type Summarize = { type: 'summarize' } & SummarizeArgs
export type OrderByArgs = { column: Column; direction: 'asc' | 'desc' }
export type OrderBy = { type: 'order_by' } & OrderByArgs
export type Limit = { type: 'limit'; limit: number }
export type WindowOperationType = 'sum' | 'lag_difference' | 'row_number'
export type WindowOperationArgs = {
op: WindowOperationType
column: Column
partition_by?: Column
order_by?: Column
}
export type WindowOperation = { type: 'window_operation' } & WindowOperationArgs
export type PivotWiderArgs = {
rows: Dimension[]
columns: Dimension[]
values: Measure[]
}
export type PivotWider = { type: 'pivot_wider' } & PivotWiderArgs
export type CustomOperationArgs = { expression: Expression }
export type CustomOperation = { type: 'custom_operation' } & CustomOperationArgs
export type Operation =
| Source
| Filter
| FilterGroup
| Select
| Rename
| Remove
| Cast
| Join
| Union
| Mutate
| Summarize
| OrderBy
| Limit
| PivotWider
| CustomOperation
export type QueryResultRow = Record<string, any>
export type QueryResultColumn = {
name: string
type: ColumnDataType
}
export type DropdownOption = {
label: string
value: string
description?: string
}
export type ColumnOption = DropdownOption & {
query: string
data_type: ColumnDataType
}
export type QueryResult = {
executedSQL: string
totalRowCount: number
rows: QueryResultRow[]
formattedRows: QueryResultRow[]
columns: QueryResultColumn[]
columnOptions: ColumnOption[]
}
|
2302_79757062/insights
|
frontend/src2/types/query.types.ts
|
TypeScript
|
agpl-3.0
| 4,791
|
import { ChartConfig, ChartType } from './chart.types'
import { ColumnDataType, FilterGroupArgs, Measure, Operation, OrderByArgs } from './query.types'
export type WorkbookListItem = {
title: string
name: string
owner: string
creation: string
modified: string
created_from_now: string
modified_from_now: string
}
export type InsightsWorkbook = {
doctype: 'Insights Workbook'
name: string
owner: string
title: string
queries: WorkbookQuery[]
charts: WorkbookChart[]
dashboards: WorkbookDashboard[]
}
export type WorkbookQuery = {
name: string
title?: string
operations: Operation[]
use_live_connection?: boolean
calculated_measures?: Record<string, Measure>
}
export type WorkbookChart = {
name: string
title: string
query: string
public: boolean
chart_type: ChartType
config: ChartConfig & {
order_by: OrderByArgs[]
filters?: FilterGroupArgs
limit?: number
}
}
export type WorkbookDashboard = {
name: string
title: string
items: WorkbookDashboardItem[]
}
export type WorkbookDashboardItem =
| WorkbookDashboardChart
| WorkbookDashboardFilter
| WorkbookDashboardText
export type Layout = {
i: string
x: number
y: number
w: number
h: number
}
export type WorkbookDashboardChart = {
type: 'chart'
chart: string
layout: Layout
}
export type WorkbookDashboardFilter = {
type: 'filter'
column: DashboardFilterColumn
layout: Layout
}
export type WorkbookDashboardText = {
type: 'text'
text: string
layout: Layout
}
export type DashboardFilterColumn = {
query: string
name: string
type: ColumnDataType
}
export type ShareAccess = 'view' | 'edit' | undefined
export type WorkbookSharePermission = { email: string; full_name: string; access: ShareAccess }
|
2302_79757062/insights
|
frontend/src2/types/workbook.types.ts
|
TypeScript
|
agpl-3.0
| 1,709
|
<script setup lang="tsx">
import { useTimeAgo } from '@vueuse/core'
import { Avatar, Breadcrumbs, ListView } from 'frappe-ui'
import { PlusIcon, SearchIcon, XIcon } from 'lucide-vue-next'
import { computed, ref, watch } from 'vue'
import IndicatorIcon from '../components/Icons/IndicatorIcon.vue'
import session from '../session'
import useUserStore, { User } from './users'
const userStore = useUserStore()
userStore.getUsers()
const searchQuery = ref('')
const filteredUsers = computed(() => {
	// Match against full name or email, case-insensitively; an empty query
	// shows everyone.
	const query = searchQuery.value.toLowerCase()
	if (!query) return userStore.users
	return userStore.users.filter(
		(user) =>
			user.full_name.toLowerCase().includes(query) ||
			user.email.toLowerCase().includes(query)
	)
})
const listOptions = ref({
columns: [
{
label: 'User',
key: 'full_name',
prefix: (props: any) => {
const user = props.row as User
return <Avatar size="md" label={user.full_name} image={user.user_image} />
},
},
{
label: 'Status',
key: 'enabled',
getLabel: (props: any) => {
const user = props.row as User
if (user.invitation_status) {
return user.invitation_status === 'Pending'
? 'Invitation Sent'
: 'Invitation Expired'
}
return props.row.enabled ? 'Enabled' : 'Disabled'
},
prefix: (props: any) => {
let color
const user = props.row as User
if (user.invitation_status) {
color =
user.invitation_status === 'Pending' ? 'text-yellow-500' : 'text-red-500'
} else {
color = props.row.enabled ? 'text-green-500' : 'text-gray-500'
}
return <IndicatorIcon class={color} />
},
},
{
label: 'Email',
key: 'email',
},
{
label: 'Last Active',
key: 'last_active',
getLabel: (props: any) => {
if (!props.row.last_active) {
return ''
}
return useTimeAgo(props.row.last_active).value
},
},
],
rows: filteredUsers,
rowKey: 'email',
options: {
showTooltip: false,
emptyState: {
title: 'No users.',
description: 'No users to display.',
button: session.user.is_admin
? {
label: 'Invite User',
variant: 'solid',
onClick: () => (showInviteUserDialog.value = true),
}
: undefined,
},
},
})
const showInviteUserDialog = ref(false)
const emailsToInvite = ref<string[]>([])
const emailsTxt = ref('')
watch(emailsTxt, extractEmails)
function extractEmails(emails: string) {
	// Only commit entries once the user has typed a separator (space/comma);
	// otherwise keep echoing the raw text back into the input.
	const endsWithSeparator = [' ', ','].includes(emails.slice(-1))
	if (!endsWithSeparator) {
		emailsTxt.value = emails
		return
	}
	// Split on commas/whitespace, drop empties and already-added addresses.
	const fresh = emails
		.split(/,|\s/)
		.filter(Boolean)
		.filter((email) => !emailsToInvite.value.includes(email))
	emailsToInvite.value = [...emailsToInvite.value, ...fresh]
	emailsTxt.value = ''
}
const areAllEmailsValid = computed(() => {
	// True only when at least one email is queued and each one matches a
	// basic local@domain.tld pattern.
	const emailPattern = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
	const emails = emailsToInvite.value
	return emails.length > 0 && emails.every((email) => emailPattern.test(email))
})
document.title = 'Users | Insights'
</script>
<template>
<header class="mb-2 flex h-12 items-center justify-between border-b py-2.5 pl-5 pr-2">
<Breadcrumbs :items="[{ label: 'Users', route: '/users' }]" />
<div class="flex items-center gap-2">
<Button
v-if="session.user.is_admin"
label="Invite User"
variant="solid"
@click="showInviteUserDialog = true"
>
<template #prefix>
<PlusIcon class="w-4" />
</template>
</Button>
</div>
</header>
<div class="mb-4 flex h-full flex-col gap-2 overflow-auto px-4">
<div class="flex gap-2 overflow-visible py-1">
<FormControl placeholder="Search" v-model="searchQuery" :debounce="300">
<template #prefix>
<SearchIcon class="h-4 w-4 text-gray-500" />
</template>
</FormControl>
</div>
<ListView class="h-full" v-bind="listOptions"> </ListView>
</div>
<Dialog
v-model="showInviteUserDialog"
:options="{
title: 'Invite User',
actions: [
{
label: 'Send Invitation',
variant: 'solid',
disabled: !areAllEmailsValid,
loading: userStore.sendingInvitation,
onClick: () => {
userStore.inviteUsers(emailsToInvite)
showInviteUserDialog = false
},
},
],
}"
>
<template #body-content>
<div class="flex flex-col gap-4">
<div class="flex flex-wrap gap-1 rounded bg-gray-100 p-0.5">
<Button
v-for="(email, idx) in emailsToInvite"
:key="email"
:label="email"
variant="outline"
class="shadow-sm"
>
<template #suffix>
<XIcon
class="h-4"
stroke-width="1.5"
@click.stop="() => emailsToInvite.splice(idx, 1)"
/>
</template>
</Button>
<div class="min-w-[10rem] flex-1">
<input
type="text"
autocomplete="off"
placeholder="Enter email address"
v-model="emailsTxt"
@keydown.enter.capture.stop="extractEmails(`${emailsTxt} `)"
class="h-7 w-full rounded border-none bg-gray-100 py-1.5 pl-2 pr-2 text-base text-gray-800 placeholder-gray-500 transition-colors focus:outline-none focus:ring-0 focus-visible:outline-none focus-visible:ring-0"
/>
</div>
</div>
</div>
</template>
</Dialog>
</template>
|
2302_79757062/insights
|
frontend/src2/users/UserList.vue
|
Vue
|
agpl-3.0
| 5,183
|
import { call } from 'frappe-ui'
import { reactive, ref } from 'vue'
import { createToast } from '../helpers/toasts'
import { showErrorToast } from '../helpers'
export type User = {
name: ''
email: ''
full_name: ''
user_image: ''
type: 'Admin' | 'User'
enabled: 1 | 0
last_active?: ''
invitation_status?: 'Pending' | 'Expired'
}
const users = ref<User[]>([])
const loading = ref(false)
async function getUsers(search_term = '') {
	// Fetch users matching `search_term` and cache them in the module-level
	// `users` ref; resolves with the cached list.
	loading.value = true
	return call('insights.api.user.get_users', { search_term })
		.then((res: User[]) => {
			users.value = res
			return users.value
		})
		.finally(() => {
			// BUGFIX: `loading` was previously reset only on success, so a
			// failed request left the store stuck in the loading state.
			loading.value = false
		})
}
function getUser(email: string) {
return users.value.find((user) => user.email === email)
}
const sendingInvitation = ref(false)
function inviteUsers(emails: string[]) {
	// Send invitations for all given addresses in one API call, then refresh
	// the user list and toast the result. Errors are toasted, not rethrown.
	sendingInvitation.value = true
	const payload = { emails: emails.join(',') }
	return call('insights.api.user.invite_users', payload)
		.then(() => {
			getUsers()
			const message =
				emails.length === 1
					? `Invitation sent to ${emails[0]}`
					: `Invitations sent to ${emails.length} users`
			createToast({
				title: 'Invitation Sent',
				message,
				variant: 'success',
			})
		})
		.catch((e: Error) => {
			showErrorToast(e)
		})
		.finally(() => {
			sendingInvitation.value = false
		})
}
export default function useUserStore() {
	// Lazily populate the shared cache on first use of the store.
	if (users.value.length === 0) {
		getUsers()
	}
	// Expose the shared refs and actions as a single reactive store object.
	return reactive({
		users,
		loading,
		getUsers,
		getUser,
		inviteUsers,
		sendingInvitation,
	})
}
|
2302_79757062/insights
|
frontend/src2/users/users.ts
|
TypeScript
|
agpl-3.0
| 1,450
|
<script setup lang="ts">
import { useMagicKeys, whenever } from '@vueuse/core'
import { Badge } from 'frappe-ui'
import { AlertOctagon, ArrowLeft } from 'lucide-vue-next'
import { computed, provide, watchEffect } from 'vue'
import { useRoute, useRouter } from 'vue-router'
import ContentEditable from '../components/ContentEditable.vue'
import Navbar from '../components/Navbar.vue'
import useWorkbook, { workbookKey } from './workbook'
import WorkbookNavbarActions from './WorkbookNavbarActions.vue'
import WorkbookSidebar from './WorkbookSidebar.vue'
import WorkbookTabSwitcher from './WorkbookTabSwitcher.vue'
defineOptions({ inheritAttrs: false })
const props = defineProps<{ name: string }>()
const router = useRouter()
const route = useRoute()
const workbook = useWorkbook(props.name)
provide(workbookKey, workbook)
const keys = useMagicKeys()
const cmdS = keys['Meta+S']
whenever(cmdS, () => workbook.save())
if (workbook.islocal && workbook.doc.queries.length === 0) {
workbook.addQuery()
}
const tabExists = computed(() => {
	// Derive the tab type from the route name, e.g. 'WorkbookQuery' -> 'query'.
	const tabType = route.name?.toString().replace('Workbook', '').toLowerCase()
	// NOTE(review): assumes `route.params.index` is always present on workbook
	// routes — this would throw if a matched route lacks the param; confirm
	// against the router configuration.
	const tabIndex = parseInt(route.params.index.toString())
	// Truthy only when the indexed query/chart/dashboard actually exists.
	return (
		(tabType === 'query' && workbook.doc.queries[tabIndex]) ||
		(tabType === 'chart' && workbook.doc.charts[tabIndex]) ||
		(tabType === 'dashboard' && workbook.doc.dashboards[tabIndex])
	)
})
watchEffect(() => {
document.title = `${workbook.doc.title} | Workbook`
})
</script>
<template>
<div class="flex h-full w-full flex-col">
<Navbar>
<template #left>
<Button variant="ghost" @click="router.push('/')">
<template #icon>
<ArrowLeft class="h-4 w-4" stroke-width="1.5" />
</template>
</Button>
</template>
<template #center>
<div class="flex gap-3">
<ContentEditable
class="rounded-sm font-medium !text-gray-800 focus:ring-2 focus:ring-gray-700 focus:ring-offset-4"
v-model="workbook.doc.title"
placeholder="Untitled Workbook"
></ContentEditable>
<Badge size="sm" v-if="workbook.islocal || workbook.isdirty" theme="orange">
Unsaved
</Badge>
</div>
</template>
<template #right>
<WorkbookNavbarActions />
</template>
</Navbar>
<div
class="relative flex w-full flex-1 overflow-hidden bg-gray-50"
:class="workbook.showSidebar ? 'flex-row' : 'flex-col'"
>
<WorkbookSidebar v-if="workbook.showSidebar" />
<WorkbookTabSwitcher v-else />
<RouterView :key="route.fullPath" v-slot="{ Component }">
<component v-if="tabExists" :is="Component" />
<div v-else class="flex flex-1 items-center justify-center">
<div class="flex flex-col items-center gap-4">
<AlertOctagon class="h-16 w-16 text-gray-400" stroke-width="1" />
<p class="text-center text-lg text-gray-500">
Looks like this tab doesn't exist <br />
Try switching to another tab
</p>
</div>
</div>
</RouterView>
</div>
</div>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/Workbook.vue
|
Vue
|
agpl-3.0
| 2,958
|
<script setup lang="ts">
import { computed, inject } from 'vue'
import ChartBuilder from '../charts/ChartBuilder.vue'
import { Workbook, workbookKey } from './workbook'

// Route-level tab component: `index` selects a chart within the workbook doc.
const props = defineProps<{ name?: string; index: number | string }>()

const workbook = inject(workbookKey) as Workbook

// `index` may arrive from the router as a string, hence Number().
const activeChart = computed(() => {
	const idx = Number(props.index)
	return workbook.doc.charts[idx]
})
</script>

<template>
	<ChartBuilder
		v-if="activeChart"
		:key="activeChart.name"
		:chart="activeChart"
		:queries="workbook.doc.queries"
	/>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookChart.vue
|
Vue
|
agpl-3.0
| 525
|
<script setup lang="ts">
import { computed, inject } from 'vue'
import DashboardBuilder from '../dashboard/DashboardBuilder.vue'
import { Workbook, workbookKey } from './workbook'

// Route-level tab component: `index` selects a dashboard within the workbook doc.
const props = defineProps<{ name?: string; index: number | string }>()

const workbook = inject(workbookKey) as Workbook

// `index` may arrive from the router as a string, hence Number().
const activeDashboard = computed(() => {
	const idx = Number(props.index)
	return workbook.doc.dashboards[idx]
})
</script>

<template>
	<DashboardBuilder
		v-if="activeDashboard"
		:key="activeDashboard.name"
		:dashboard="activeDashboard"
		:charts="workbook.doc.charts"
		:queries="workbook.doc.queries"
	/>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookDashboard.vue
|
Vue
|
agpl-3.0
| 596
|
<script setup lang="tsx">
import { Avatar, Breadcrumbs, ListView } from 'frappe-ui'
import { PlusIcon, SearchIcon } from 'lucide-vue-next'
import { computed, ref, watchEffect } from 'vue'
import { useRouter } from 'vue-router'
import { getUniqueId } from '../helpers'
import { WorkbookListItem } from '../types/workbook.types'
import useUserStore from '../users/users'
import useWorkbooks from './workbooks'
const router = useRouter()
const workbookStore = useWorkbooks()
// Refresh the shared workbook list whenever this page is mounted.
workbookStore.getWorkbooks()
// Case-insensitive title filter driven by the search box in the template.
const searchQuery = ref('')
const filteredWorkbooks = computed(() => {
	if (!searchQuery.value) {
		return workbookStore.workbooks
	}
	return workbookStore.workbooks.filter((workbook) =>
		workbook.title.toLowerCase().includes(searchQuery.value.toLowerCase())
	)
})
const userStore = useUserStore()
// Options object consumed by frappe-ui's <ListView> via v-bind below.
const listOptions = ref({
	columns: [
		{ label: 'Title', key: 'title' },
		{
			label: 'Owner',
			key: 'owner',
			// Resolve the owner's display name from the user store.
			getLabel(props: any) {
				const workbook = props.row as WorkbookListItem
				const user = userStore.getUser(workbook.owner)
				return user?.full_name
			},
			// JSX renderer: avatar shown before the owner label.
			prefix: (props: any) => {
				const workbook = props.row as WorkbookListItem
				const user = userStore.getUser(workbook.owner)
				return <Avatar size="md" label={workbook.owner} image={user?.user_image} />
			},
		},
		{ label: 'Created', key: 'created_from_now' },
		{ label: 'Modified', key: 'modified_from_now' },
	],
	rows: filteredWorkbooks,
	rowKey: 'name',
	options: {
		showTooltip: false,
		// Each row links to its workbook page.
		getRowRoute: (workbook: WorkbookListItem) => ({
			path: `/workbook/${workbook.name}`,
		}),
		emptyState: {
			title: 'No workbooks.',
			description: 'No workbooks to display.',
			button: {
				label: 'New Workbook',
				variant: 'solid',
				// Safe forward reference: function declarations are hoisted.
				onClick: openNewWorkbook,
			},
		},
	},
})
// Navigate to a fresh locally-named workbook; the server assigns a real name on save.
function openNewWorkbook() {
	const unique_id = getUniqueId()
	const name = `new-workbook-${unique_id}`
	router.push(`/workbook/${name}`)
}
watchEffect(() => {
	document.title = 'Workbooks | Insights'
})
</script>
<template>
	<header class="mb-2 flex h-12 items-center justify-between border-b py-2.5 pl-5 pr-2">
		<Breadcrumbs :items="[{ label: 'Workbooks', route: '/workbook' }]" />
		<div class="flex items-center gap-2">
			<Button label="New Workbook" variant="solid" @click="openNewWorkbook">
				<template #prefix>
					<PlusIcon class="w-4" />
				</template>
			</Button>
		</div>
	</header>
	<div class="mb-4 flex h-full flex-col gap-2 overflow-auto px-4">
		<div class="flex gap-2 overflow-visible py-1">
			<FormControl placeholder="Search by Title" v-model="searchQuery" :debounce="300">
				<template #prefix>
					<SearchIcon class="h-4 w-4 text-gray-500" />
				</template>
			</FormControl>
		</div>
		<ListView class="h-full" v-bind="listOptions"> </ListView>
	</div>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookList.vue
|
Vue
|
agpl-3.0
| 2,775
|
<script setup lang="ts">
import { inject, ref } from 'vue'
import { Workbook, workbookKey } from './workbook'
import WorkbookShareDialog from './WorkbookShareDialog.vue'
import { PanelRightClose, PanelRightOpen } from 'lucide-vue-next'
// Injected by the parent Workbook page.
const workbook = inject(workbookKey) as Workbook
const showShareDialog = ref(false)
</script>
<template>
	<div v-if="workbook" class="flex gap-2">
		<!-- Sharing requires a saved, clean workbook owned by the user -->
		<Button
			v-if="workbook.canShare && !workbook.isdirty && !workbook.islocal"
			variant="outline"
			@click="showShareDialog = true"
		>
			Share
		</Button>
		<!-- Discard only applies to saved workbooks with local edits -->
		<Button
			v-show="!workbook.islocal && workbook.isdirty"
			variant="outline"
			@click="workbook.discard()"
		>
			Discard
		</Button>
		<Button
			v-show="workbook.islocal || workbook.isdirty"
			variant="solid"
			:loading="workbook.saving"
			@click="workbook.save()"
		>
			Save
		</Button>
		<!-- Overflow menu: sidebar toggle, plus Delete for saved workbooks -->
		<Dropdown
			:button="{ icon: 'more-horizontal', variant: 'outline' }"
			:options="[
				{
					label: workbook.showSidebar ? 'Hide Sidebar' : 'Show Sidebar',
					icon: workbook.showSidebar ? PanelRightOpen : PanelRightClose,
					onClick: () => (workbook.showSidebar = !workbook.showSidebar),
				},
				!workbook.islocal
					? {
							label: 'Delete',
							icon: 'trash-2',
							onClick: () => workbook.delete(),
					  }
					: null,
			]"
		/>
	</div>
	<WorkbookShareDialog v-if="workbook.canShare && showShareDialog" v-model="showShareDialog" />
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookNavbarActions.vue
|
Vue
|
agpl-3.0
| 1,420
|
<script setup lang="ts">
import { computed, inject } from 'vue'
import QueryBuilder from '../query/QueryBuilder.vue'
import { Workbook, workbookKey } from './workbook'

// Route-level tab component: `index` selects a query within the workbook doc.
const props = defineProps<{ name?: string; index: number | string }>()

// May be undefined when rendered outside a workbook provider, hence `?.` below.
const workbook = inject<Workbook>(workbookKey)

// `index` may arrive from the router as a string, hence Number().
const activeQuery = computed(() => {
	const idx = Number(props.index)
	return workbook?.doc.queries[idx]
})
</script>

<template>
	<QueryBuilder v-if="activeQuery" :key="activeQuery.name" :query="activeQuery" />
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookQuery.vue
|
Vue
|
agpl-3.0
| 483
|
<script setup lang="ts">
import { computed, inject, ref } from 'vue'
import UserSelector from '../components/UserSelector.vue'
import { createToast } from '../helpers/toasts'
import session from '../session'
import { ShareAccess, WorkbookSharePermission } from '../types/workbook.types'
import useUserStore, { User } from '../users/users'
import { Workbook, workbookKey } from './workbook'
const show = defineModel()
const userStore = useUserStore()
const selectedUserEmail = ref<string>('')
// Grant the selected user view access; they can be upgraded via the dropdown.
function shareWorkbook() {
	if (!selectedUserEmail.value) return
	permissionMap.value[selectedUserEmail.value] = 'view'
	selectedUserEmail.value = ''
}
// `undefined` (rather than deleting the key) marks a permission as revoked:
// the entry must still be sent to the server so the removal gets persisted.
// The original type (`Record<string, ShareAccess>`) rejected that assignment
// under strict TS, so `undefined` is part of the value type.
type PermissionMap = Record<string, ShareAccess | undefined>
const permissionMap = ref<PermissionMap>({})
const accessOptions = (user_email: string) => [
	{ label: 'Can Edit', value: 'edit', onClick: () => (permissionMap.value[user_email] = 'edit') },
	{ label: 'Can View', value: 'view', onClick: () => (permissionMap.value[user_email] = 'view') },
	{
		label: 'Remove',
		value: 'remove',
		onClick: () => (permissionMap.value[user_email] = undefined),
	},
]
const workbook = inject(workbookKey) as Workbook
// Snapshot of server-side permissions, used to detect unsaved changes.
const workbookPermissions = ref<PermissionMap>({})
workbook.getSharePermissions().then((permissions) => {
	permissions.forEach((p: any) => {
		workbookPermissions.value[p.email] = p.access
		permissionMap.value[p.email] = p.access
	})
})
// Join the permission map with user profiles; unknown emails are dropped.
const userPermissions = computed(() => {
	return Object.keys(permissionMap.value)
		.map((email) => {
			const user = userStore.users.find((u) => u.email === email)
			if (!user) return null
			return {
				email,
				full_name: user.full_name,
				access: permissionMap.value[email],
			}
		})
		.filter(Boolean) as WorkbookSharePermission[]
})
const saveDisabled = computed(() => {
	// JSON.stringify drops `undefined` values, so a revoked-but-never-saved
	// entry compares equal to its absence in the snapshot.
	return JSON.stringify(permissionMap.value) === JSON.stringify(workbookPermissions.value)
})
function updatePermissions() {
	workbook.updateSharePermissions(userPermissions.value)
	show.value = false
	createToast({
		title: 'Permissions updated',
		variant: 'success',
	})
}
</script>
<template>
	<Dialog
		v-model="show"
		:options="{
			title: 'Share Workbook',
			actions: [
				{
					label: 'Save',
					variant: 'solid',
					disabled: saveDisabled,
					onClick: updatePermissions,
				},
			],
		}"
	>
		<template #body-content>
			<div class="-mb-8 flex flex-col gap-3 text-base">
				<div class="flex w-full gap-2">
					<div class="flex-1">
						<UserSelector
							v-model="selectedUserEmail"
							:hide-users="userPermissions.map((u) => u.email)"
						/>
					</div>
					<Button
						class="flex-shrink-0"
						variant="solid"
						label="Share"
						:disabled="!selectedUserEmail"
						@click="shareWorkbook"
					></Button>
				</div>
				<!-- Revoked entries (access === undefined) are hidden but kept in the map -->
				<div class="flex flex-col gap-1 overflow-y-auto">
					<div
						v-for="user in userPermissions.filter((u) => u.access)"
						:key="user.email"
						class="flex w-full items-center gap-2 py-1"
					>
						<Avatar
							size="xl"
							:label="user.full_name"
							:image="userStore.getUser(user.email)?.user_image"
						/>
						<div class="flex flex-1 flex-col">
							<div class="leading-5">{{ user.full_name }}</div>
							<div class="text-xs text-gray-600">{{ user.email }}</div>
						</div>
						<Dropdown
							v-if="user.email !== session.user.email"
							class="flex-shrink-0"
							placement="right"
							:options="accessOptions(user.email)"
							:button="{
								iconRight: 'chevron-down',
								variant: 'ghost',
								label: user.access === 'edit' ? 'Can Edit' : 'Can View',
							}"
						/>
						<Button
							v-else
							variant="ghost"
							label="Owner"
							disabled
							class="flex-shrink-0"
						/>
					</div>
				</div>
			</div>
		</template>
	</Dialog>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookShareDialog.vue
|
Vue
|
agpl-3.0
| 3,766
|
<script setup lang="ts">
import { LayoutPanelTop, Table2 } from 'lucide-vue-next'
import { inject } from 'vue'
import ChartIcon from '../charts/components/ChartIcon.vue'
import WorkbookSidebarListSection from './WorkbookSidebarListSection.vue'
import { Workbook, workbookKey } from './workbook'
// Injected by the parent Workbook page.
const workbook = inject(workbookKey) as Workbook
</script>
<template>
	<div
		v-if="workbook"
		class="relative flex h-full w-[17rem] flex-shrink-0 flex-col overflow-y-auto bg-white shadow-sm"
	>
		<!-- Queries section -->
		<WorkbookSidebarListSection
			v-bind="{
				title: 'Queries',
				emptyMessage: 'No queries',
				items: workbook.doc.queries,
				itemKey: 'name',
				isActive: (idx: number) => workbook.isActiveTab('query', idx),
				add: workbook.addQuery,
				remove: (query) => workbook.removeQuery(query.name),
				route: (idx: number) => `/workbook/${workbook.name}/query/${idx}`,
			}"
		>
			<template #item-icon>
				<Table2 class="h-4 w-4 text-gray-700" stroke-width="1.5" />
			</template>
		</WorkbookSidebarListSection>
		<!-- Charts section; icon varies per chart type -->
		<WorkbookSidebarListSection
			v-bind="{
				title: 'Charts',
				emptyMessage: 'No charts',
				items: workbook.doc.charts,
				itemKey: 'name',
				isActive: (idx: number) => workbook.isActiveTab('chart', idx),
				add: workbook.addChart,
				remove: (chart) => workbook.removeChart(chart.name),
				route: (idx: number) => `/workbook/${workbook.name}/chart/${idx}`,
			}"
		>
			<template #item-icon="{ item }">
				<ChartIcon :chart-type="item.chart_type" />
			</template>
		</WorkbookSidebarListSection>
		<!-- Dashboards section -->
		<WorkbookSidebarListSection
			v-bind="{
				title: 'Dashboards',
				emptyMessage: 'No dashboards',
				items: workbook.doc.dashboards,
				itemKey: 'name',
				isActive: (idx: number) => workbook.isActiveTab('dashboard', idx),
				add: workbook.addDashboard,
				remove: (dashboard) => workbook.removeDashboard(dashboard.name),
				route: (idx: number) => `/workbook/${workbook.name}/dashboard/${idx}`,
			}"
		>
			<template #item-icon>
				<LayoutPanelTop class="h-4 w-4 text-gray-700" stroke-width="1.5" />
			</template>
		</WorkbookSidebarListSection>
	</div>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookSidebar.vue
|
Vue
|
agpl-3.0
| 2,108
|
<script setup lang="ts">
import { Plus, X } from 'lucide-vue-next'
// Generic sidebar section: a titled, draggable list of items
// (queries/charts/dashboards) with add/remove/navigate callbacks.
const section = defineProps<{
	title: string
	emptyMessage: string
	items: any[]
	itemKey: string
	isActive: (idx: number) => boolean
	add: () => void
	remove: (item: any) => void
	route: (idx: number) => string
}>()
// Serialize the dragged row so drop targets (e.g. a dashboard canvas)
// can identify both the section type and the item.
function setDraggedItem(event: DragEvent, row: any) {
	if (!event.dataTransfer) return
	const data = JSON.stringify({ type: section.title, item: row })
	event.dataTransfer.setData('text/plain', data)
}
</script>
<template>
	<div class="flex flex-col border-b px-2.5 py-2">
		<!-- Section header with "add" button -->
		<div class="mb-1 flex h-6 items-center justify-between">
			<div class="flex items-center gap-1">
				<div class="text-sm font-medium">{{ section.title }}</div>
			</div>
			<div>
				<button
					class="cursor-pointer rounded p-1 transition-colors hover:bg-gray-100"
					@click="section.add()"
				>
					<Plus class="h-4 w-4 text-gray-700" stroke-width="1.5" />
				</button>
			</div>
		</div>
		<!-- Empty placeholder -->
		<div
			v-if="!section.items.length"
			class="flex h-12 flex-col items-center justify-center rounded border border-dashed border-gray-300 py-2"
		>
			<div class="text-xs text-gray-500">{{ section.emptyMessage }}</div>
		</div>
		<!-- Item rows: draggable, navigable, removable on hover -->
		<div v-else class="flex flex-col">
			<div
				v-for="(row, idx) in section.items"
				:key="row[section.itemKey]"
				class="group w-full cursor-pointer rounded transition-all hover:bg-gray-50"
				:class="section.isActive(idx) ? ' bg-gray-100' : ' hover:border-gray-300'"
				draggable="true"
				@dragstart="setDraggedItem($event, row)"
			>
				<router-link
					:to="route(idx)"
					class="flex h-7.5 items-center justify-between rounded pl-1.5 text-sm"
				>
					<div class="flex gap-1.5">
						<slot name="item-icon" :item="row" />
						<p>{{ row.title }}</p>
					</div>
					<button
						class="invisible cursor-pointer rounded p-1 transition-all hover:bg-gray-100 group-hover:visible"
						@click.prevent.stop="section.remove(row)"
					>
						<X class="h-4 w-4 text-gray-700" stroke-width="1.5" />
					</button>
				</router-link>
			</div>
		</div>
	</div>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookSidebarListSection.vue
|
Vue
|
agpl-3.0
| 2,081
|
<script setup lang="ts">
import { LayoutPanelTop, Table2, XIcon } from 'lucide-vue-next'
import { inject } from 'vue'
import { useRouter } from 'vue-router'
import ChartIcon from '../charts/components/ChartIcon.vue'
import { workbookKey } from './workbook'
// Horizontal tab strip shown when the sidebar is hidden.
const router = useRouter()
// May be undefined outside a workbook provider; the template guards with v-if.
const workbook = inject(workbookKey)
</script>
<template>
	<div v-if="workbook" class="relative flex flex-shrink-0 items-center overflow-x-auto bg-white">
		<div class="flex">
			<!-- Tabs are keyed by the stable document `name` (not the array index)
			     so removing a tab doesn't re-associate DOM state with the wrong item. -->
			<button
				v-for="(query, idx) in workbook.doc.queries"
				:key="query.name"
				class="flex h-10 items-center border-y-2 border-r border-transparent border-r-gray-200 px-3 text-base transition-all"
				:class="workbook.isActiveTab('query', idx) ? 'border-b-gray-800' : ''"
				@click="router.push(`/workbook/${workbook.name}/query/${idx}`)"
			>
				<Table2 class="h-3.5 w-3.5 text-gray-700" stroke-width="1.5" />
				<span class="ml-2">{{ query.title }}</span>
				<XIcon
					class="ml-2 h-3.5 w-3.5 cursor-pointer text-gray-500 transition-all hover:text-gray-800"
					@click.prevent.stop="workbook.removeQuery(query.name)"
				/>
			</button>
			<button
				v-for="(chart, idx) in workbook.doc.charts"
				:key="chart.name"
				class="flex h-10 items-center border-y-2 border-r border-transparent border-r-gray-200 px-3 text-base transition-all"
				:class="workbook.isActiveTab('chart', idx) ? 'border-b-gray-800' : ''"
				@click="router.push(`/workbook/${workbook.name}/chart/${idx}`)"
			>
				<ChartIcon :chart-type="chart.chart_type" />
				<span class="ml-2">{{ chart.title }}</span>
				<XIcon
					class="ml-2 h-3.5 w-3.5 cursor-pointer text-gray-500 transition-all hover:text-gray-800"
					@click.prevent.stop="workbook.removeChart(chart.name)"
				/>
			</button>
			<button
				v-for="(dashboard, idx) in workbook.doc.dashboards"
				:key="dashboard.name"
				class="flex h-10 items-center border-y-2 border-r border-transparent border-r-gray-200 px-3 text-base transition-all"
				:class="workbook.isActiveTab('dashboard', idx) ? 'border-b-gray-800' : ''"
				@click="router.push(`/workbook/${workbook.name}/dashboard/${idx}`)"
			>
				<LayoutPanelTop class="h-3.5 w-3.5 text-gray-700" stroke-width="1.5" />
				<span class="ml-2">{{ dashboard.title }}</span>
				<XIcon
					class="ml-2 h-3.5 w-3.5 cursor-pointer text-gray-500 transition-all hover:text-gray-800"
					@click.prevent.stop="workbook.removeDashboard(dashboard.name)"
				/>
			</button>
		</div>
		<!-- Create a new tab of any type -->
		<Dropdown
			class="ml-1.5"
			:options="[
				{ label: 'New Query', onClick: workbook.addQuery },
				{ label: 'New Chart', onClick: workbook.addChart },
				{ label: 'New Dashboard', onClick: workbook.addDashboard },
			]"
		>
			<Button variant="ghost" icon="plus"> </Button>
		</Dropdown>
	</div>
</template>
|
2302_79757062/insights
|
frontend/src2/workbook/WorkbookTabSwitcher.vue
|
Vue
|
agpl-3.0
| 2,739
|
import { call } from 'frappe-ui'
import { computed, InjectionKey, reactive, toRefs } from 'vue'
import { useRouter } from 'vue-router'
import useChart from '../charts/chart'
import useDashboard from '../dashboard/dashboard'
import { getUniqueId, safeJSONParse, wheneverChanges } from '../helpers'
import { confirmDialog } from '../helpers/confirm_dialog'
import useDocumentResource from '../helpers/resource'
import { createToast } from '../helpers/toasts'
import useQuery, { getCachedQuery } from '../query/query'
import session from '../session'
import { Join, Source } from '../types/query.types'
import type {
InsightsWorkbook,
WorkbookChart,
WorkbookSharePermission,
} from '../types/workbook.types'
export default function useWorkbook(name: string) {
	// Composable wrapping the "Insights Workbook" document resource with
	// tab navigation and query/chart/dashboard management helpers.
	const workbook = getWorkbookResource(name)
	const router = useRouter()
	// After the first insert the server assigns the real name; reload at the new URL.
	workbook.onAfterInsert(() => {
		window.location.href = window.location.href.replace(name, workbook.doc.name)
	})
	workbook.onAfterSave(() => createToast({ title: 'Saved', variant: 'success' }))
	wheneverChanges(
		() => workbook.doc,
		() => {
			// load & cache queries, charts and dashboards
			// fix: discarding workbook changes doesn't reset the query/chart/dashboard doc
			// this is because, when the workbook doc is updated,
			// the reference to the workbook.doc.queries/charts/dashboards is lost
			// so we need to update the references to the new queries/charts/dashboards
			workbook.doc.queries.forEach((q) => (useQuery(q).doc = q))
			workbook.doc.charts.forEach((c) => (useChart(c).doc = c))
			workbook.doc.dashboards.forEach((d) => (useDashboard(d).doc = d))
		}
	)
	// Navigate to a tab; an empty `type` goes back to the workbook root.
	function setActiveTab(type: 'query' | 'chart' | 'dashboard' | '', idx: number) {
		router.replace(
			type ? `/workbook/${workbook.name}/${type}/${idx}` : `/workbook/${workbook.name}`
		)
	}
function isActiveTab(type: 'query' | 'chart' | 'dashboard', idx: number) {
const url = router.currentRoute.value.path
const regex = new RegExp(`/workbook/${workbook.name}/${type}/${idx}`)
return regex.test(url)
}
	// Append a blank query and jump to its tab.
	function addQuery() {
		const idx = workbook.doc.queries.length
		workbook.doc.queries.push({
			name: getUniqueId(),
			title: `Query ${idx + 1}`,
			use_live_connection: true,
			operations: [],
		})
		setActiveTab('query', idx)
	}
	// Confirm, then remove the query; returns to the root tab if it was open.
	function removeQuery(queryName: string) {
		function _remove() {
			const idx = workbook.doc.queries.findIndex((row) => row.name === queryName)
			if (idx === -1) return
			workbook.doc.queries.splice(idx, 1)
			if (isActiveTab('query', idx)) {
				setActiveTab('', 0)
			}
		}
		confirmDialog({
			title: 'Delete Query',
			message: 'Are you sure you want to delete this query?',
			onSuccess: _remove,
		})
	}
	// Append a blank line chart and jump to its tab.
	function addChart() {
		const idx = workbook.doc.charts.length
		workbook.doc.charts.push({
			name: getUniqueId(),
			title: `Chart ${idx + 1}`,
			query: '',
			chart_type: 'Line',
			public: false,
			config: {} as WorkbookChart['config'],
		})
		setActiveTab('chart', idx)
	}
	// Confirm, then remove the chart; returns to the root tab if it was open.
	function removeChart(chartName: string) {
		function _remove() {
			const idx = workbook.doc.charts.findIndex((row) => row.name === chartName)
			if (idx === -1) return
			workbook.doc.charts.splice(idx, 1)
			if (isActiveTab('chart', idx)) {
				setActiveTab('', 0)
			}
		}
		confirmDialog({
			title: 'Delete Chart',
			message: 'Are you sure you want to delete this chart?',
			onSuccess: _remove,
		})
	}
	// Append a blank dashboard and jump to its tab.
	function addDashboard() {
		const idx = workbook.doc.dashboards.length
		workbook.doc.dashboards.push({
			name: getUniqueId(),
			title: `Dashboard ${idx + 1}`,
			items: [],
		})
		setActiveTab('dashboard', idx)
	}
	// Confirm, then remove the dashboard; returns to the root tab if it was open.
	function removeDashboard(dashboardName: string) {
		function _remove() {
			const idx = workbook.doc.dashboards.findIndex((row) => row.name === dashboardName)
			if (idx === -1) return
			workbook.doc.dashboards.splice(idx, 1)
			if (isActiveTab('dashboard', idx)) {
				setActiveTab('', 0)
			}
		}
		confirmDialog({
			title: 'Delete Dashboard',
			message: 'Are you sure you want to delete this dashboard?',
			onSuccess: _remove,
		})
	}
	// Only the owner may manage sharing.
	const isOwner = computed(() => workbook.doc.owner === session.user?.email)
	const canShare = computed(() => isOwner.value)
	async function getSharePermissions(): Promise<WorkbookSharePermission[]> {
		// Map server read/write flags onto the frontend's 'view'/'edit' access enum.
		const method = 'insights.api.workbooks.get_share_permissions'
		return call(method, { workbook_name: workbook.name }).then((permissions: any) => {
			return permissions.map((p: any) => {
				return {
					email: p.user,
					full_name: p.full_name,
					// write implies edit; read-only means view; neither -> undefined
					access: p.read ? (p.write ? 'edit' : 'view') : undefined,
				}
			})
		})
	}
	async function updateSharePermissions(permissions: WorkbookSharePermission[]) {
		// Inverse of getSharePermissions: access enum -> read/write flags.
		const method = 'insights.api.workbooks.update_share_permissions'
		return call(method, {
			workbook_name: workbook.name,
			permissions: permissions.map((p) => {
				return {
					user: p.email,
					read: p.access === 'view',
					write: p.access === 'edit',
				}
			}),
		})
	}
	// Confirm, delete on the server, then leave the workbook page.
	function deleteWorkbook() {
		confirmDialog({
			title: 'Delete Workbook',
			message: 'Are you sure you want to delete this workbook?',
			theme: 'red',
			onSuccess: () => {
				workbook.delete().then(() => {
					router.replace('/workbook')
				})
			},
		})
	}
function getLinkedQueries(query_name: string): string[] {
const query = getCachedQuery(query_name)
if (!query) {
console.error(`Query ${query_name} not found`)
return []
}
const querySource = query.doc.operations.find(
(op) => op.type === 'source' && op.table.type === 'query' && op.table.query_name
) as Source
const queryJoins = query.doc.operations.filter(
(op) => op.type === 'join' && op.table.type === 'query' && op.table.query_name
) as Join[]
const linkedQueries = [] as string[]
if (querySource && querySource.table.type === 'query') {
linkedQueries.push(querySource.table.query_name)
}
if (queryJoins.length) {
queryJoins.forEach((j) => {
if (j.table.type === 'query') {
linkedQueries.push(j.table.query_name)
}
})
}
const linkedQueriesByQuery = {} as Record<string, string[]>
linkedQueries.forEach((q) => {
linkedQueriesByQuery[q] = getLinkedQueries(q)
})
Object.values(linkedQueriesByQuery).forEach((subLinkedQueries) => {
linkedQueries.concat(subLinkedQueries)
})
return linkedQueries
}
	// Public composable API: resource refs plus the helpers defined above.
	return reactive({
		...toRefs(workbook),
		canShare,
		isOwner,
		showSidebar: true,
		isActiveTab,
		addQuery,
		removeQuery,
		addChart,
		removeChart,
		addDashboard,
		removeDashboard,
		getSharePermissions,
		updateSharePermissions,
		getLinkedQueries,
		delete: deleteWorkbook,
	})
}
export type Workbook = ReturnType<typeof useWorkbook>
export const workbookKey = Symbol() as InjectionKey<Workbook>
function getWorkbookResource(name: string) {
	// Wrap the "Insights Workbook" doc in a reactive document resource and
	// normalize the JSON-encoded child collections on load.
	const doctype = 'Insights Workbook'
	const workbook = useDocumentResource<InsightsWorkbook>(doctype, name, {
		initialDoc: {
			doctype,
			name,
			owner: '',
			title: '',
			queries: [],
			charts: [],
			dashboards: [],
		},
		transform(doc) {
			// The server stores these as JSON strings; parse into arrays.
			doc.queries = safeJSONParse(doc.queries) || []
			doc.charts = safeJSONParse(doc.charts) || []
			doc.dashboards = safeJSONParse(doc.dashboards) || []
			doc.charts.forEach((chart) => {
				// Backfill config defaults for charts saved before these fields existed.
				chart.config.filters = chart.config.filters?.filters?.length
					? chart.config.filters
					: {
							filters: [],
							logical_operator: 'And',
					  }
				chart.config.order_by = chart.config.order_by || []
			})
			return doc
		},
	})
	return workbook
}
|
2302_79757062/insights
|
frontend/src2/workbook/workbook.ts
|
TypeScript
|
agpl-3.0
| 7,469
|
import { useTimeAgo } from '@vueuse/core'
import { call } from 'frappe-ui'
import { reactive, ref } from 'vue'
import { WorkbookListItem } from '../types/workbook.types'
const workbooks = ref<WorkbookListItem[]>([])
const loading = ref(false)
async function getWorkbooks() {
	// Fetch the workbook list and decorate each row with human-friendly
	// relative timestamps. `loading` is reset even when the call rejects —
	// the original left it stuck at `true` on failure.
	loading.value = true
	try {
		const result = await call('insights.api.workbooks.get_workbooks')
		workbooks.value = result.map((workbook: any) => ({
			...workbook,
			created_from_now: useTimeAgo(workbook.creation),
			modified_from_now: useTimeAgo(workbook.modified),
		}))
	} finally {
		loading.value = false
	}
	return workbooks.value
}
/**
 * Shared store for the workbook list page.
 * Lazily populates the module-level list the first time it is used.
 */
export default function useWorkbookListItemStore() {
	if (workbooks.value.length === 0) {
		getWorkbooks()
	}
	return reactive({ workbooks, loading, getWorkbooks })
}
|
2302_79757062/insights
|
frontend/src2/workbook/workbooks.ts
|
TypeScript
|
agpl-3.0
| 774
|
const plugin = require('tailwindcss/plugin')
// Tailwind config for the Insights frontend; extends frappe-ui's preset.
module.exports = {
	presets: [require('frappe-ui/src/utils/tailwind.config')],
	// Scan both the v1 (src) and v2 (src2) trees plus frappe-ui components.
	content: [
		'./index.html',
		'./src/**/*.{vue,js,ts,jsx,tsx}',
		'./src2/**/*.{vue,js,ts,jsx,tsx}',
		'./node_modules/frappe-ui/src/components/**/*.{vue,js,ts,jsx,tsx}',
		'../node_modules/frappe-ui/src/components/**/*.{vue,js,ts,jsx,tsx}',
	],
	theme: {
		container: {
			center: true,
			padding: {
				DEFAULT: '1rem',
				sm: '2rem',
				lg: '2rem',
				xl: '4rem',
				'2xl': '4rem',
			},
		},
		extend: {
			maxWidth: {
				'main-content': '768px',
			},
			screens: {
				// Matches PWA / standalone display mode
				standalone: {
					raw: '(display-mode: standalone)',
				},
			},
		},
	},
	plugins: [
		// Adds a `.scrollbar-hide` utility that hides scrollbars cross-browser.
		plugin(function ({ addUtilities }) {
			addUtilities({
				'.scrollbar-hide': {
					/* IE and Edge */
					'-ms-overflow-style': 'none',
					/* Firefox */
					'scrollbar-width': 'none',
					/* Safari and Chrome */
					'&::-webkit-scrollbar': {
						display: 'none',
					},
				},
			})
		}),
	],
}
|
2302_79757062/insights
|
frontend/tailwind.config.js
|
JavaScript
|
agpl-3.0
| 1,005
|
import vue from '@vitejs/plugin-vue'
import vueJsx from '@vitejs/plugin-vue-jsx'
import frappeui from 'frappe-ui/vite'
import path from 'path'
import { defineConfig } from 'vite'
// Vite build config; the SPA is emitted into the Frappe app's public folder.
export default defineConfig({
	plugins: [frappeui(), vue(), vueJsx()],
	// Treat plain .js/.ts sources as TSX so JSX can be used freely.
	esbuild: { loader: 'tsx' },
	resolve: {
		alias: {
			'@': path.resolve(__dirname, 'src'),
		},
	},
	build: {
		outDir: `../insights/public/frontend`,
		emptyOutDir: true,
		target: 'es2015',
		sourcemap: true,
		rollupOptions: {
			// Two entry points: the v1 app (index.html) and the v2 app.
			input: {
				main: path.resolve(__dirname, 'index.html'),
				insights_v2: path.resolve(__dirname, 'index_v2.html'),
			},
			output: {
				// Keep frappe-ui in its own chunk for better browser caching.
				manualChunks: {
					'frappe-ui': ['frappe-ui'],
				},
			},
		},
	},
	optimizeDeps: {
		include: ['feather-icons', 'showdown', 'engine.io-client'],
	},
	define: {
		// enable hydration mismatch details in production build
		__VUE_PROD_HYDRATION_MISMATCH_DETAILS__: 'true',
	},
})
|
2302_79757062/insights
|
frontend/vite.config.js
|
JavaScript
|
agpl-3.0
| 907
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
__version__ = "3.0.0"
def notify(*args, **kwargs):
    """Push a realtime notification to the current user's Insights frontend.

    Accepts either the positional shorthand ``notify("msg")`` or keyword
    form ``notify(message=..., title=..., type=...)``. ``type`` defaults
    to ``"success"``.
    """
    import frappe

    # Positional shorthand: notify("something happened")
    if len(args) == 1:
        kwargs["message"] = args[0]

    payload = {
        "message": kwargs.get("message"),
        "title": kwargs.get("title"),
        "type": kwargs.get("type", "success"),
        "user": frappe.session.user,
    }
    frappe.publish_realtime(
        event="insights_notification",
        user=frappe.session.user,
        message=payload,
    )
|
2302_79757062/insights
|
insights/__init__.py
|
Python
|
agpl-3.0
| 570
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
import ibis
from frappe.defaults import get_user_default, set_user_default
from frappe.integrations.utils import make_post_request
from frappe.rate_limiter import rate_limit
from insights.decorators import insights_whitelist, validate_type
from insights.insights.doctype.insights_data_source_v3.ibis_utils import (
get_columns_from_schema,
)
from insights.insights.doctype.insights_table_v3.insights_table_v3 import (
InsightsTablev3,
)
from insights.insights.doctype.insights_team.insights_team import (
check_data_source_permission,
)
@insights_whitelist()
def get_app_version():
    """Return the version string of the installed Insights app."""
    # Path is built by concatenation, mirroring the original call shape.
    attr_path = "insights" + ".__version__"
    return frappe.get_attr(attr_path)
@insights_whitelist()
def get_user_info():
    """Return profile details and role flags for the logged-in user.

    Consumed by the frontend to decide which UI (admin vs user, v2 vs v3)
    to present.
    """
    # Fix: the original passed `("Insights Admin")` / `("Insights User")`,
    # which are plain strings (missing trailing comma), not tuples. An
    # explicit list makes the "in" filter unambiguous.
    is_admin = frappe.db.exists(
        "Has Role",
        {
            "parenttype": "User",
            "parent": frappe.session.user,
            "role": ["in", ["Insights Admin"]],
        },
    )
    is_user = frappe.db.exists(
        "Has Role",
        {
            "parenttype": "User",
            "parent": frappe.session.user,
            "role": ["in", ["Insights User"]],
        },
    )
    user = frappe.db.get_value(
        "User", frappe.session.user, ["first_name", "last_name"], as_dict=1
    )
    return {
        "email": frappe.session.user,
        "first_name": user.get("first_name"),
        "last_name": user.get("last_name"),
        # "Administrator" always counts as both admin and user
        "is_admin": is_admin or frappe.session.user == "Administrator",
        "is_user": is_user or frappe.session.user == "Administrator",
        # TODO: move to `get_session_info` since not user specific
        "country": frappe.db.get_single_value("System Settings", "country"),
        "locale": frappe.db.get_single_value("System Settings", "language"),
        "is_v2_user": frappe.db.count("Insights Query") > 0,
        "default_version": get_user_default(
            "insights_default_version", frappe.session.user
        ),
    }
@insights_whitelist()
def update_default_version(version):
    """Persist the user's preferred Insights version as a user default."""
    user = frappe.session.user
    # Record the first visit to v3 exactly once.
    if get_user_default("insights_has_visited_v3", user) != "1":
        set_user_default("insights_has_visited_v3", "1", user)
    set_user_default("insights_default_version", version, user)
@frappe.whitelist()
@rate_limit(limit=10, seconds=60 * 60)
def contact_team(message_type, message_content, is_critical=False):
    # Relay user feedback / bug reports / questions to the Frappe Insights team.
    # NOTE: `is_critical` is currently unused; kept for API compatibility.
    if not message_type or not message_content:
        frappe.throw("Message Type and Content are required")
    # Map the message type to a subject line; unknown types are rejected.
    message_title = {
        "Feedback": "Feedback from Insights User",
        "Bug": "Bug Report from Insights User",
        "Question": "Question from Insights User",
    }.get(message_type)
    if not message_title:
        frappe.throw("Invalid Message Type")
    try:
        make_post_request(
            "https://frappeinsights.com/api/method/contact-team",
            data={
                "message_title": message_title,
                "message_content": message_content,
            },
        )
    except Exception as e:
        # Log the original failure, then surface a generic error to the user.
        frappe.log_error(e)
        frappe.throw("Something went wrong. Please try again later.")
def get_csv_file(filename: str):
    # Fetch the File doc and reject anything that isn't a CSV.
    file = frappe.get_doc("File", filename)
    parts = file.get_extension()
    # NOTE(review): assumes get_extension() returns a splitext-style tuple
    # where parts[1] holds the ".csv" extension — confirm against frappe's
    # File API before relying on this.
    if "csv" not in parts[1]:
        frappe.throw("Only CSV files are supported")
    return file
@insights_whitelist()
@validate_type
def get_csv_data(filename: str):
    """Preview an uploaded CSV: first 50 rows, column schema and total count."""
    check_data_source_permission("uploads")

    file = get_csv_file(filename)
    path = file.get_full_path()
    tablename = frappe.scrub(file.file_name.split(".")[0])

    table = ibis.read_csv(path, table_name=tablename)
    total_rows = table.count().execute().item()
    schema_columns = get_columns_from_schema(table.schema())
    preview = table.head(50).execute().to_dict(orient="records")

    return {
        "tablename": tablename,
        "rows": preview,
        "columns": schema_columns,
        "total_rows": total_rows,
    }
@insights_whitelist()
@validate_type
def import_csv_data(filename: str):
    # Import an uploaded CSV into the DuckDB-backed "uploads" data source.
    check_data_source_permission("uploads")
    file = get_csv_file(filename)
    file_path = file.get_full_path()
    table_name = file.file_name.split(".")[0]
    table_name = frappe.scrub(table_name)
    # Lazily create the "uploads" data source on first import.
    if not frappe.db.exists("Insights Data Source v3", "uploads"):
        uploads = frappe.new_doc("Insights Data Source v3")
        uploads.name = "uploads"
        uploads.title = "Uploads"
        uploads.database_type = "DuckDB"
        uploads.database_name = "insights_file_uploads"
        uploads.save(ignore_permissions=True)
    ds = frappe.get_doc("Insights Data Source v3", "uploads")
    db = ds._get_ibis_backend()
    # Load the CSV and (re)create the table, replacing any previous import.
    table = db.read_csv(file_path, table_name=table_name)
    db.create_table(table_name, table, overwrite=True)
    InsightsTablev3.create(ds.name, table_name)
|
2302_79757062/insights
|
insights/api/__init__.py
|
Python
|
agpl-3.0
| 4,931
|
import frappe
from insights.decorators import insights_whitelist
@insights_whitelist()
def create_alert(alert):
    """Create and save a new Insights Alert from a dict payload."""
    payload = frappe._dict(alert)
    doc = frappe.new_doc("Insights Alert")
    doc.update(payload)
    doc.save()
    return doc
@insights_whitelist()
def test_alert(alert):
    """Dry-run an alert without saving it.

    Evaluates the alert's condition and sends it if the condition holds.
    Returns True when the alert fired, False otherwise.
    """
    doc = frappe.new_doc("Insights Alert")
    doc.update(alert)
    if not doc.evaluate_condition():
        return False
    doc.send_alert()
    return True
|
2302_79757062/insights
|
insights/api/alerts.py
|
Python
|
agpl-3.0
| 527
|
import frappe
from insights.api.permissions import is_private
from insights.decorators import insights_whitelist
from insights.insights.doctype.insights_team.insights_team import (
get_allowed_resources_for_user,
get_permission_filter,
)
@insights_whitelist()
def get_dashboard_list():
    # List dashboards visible to the current user, decorated with a
    # favourite flag, chart count, view count and privacy flag.
    dashboards = frappe.get_list(
        "Insights Dashboard",
        filters={**get_permission_filter("Insights Dashboard")},
        fields=["name", "title", "modified", "_liked_by"],
    )
    for dashboard in dashboards:
        if dashboard._liked_by:
            # NOTE(review): substring check against the serialized _liked_by
            # JSON — a user id that is a prefix of another could false-positive.
            dashboard["is_favourite"] = frappe.session.user in frappe.as_json(
                dashboard._liked_by
            )
        dashboard["charts"] = frappe.get_all(
            "Insights Dashboard Item",
            filters={
                "parent": dashboard.name,
                "item_type": ["not in", ["Text", "Filter"]],
            },
            # NOTE(review): pluck="parent" returns the dashboard name for each
            # item; if chart identifiers were intended, this should likely
            # pluck the chart field instead. The count below is unaffected.
            pluck="parent",
        )
        dashboard["charts_count"] = len(dashboard["charts"])
        dashboard["view_count"] = frappe.db.count(
            "View Log",
            filters={
                "reference_doctype": "Insights Dashboard",
                "reference_name": dashboard.name,
            },
        )
        dashboard["is_private"] = is_private("Insights Dashboard", dashboard.name)
    return dashboards
@insights_whitelist()
def create_dashboard(title):
    """Create a new Insights Dashboard with the given title."""
    doc = frappe.get_doc({"doctype": "Insights Dashboard", "title": title})
    doc.insert()
    return {"name": doc.name, "title": doc.title}
@insights_whitelist()
def get_dashboard_options(chart):
    """Return [{value, label}] of allowed dashboards that do NOT already
    contain the given chart.

    BUG FIX: the previous left-join with `DashboardItem.chart != chart`
    excluded dashboards with no items at all (NULL fails `!=` in SQL) and
    still returned dashboards that contained the chart alongside other
    charts. Use a NOT IN subquery so the filter matches the stated intent.
    """
    allowed_dashboards = get_allowed_resources_for_user("Insights Dashboard")
    if not allowed_dashboards:
        return []
    # find all dashboards that don't have the chart within the allowed dashboards
    Dashboard = frappe.qb.DocType("Insights Dashboard")
    DashboardItem = frappe.qb.DocType("Insights Dashboard Item")
    dashboards_with_chart = (
        frappe.qb.from_(DashboardItem)
        .select(DashboardItem.parent)
        .where(DashboardItem.chart == chart)
    )
    return (
        frappe.qb.from_(Dashboard)
        .select(Dashboard.name.as_("value"), Dashboard.title.as_("label"))
        .where(
            Dashboard.name.isin(allowed_dashboards)
            & Dashboard.name.notin(dashboards_with_chart)
        )
        .run(as_dict=True)
    )
@insights_whitelist()
def add_chart_to_dashboard(dashboard, chart):
    """Append a chart to an existing dashboard and persist the change."""
    doc = frappe.get_doc("Insights Dashboard", dashboard)
    doc.add_chart(chart)
    doc.save()
|
2302_79757062/insights
|
insights/api/dashboards.py
|
Python
|
agpl-3.0
| 2,530
|
import frappe
from frappe.utils.caching import redis_cache, site_cache
from insights import notify
from insights.decorators import insights_whitelist, validate_type
from insights.insights.doctype.insights_data_source_v3.ibis_utils import (
to_insights_type,
)
from insights.insights.doctype.insights_query.utils import infer_type_from_list
from insights.insights.doctype.insights_table_link_v3.insights_table_link_v3 import (
InsightsTableLinkv3,
)
from insights.insights.doctype.insights_team.insights_team import (
check_data_source_permission,
check_table_permission,
get_permission_filter,
)
from insights.utils import InsightsTable, detect_encoding
@insights_whitelist()
def get_data_sources():
    """List active data sources visible to the current user, newest first."""
    filters = {"status": "Active"}
    filters.update(get_permission_filter("Insights Data Source"))
    return frappe.get_list(
        "Insights Data Source",
        filters=filters,
        fields=[
            "name",
            "title",
            "status",
            "database_type",
            "creation",
            "is_site_db",
        ],
        order_by="creation desc",
    )
@insights_whitelist()
def get_table_columns(data_source, table):
    """Return the stored column metadata of an Insights Table."""
    check_table_permission(data_source, table)
    table_doc = frappe.get_doc(
        "Insights Table", {"data_source": data_source, "table": table}
    )
    return {"columns": table_doc.columns}
@insights_whitelist()
def get_table_name(data_source, table):
    """Resolve the Insights Table docname for a (data_source, table) pair."""
    check_table_permission(data_source, table)
    filters = {"data_source": data_source, "table": table}
    return frappe.get_value("Insights Table", filters, "name")
@insights_whitelist()
def get_tables(data_source=None, with_query_tables=False):
    """List visible (non-hidden) tables of a data source.

    Query-based tables are excluded unless `with_query_tables` is truthy.
    Returns [] when no data source is given.
    """
    if not data_source:
        return []
    check_data_source_permission(data_source)
    filters = {"hidden": 0, "data_source": data_source}
    filters.update(get_permission_filter("Insights Table"))
    if not with_query_tables:
        filters["is_query_based"] = 0
    return frappe.get_list(
        "Insights Table",
        filters=filters,
        fields=["name", "table", "label", "is_query_based"],
        order_by="is_query_based asc, label asc",
    )
def _append_table_link(data_source, table, link):
    """Append `link` to the table's `table_links` child rows, skipping
    duplicates (an identical link already present), and save."""
    doc = frappe.get_doc(
        "Insights Table",
        {"data_source": data_source, "table": table},
    )
    if not doc.get("table_links", link):
        doc.append("table_links", link)
        doc.save()


@insights_whitelist()
def create_table_link(
    data_source, primary_table, foreign_table, primary_key, foreign_key
):
    """Create a bidirectional link between two tables: the forward link on
    the primary table and the mirrored (keys swapped) link on the foreign
    table. `primary_table`/`foreign_table` are dicts with table metadata.

    NOTE(review): permission checks use the "value" key while the lookups
    use the "table" key — confirm both keys carry the same identifier.
    """
    check_table_permission(data_source, primary_table.get("value"))
    check_table_permission(data_source, foreign_table.get("value"))
    _append_table_link(
        data_source,
        primary_table.get("table"),
        {
            "primary_key": primary_key,
            "foreign_key": foreign_key,
            "foreign_table": foreign_table.get("table"),
            "foreign_table_label": foreign_table.get("label"),
        },
    )
    _append_table_link(
        data_source,
        foreign_table.get("table"),
        {
            "primary_key": foreign_key,
            "foreign_key": primary_key,
            "foreign_table": primary_table.get("table"),
            "foreign_table_label": primary_table.get("label"),
        },
    )
@insights_whitelist()
def get_columns_from_uploaded_file(filename):
    """Read an uploaded CSV (File docname) and return its columns with
    inferred Insights types: [{"label": ..., "type": ...}]."""
    import pandas as pd

    file = frappe.get_doc("File", filename)
    # assumes get_extension() returns an os.path.splitext-style tuple where
    # index 1 is the extension — TODO confirm against the installed frappe
    parts = file.get_extension()
    if "csv" not in parts[1]:
        frappe.throw("Only CSV files are supported")
    file_path = file.get_full_path()
    # sniff the encoding first so pandas doesn't choke on non-UTF-8 uploads
    encoding = detect_encoding(file_path)
    df = pd.read_csv(file_path, encoding=encoding)
    columns = df.columns.tolist()
    columns_with_types = []
    for column in columns:
        # infer from a sample of up to 1000 non-null values per column
        column_type = infer_type_from_list(df[column].dropna().head(1000).tolist())
        columns_with_types.append({"label": column, "type": column_type})
    return columns_with_types
def create_data_source_for_csv():
    """Ensure the SQLite-backed "File Uploads" data source exists (idempotent)."""
    if frappe.db.exists("Insights Data Source", {"title": "File Uploads"}):
        return
    data_source = frappe.new_doc("Insights Data Source")
    data_source.update(
        {
            "database_type": "SQLite",
            "database_name": "file_uploads",
            "title": "File Uploads",
            "allow_imports": 1,
        }
    )
    data_source.insert(ignore_permissions=True)
@insights_whitelist()
def import_csv(table_label, table_name, filename, if_exists, columns, data_source):
    """Kick off a CSV import: create and submit an Insights Table Import doc
    pointing at the uploaded file, with the user-confirmed column mapping."""
    create_data_source_for_csv()
    table_import = frappe.new_doc("Insights Table Import")
    table_import.data_source = data_source
    table_import.table_label = table_label
    table_import.table_name = table_name
    table_import.if_exists = if_exists
    table_import.source = frappe.get_doc("File", filename).file_url
    table_import.save()
    # reset any rows added during save before appending the confirmed mapping
    table_import.columns = []
    for column in columns:
        table_import.append(
            "columns",
            {
                "column": column.get("name"),
                "label": column.get("label"),
                "type": column.get("type"),
            },
        )
    # submitting the doc triggers the actual import
    table_import.submit()
    notify(
        **{
            "title": "Success",
            "message": "Table Imported",
            "type": "success",
        }
    )
@insights_whitelist()
def delete_data_source(data_source):
    """Delete a data source and notify the user of the outcome.

    BUG FIX: the generic handler passed the exception object itself as the
    notification message; convert it to a string so the frontend renders it.
    """
    try:
        frappe.delete_doc("Insights Data Source", data_source)
        notify(
            **{
                "title": "Success",
                "message": "Data Source Deleted",
                "type": "success",
            }
        )
    except frappe.LinkExistsError:
        # deletion is blocked while queries/dashboards reference the source
        notify(
            **{
                "type": "error",
                "title": "Cannot delete Data Source",
                "message": "Data Source is linked to a Query or Dashboard",
            }
        )
    except Exception as e:
        notify(
            **{
                "type": "error",
                "title": "Error",
                "message": str(e),
            }
        )
@insights_whitelist()
def fetch_column_values(data_source, table, column, search_text=None):
    """Return distinct column values for filter autocomplete (redis-cached)."""
    for label, value in (
        ("Data Source", data_source),
        ("Table", table),
        ("Column", column),
    ):
        if not value or not isinstance(value, str):
            frappe.throw(f"{label} is required")
    doc = frappe.get_doc("Insights Data Source", data_source)
    return doc.get_column_options(table, column, search_text)
@insights_whitelist()
def get_relation(data_source, table_one, table_two):
    """Return the join relation between two tables as seen from `table_one`,
    checking links stored on either side; implicitly returns None when no
    link exists in either direction."""
    table_one_doc = InsightsTable.get_doc(
        {"data_source": data_source, "table": table_one}
    )
    if not table_one_doc:
        frappe.throw(f"Table {table_one} not found")
    table_two_doc = InsightsTable.get_doc(
        {"data_source": data_source, "table": table_two}
    )
    if not table_two_doc:
        frappe.throw(f"Table {table_two} not found")
    # forward link: stored on table_one, pointing at table_two
    if relation := table_one_doc.get({"foreign_table": table_two}):
        return {
            "primary_table": table_one,
            "primary_table_label": table_one_doc.label,
            "primary_column": relation[0].primary_key,
            "foreign_table": table_two,
            "foreign_column": relation[0].foreign_key,
            "foreign_table_label": table_two_doc.label,
            "cardinality": relation[0].cardinality,
        }
    # reverse link: stored on table_two — swap keys and flip the cardinality
    if relation := table_two_doc.get({"foreign_table": table_one}):
        reverse_cardinality = get_reverse_cardinality(relation[0].cardinality)
        return {
            "primary_table": table_one,
            "primary_table_label": table_one_doc.label,
            "primary_column": relation[0].foreign_key,
            "foreign_table": table_two,
            "foreign_column": relation[0].primary_key,
            "foreign_table_label": table_two_doc.label,
            "cardinality": reverse_cardinality,
        }
def get_reverse_cardinality(cardinality):
    """Return the cardinality as seen from the other side of a relation;
    symmetric values (e.g. "1:1") pass through unchanged."""
    swap = {"1:N": "N:1", "N:1": "1:N"}
    return swap.get(cardinality, cardinality)
# v3 APIs
@insights_whitelist()
def get_all_data_sources():
    """List every active v3 data source (no per-user permission filter)."""
    fields = [
        "name",
        "status",
        "title",
        "owner",
        "creation",
        "modified",
        "database_type",
    ]
    return frappe.get_list(
        "Insights Data Source v3",
        filters={"status": "Active"},
        fields=fields,
    )
@insights_whitelist()
@validate_type
def get_data_source_tables(data_source=None, search_term=None, limit=100):
    """Search v3 tables, optionally scoped to one data source, returning
    plain dicts with label/table_name/data_source/last_synced_on."""
    tables = frappe.get_list(
        "Insights Table v3",
        filters={
            # no data_source given -> match any row that has one set
            "data_source": data_source or ["is", "set"],
        },
        or_filters={
            # with a search term, match it against either label OR table;
            # without one, these degenerate to "is set" (match everything)
            "label": ["is", "set"] if not search_term else ["like", f"%{search_term}%"],
            "table": ["is", "set"] if not search_term else ["like", f"%{search_term}%"],
        },
        fields=["name", "table", "label", "data_source", "last_synced_on"],
        limit=limit,
    )
    ret = []
    for table in tables:
        ret.append(
            frappe._dict(
                {
                    "label": table.label,
                    "table_name": table.table,
                    "data_source": table.data_source,
                    "last_synced_on": table.last_synced_on,
                }
            )
        )
    return ret
@insights_whitelist()
@site_cache
@validate_type
def get_data_source_table_columns(data_source: str, table_name: str):
    """Return column/label/type dicts for a table, read live from the
    data source's ibis backend schema (site-cached)."""
    check_table_permission(data_source, table_name)
    doc = frappe.get_doc("Insights Data Source v3", data_source)
    backend = doc._get_ibis_backend()
    schema = backend.table(table_name).schema()
    columns = []
    for name, dtype in schema.items():
        columns.append(
            frappe._dict(column=name, label=name, type=to_insights_type(dtype))
        )
    return columns
@insights_whitelist()
@validate_type
def update_data_source_tables(data_source: str):
    """Refresh the stored table list of a v3 data source."""
    check_data_source_permission(data_source)
    doc = frappe.get_doc("Insights Data Source v3", data_source)
    doc.update_table_list()
@insights_whitelist()
@validate_type
def get_table_links(data_source: str, left_table: str, right_table: str):
    """Return the stored v3 link rows between two tables of a data source."""
    check_table_permission(data_source, left_table)
    links = InsightsTableLinkv3.get_links(data_source, left_table, right_table)
    return links
def make_data_source(data_source):
    """Build (without saving) an Insights Data Source v3 doc from a plain
    dict of connection fields; missing keys become None."""
    args = frappe._dict(data_source)
    doc = frappe.new_doc("Insights Data Source v3")
    for field in (
        "database_type",
        "title",
        "host",
        "port",
        "username",
        "password",
        "database_name",
        "use_ssl",
        "connection_string",
    ):
        setattr(doc, field, args.get(field))
    return doc
@insights_whitelist()
def test_connection(data_source):
    """Admin-only: test connectivity of an unsaved data source config."""
    frappe.only_for("Insights Admin")
    doc = make_data_source(data_source)
    return doc.test_connection(raise_exception=True)
@insights_whitelist()
def create_data_source(data_source):
    """Admin-only: persist a new v3 data source and return its docname."""
    frappe.only_for("Insights Admin")
    doc = make_data_source(data_source)
    doc.save()
    return doc.name
|
2302_79757062/insights
|
insights/api/data_sources.py
|
Python
|
agpl-3.0
| 11,420
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from pypika.functions import Max
from insights.decorators import insights_whitelist
@insights_whitelist()
def create_last_viewed_log(record_type, record_name):
    """Record a 'viewed' event for a query/dashboard/notebook page.

    Best-effort: any failure (unknown record type, missing doc) is
    deliberately swallowed so view tracking never breaks the UI.
    """
    recordToDoctype = {
        "Query": "Insights Query",
        "Dashboard": "Insights Dashboard",
        "NotebookPage": "Insights Notebook Page",
    }
    try:
        doc = frappe.get_doc(recordToDoctype[record_type], record_name)
        # force=True logs a view even if one was logged recently
        doc.add_viewed(force=True)
    except Exception:
        pass
@insights_whitelist()
def get_last_viewed_records():
    """Return the current user's 20 most recently viewed queries,
    dashboards and notebook pages, annotated with titles (and the parent
    notebook for notebook pages)."""
    ViewLog = frappe.qb.DocType("View Log")
    TRACKED_DOCTYPES = [
        "Insights Query",
        "Insights Dashboard",
        "Insights Notebook Page",
    ]
    records = (
        frappe.qb.from_(ViewLog)
        .select(
            ViewLog.reference_doctype,
            ViewLog.reference_name,
            # latest view per record, exposed as "creation" for the frontend
            Max(ViewLog.modified).as_("creation"),
        )
        .where(
            (ViewLog.viewed_by == frappe.session.user)
            & ViewLog.reference_doctype.isin(TRACKED_DOCTYPES)
        )
        .groupby(ViewLog.reference_doctype, ViewLog.reference_name)
        .orderby(Max(ViewLog.modified).as_("creation"), order=frappe.qb.desc)
        .limit(20)
        .run(as_dict=True)
    )
    # both helpers mutate `records` in place
    fetch_titles(records)
    fetch_notebook_names(records)
    return records
def fetch_titles(records):
    """Annotate each record with the `title` of its referenced document
    (mutates `records` in place).

    Improvement: the original scanned all records for every fetched title
    (O(records x titles)); index records by (doctype, name) once instead.
    """
    docnames_by_doctype = {}
    for record in records:
        docnames_by_doctype.setdefault(record.reference_doctype, []).append(
            record.reference_name
        )
    # first occurrence wins, matching the original `break`-on-first-match
    record_index = {}
    for record in records:
        key = (record.reference_doctype, record.reference_name)
        record_index.setdefault(key, record)
    for doctype, docnames in docnames_by_doctype.items():
        titles = frappe.get_all(
            doctype,
            filters={"name": ["in", docnames]},
            fields=["name", "title"],
        )
        for title in titles:
            record = record_index.get((doctype, title.name))
            if record is not None:
                record["title"] = title.title
def fetch_notebook_names(records):
    """Annotate Notebook Page records with their parent `notebook` name
    (mutates `records` in place).

    Improvement: index the relevant records once instead of rescanning the
    whole list for each fetched notebook page (was O(records x pages)).
    """
    # first occurrence wins, matching the original `break`-on-first-match
    record_index = {}
    for record in records:
        if record.reference_doctype == "Insights Notebook Page":
            record_index.setdefault(record.reference_name, record)
    if not record_index:
        return
    notebooks = frappe.get_all(
        "Insights Notebook Page",
        filters={"name": ["in", list(record_index)]},
        fields=["name", "notebook"],
    )
    for notebook in notebooks:
        record = record_index.get(notebook.name)
        if record is not None:
            record["notebook"] = notebook.notebook
|
2302_79757062/insights
|
insights/api/home.py
|
Python
|
agpl-3.0
| 2,999
|
import frappe
@frappe.whitelist()
def get_notebooks():
    """List all notebooks, newest first."""
    # TODO: Add permission check
    return frappe.get_list(
        "Insights Notebook",
        fields=["name", "title", "creation", "modified"],
        order_by="creation desc",
    )
@frappe.whitelist()
def create_notebook(title):
    """Create a notebook with the given title and return its docname."""
    doc = frappe.new_doc("Insights Notebook")
    doc.title = title
    doc.save()
    return doc.name
@frappe.whitelist()
def create_notebook_page(notebook):
    """Create an "Untitled" page inside a notebook and return its docname."""
    page = frappe.new_doc("Insights Notebook Page")
    page.update({"notebook": notebook, "title": "Untitled"})
    page.save()
    return page.name
@frappe.whitelist()
def get_notebook_pages(notebook):
    """List the pages of one notebook, newest first."""
    return frappe.get_list(
        "Insights Notebook Page",
        filters={"notebook": notebook},
        fields=["name", "title", "creation", "modified"],
        order_by="creation desc",
    )
|
2302_79757062/insights
|
insights/api/notebooks.py
|
Python
|
agpl-3.0
| 923
|
import frappe
from frappe.query_builder.functions import Count
from insights.decorators import insights_whitelist
@insights_whitelist()
def get_resource_access_info(resource_type, resource_name):
    # returns a list of authorized and unauthorized teams for a resource
    """Partition all Insights Teams into those with and without a
    permission row for the given resource, each with a member count."""
    InsightsTeam = frappe.qb.DocType("Insights Team")
    InsightsTeamMember = frappe.qb.DocType("Insights Team Member")
    InsightsResourcePermission = frappe.qb.DocType("Insights Resource Permission")
    # teams joined to a matching permission row (inner joins drop the rest)
    authorized_teams = (
        frappe.qb.from_(InsightsTeam)
        .join(InsightsTeamMember)
        .on(InsightsTeam.name == InsightsTeamMember.parent)
        .join(InsightsResourcePermission)
        .on(InsightsTeam.name == InsightsResourcePermission.parent)
        .where(
            (InsightsResourcePermission.resource_type == resource_type)
            & (InsightsResourcePermission.resource_name == resource_name)
        )
        .select(
            InsightsTeam.name,
            InsightsTeam.team_name,
            Count(InsightsTeamMember.user).as_("members_count"),
        )
        .groupby(InsightsTeam.name)
        .run(as_dict=True)
    )
    # teams NOT IN the set of parents holding a matching permission row;
    # left join keeps teams that have no members at all
    unauthorized_teams = (
        frappe.qb.from_(InsightsTeam)
        .left_join(InsightsTeamMember)
        .on(InsightsTeam.name == InsightsTeamMember.parent)
        .where(
            ~(
                InsightsTeam.name.isin(
                    frappe.qb.from_(InsightsResourcePermission)
                    .where(
                        (InsightsResourcePermission.resource_type == resource_type)
                        & (InsightsResourcePermission.resource_name == resource_name)
                    )
                    .select(InsightsResourcePermission.parent)
                )
            )
        )
        .select(
            InsightsTeam.name,
            InsightsTeam.team_name,
            Count(InsightsTeamMember.user).as_("members_count"),
        )
        .groupby(InsightsTeam.name)
        .run(as_dict=True)
    )
    return {
        "authorized_teams": authorized_teams,
        "unauthorized_teams": unauthorized_teams,
    }
@insights_whitelist()
def grant_access(resource_type, resource_name, team):
    """Give a team access to a resource; only the resource owner may grant."""
    owner = frappe.db.get_value(resource_type, resource_name, "owner")
    if owner != frappe.session.user:
        frappe.throw(
            "You are not authorized to grant access to this resource.",
            frappe.PermissionError,
        )
    team_doc = frappe.get_doc("Insights Team", team)
    team_doc.append(
        "team_permissions",
        {
            "resource_type": resource_type,
            "resource_name": resource_name,
        },
    )
    team_doc.save(ignore_permissions=True)
@insights_whitelist()
def revoke_access(resource_type, resource_name, team):
    """Remove a team's permission rows for a resource; only the owner may
    revoke (non-owners are a silent no-op, unlike grant_access which throws).

    BUG FIX: the original removed child rows from `team_permissions` while
    iterating that same list, which skips the row following each removal;
    iterate over a copy so every matching row is removed.
    """
    owner = frappe.db.get_value(resource_type, resource_name, "owner")
    if owner != frappe.session.user:
        return
    team_doc = frappe.get_doc("Insights Team", team)
    for permission in list(team_doc.team_permissions):
        if (
            permission.resource_type == resource_type
            and permission.resource_name == resource_name
        ):
            team_doc.remove(permission)
    team_doc.save(ignore_permissions=True)
def is_private(resource_type, resource_name):
    """A resource is "private" when permissions are enabled site-wide and at
    least one team has been granted explicit access to it."""
    permissions_enabled = frappe.db.get_single_value(
        "Insights Settings", "enable_permissions"
    )
    if not permissions_enabled:
        return False
    access_info = get_resource_access_info(resource_type, resource_name)
    return bool(access_info.get("authorized_teams"))
|
2302_79757062/insights
|
insights/api/permissions.py
|
Python
|
agpl-3.0
| 3,559
|
import frappe
from frappe.utils.caching import redis_cache
from insights.api.data_sources import fetch_column_values
from insights.decorators import insights_whitelist
@insights_whitelist()
def get_public_key(resource_type, resource_name):
    """Return the public sharing key of a dashboard or chart; implicitly
    returns None for any other resource type."""
    # local imports avoid a circular dependency with the doctype modules
    from insights.insights.doctype.insights_chart.insights_chart import (
        get_chart_public_key,
    )
    from insights.insights.doctype.insights_dashboard.insights_dashboard import (
        get_dashboard_public_key,
    )

    if resource_type == "Insights Chart":
        return get_chart_public_key(resource_name)
    if resource_type == "Insights Dashboard":
        return get_dashboard_public_key(resource_name)
@frappe.whitelist(allow_guest=True)
def get_public_dashboard(public_key):
    """Guest endpoint: resolve a public key to its dashboard and return the
    dashboard document as a dict."""
    if not public_key or not isinstance(public_key, str):
        frappe.throw("Public Key is required")
    dashboard_name = frappe.db.exists(
        "Insights Dashboard", {"public_key": public_key, "is_public": 1}
    )
    if not dashboard_name:
        frappe.throw("Invalid Public Key")
    doc = frappe.get_cached_doc("Insights Dashboard", dashboard_name)
    return doc.as_dict(no_default_fields=True)
@frappe.whitelist(allow_guest=True)
def get_public_chart(public_key):
    """Guest endpoint: resolve a public key to its chart and return the
    chart dict with freshly fetched query results under "data"."""
    if not public_key or not isinstance(public_key, str):
        frappe.throw("Public Key is required")
    chart_name = frappe.db.exists(
        "Insights Chart", {"public_key": public_key, "is_public": 1}
    )
    if not chart_name:
        frappe.throw("Invalid Public Key")
    chart = frappe.get_cached_doc("Insights Chart", chart_name).as_dict(
        no_default_fields=True
    )
    query_doc = frappe.get_cached_doc("Insights Query", chart.query)
    chart["data"] = query_doc.fetch_results()
    return chart
@frappe.whitelist(allow_guest=True)
def get_public_dashboard_chart_data(public_key, *args, **kwargs):
    """Guest endpoint: fetch chart data for a public dashboard, forwarding
    all extra arguments to Dashboard.fetch_chart_data.

    BUG FIX: `kwargs.pop("cmd")` raised KeyError when the function was
    called without the framework-injected `cmd` key (e.g. direct calls);
    use a default so the pop is safe either way.
    """
    if not public_key or not isinstance(public_key, str):
        frappe.throw("Public Key is required")
    dashboard_name = frappe.db.exists(
        "Insights Dashboard", {"public_key": public_key, "is_public": 1}
    )
    if not dashboard_name:
        frappe.throw("Invalid Public Key")
    # strip the request-dispatch key injected by frappe before forwarding
    kwargs.pop("cmd", None)
    return frappe.get_cached_doc("Insights Dashboard", dashboard_name).fetch_chart_data(
        *args, **kwargs
    )
@frappe.whitelist(allow_guest=True)
@redis_cache()
def fetch_column_values_public(public_key, item_id, search_text=None):
    """Guest endpoint: distinct column values for a filter item on a public
    dashboard (redis-cached)."""
    if not public_key or not isinstance(public_key, str):
        frappe.throw("Public Key is required")
    dashboard_name = frappe.db.exists(
        "Insights Dashboard", {"public_key": public_key, "is_public": 1}
    )
    if not dashboard_name:
        frappe.throw("Invalid Public Key")
    doc = frappe.get_doc("Insights Dashboard", dashboard_name)
    row = None
    for item in doc.items:
        if item.item_id == item_id:
            row = item
            break
    if not row:
        frappe.throw("Invalid Item ID")
    options = frappe.parse_json(row.options)
    column = options.get("column")
    if not column:
        frappe.throw("Column not found in Item Options")
    return fetch_column_values(
        data_source=column.get("data_source"),
        table=column.get("table"),
        column=column.get("column"),
        search_text=search_text,
    )
|
2302_79757062/insights
|
insights/api/public.py
|
Python
|
agpl-3.0
| 3,258
|
import frappe
from insights.decorators import insights_whitelist
from insights.insights.doctype.insights_team.insights_team import (
get_allowed_resources_for_user,
)
@insights_whitelist()
def get_queries():
    """List queries the user may access, joined with owner info, chart type
    and data source title for the query list page."""
    allowed_queries = get_allowed_resources_for_user("Insights Query")
    if not allowed_queries:
        return []
    Query = frappe.qb.DocType("Insights Query")
    QueryChart = frappe.qb.DocType("Insights Chart")
    DataSource = frappe.qb.DocType("Insights Data Source")
    User = frappe.qb.DocType("User")
    return (
        frappe.qb.from_(Query)
        .left_join(QueryChart)
        .on(QueryChart.query == Query.name)
        .left_join(User)
        .on(Query.owner == User.name)
        .left_join(DataSource)
        .on(Query.data_source == DataSource.name)
        .select(
            Query.name,
            Query.title,
            Query.status,
            Query.is_assisted_query,
            Query.is_native_query,
            Query.is_stored,
            Query.data_source,
            Query.creation,
            Query.owner,
            User.full_name.as_("owner_name"),
            User.user_image.as_("owner_image"),
            QueryChart.chart_type,
            DataSource.title.as_("data_source_title"),
        )
        .where(Query.name.isin(allowed_queries))
        # group-by mirrors the non-aggregated select list so the query is
        # valid under ONLY_FULL_GROUP_BY and de-duplicates joined rows
        .groupby(
            Query.name,
            User.full_name.as_("owner_name"),
            User.user_image.as_("owner_image"),
            QueryChart.chart_type,
            DataSource.title.as_("data_source_title"),
        )
        .orderby(Query.creation, order=frappe.qb.desc)
    ).run(as_dict=True)
@insights_whitelist()
def create_query(**query):
    """Create a new Insights Query from keyword fields and return it as a
    dict. Script queries are forced onto the "Query Store" data source; a
    starting table is attached for non-assisted queries.

    BUG FIX: the original `if table := query.get("table") and not
    doc.is_assisted_query:` bound `table` to the whole boolean expression
    (walrus binds tighter than `and`), so `table.get(...)` crashed whenever
    a table was supplied for a non-assisted query. Parenthesize the
    assignment so `table` is the table dict itself.
    """
    doc = frappe.new_doc("Insights Query")
    doc.title = query.get("title")
    doc.data_source = query.get("data_source")
    doc.status = "Execution Successful"
    doc.is_assisted_query = query.get("is_assisted_query")
    doc.is_native_query = query.get("is_native_query")
    doc.is_script_query = query.get("is_script_query")
    if query.get("is_script_query"):
        doc.data_source = "Query Store"
    if (table := query.get("table")) and not doc.is_assisted_query:
        doc.append(
            "tables",
            {
                "table": table.get("value"),
                "label": table.get("label"),
            },
        )
    doc.save()
    return doc.as_dict()
@insights_whitelist()
def create_chart():
    """Insert an empty Insights Chart and return its generated docname."""
    doc = frappe.new_doc("Insights Chart")
    doc.save()
    return doc.name
@frappe.whitelist(allow_guest=True)
def pivot(
    data,
    indexes: list[str] | None = None,
    columns: list[str] | None = None,
    values: list[str] | None = None,
):
    """Pivot a list of row dicts into a wide table.

    `indexes` become row labels, `columns` become extra column levels, and
    `values` are summed into the cells; returns flattened record dicts.
    Returns [] when there is no data or no axis/value columns at all.
    """
    indexes = indexes or []
    columns = columns or []
    values = values or []
    if not data or not (indexes + columns + values):
        return []
    import pandas as pd

    frame = pd.DataFrame(data)
    for value_column in values:
        try:
            frame[value_column] = frame[value_column].astype(float).fillna(0).round(2)
        except ValueError:
            # if the value is not a number, then convert it to 1
            # this will show the count of records
            frame[value_column] = frame[value_column].apply(lambda x: 1)
    pivoted = pd.pivot_table(
        frame,
        index=indexes,
        columns=columns,
        values=values,
        sort=False,
        fill_value=0,
        aggfunc="sum",
    ).reset_index()
    return flatten_column_keys(pivoted.to_dict("records"))
def flatten_column_keys(pivoted_records: list[dict]):
    """
    - Move the values to the bottom level
    - Flatten the column names
    Input:
        df = [{ ("Date", "", ""): "2018-01-01", ("Region", "", ""): "A", ("Price", "OK", "No"): 100, ...}]
    Output:
        df = [{ "Date": "2018-01-01", "Region": "A", "OK___No___Price": 100, ...}]

    Rows whose keys are not tuples pass through untouched. Fixes: the
    original crashed with IndexError on an empty row dict, and used
    `type(...) != tuple` where `isinstance` is the idiomatic check.
    """
    new_records = []
    for row in pivoted_records:
        cols = list(row.keys())
        # empty rows and rows with plain (non-tuple) keys are already flat
        if not cols or not isinstance(cols[0], tuple):
            new_records.append(row)
            continue
        new_row = {}
        for keys in cols:
            first_key = keys[0]
            # rotate the value name to the end, then drop empty levels
            new_keys = [key for key in list(keys[1:]) + [first_key] if key]
            new_row["___".join(new_keys)] = row[keys]
        new_records.append(new_row)
    return new_records
|
2302_79757062/insights
|
insights/api/queries.py
|
Python
|
agpl-3.0
| 4,361
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import json
import frappe
from insights.decorators import insights_whitelist
from insights.setup.demo import DemoDataFactory
@insights_whitelist()
def setup_complete():
    """True once the Insights setup wizard has been completed."""
    settings = frappe.get_single("Insights Settings")
    return bool(settings.setup_complete)
@insights_whitelist()
def update_erpnext_source_title(title):
    """Rename the built-in "Site DB" data source to the given title."""
    frappe.db.set_value("Insights Data Source", "Site DB", "title", title)
@insights_whitelist()
def setup_sample_data(dataset):
    """Load the demo data set during onboarding.

    NOTE(review): the `dataset` argument is currently unused — the factory
    loads a fixed demo set; confirm whether callers pass meaningful values.
    """
    factory = DemoDataFactory()
    factory.run()
    # import_demo_queries_and_dashboards()
def import_demo_queries_and_dashboards():
    """Import bundled demo queries and dashboards from setup fixtures.

    Idempotent: skipped entirely if the demo "eCommerce" dashboard already
    exists. Failures are logged (and printed) but never raised, so setup
    can proceed without the demo content.
    """
    demo_dashboard_exists = frappe.db.exists(
        "Insights Dashboard", {"title": "eCommerce"}
    )
    if demo_dashboard_exists:
        return
    try:
        setup_fixture_path = frappe.get_app_path("insights", "setup")
        with open(setup_fixture_path + "/demo_queries.json") as f:
            queries = json.load(f)
            # queries first so the dashboards below can reference them
            for query in queries:
                query_doc = frappe.new_doc("Insights Query")
                query_doc.update(query)
                query_doc.save(ignore_permissions=True)
        with open(setup_fixture_path + "/demo_dashboards.json") as f:
            dashboards = json.load(f)
            for dashboard in dashboards:
                dashboard_doc = frappe.new_doc("Insights Dashboard")
                dashboard_doc.update(dashboard)
                dashboard_doc.save(ignore_permissions=True)
    except Exception as e:
        frappe.log_error("Failed to create Demo Queries and Dashboards")
        print(e)
@insights_whitelist()
def submit_survey_responses(responses):
    """Forward onboarding survey answers to frappeinsights.com.

    Best-effort telemetry: network or serialization failures are logged
    and never surfaced to the user.
    """
    responses = frappe.parse_json(responses)
    try:
        # re-serialize with default=str so non-JSON types (dates etc.) survive
        responses = json.dumps(responses, default=str, indent=4)
        frappe.integrations.utils.make_post_request(
            "https://frappeinsights.com/api/method/insights.telemetry.submit_survey_responses",
            data={"response": responses},
        )
    except Exception:
        frappe.log_error(title="Error submitting survey responses")
def get_new_datasource(db):
    """Build (without saving) an Insights Data Source doc from the setup
    wizard's connection form dict `db`.

    NOTE(review): the branches are deliberately not elif — a payload with
    both a connection string and a MariaDB/PostgreSQL type is updated
    twice, with the later update overwriting shared keys; confirm intended.
    """
    data_source = frappe.new_doc("Insights Data Source")
    if db.get("connection_string"):
        data_source.update(
            {
                "title": db.get("title"),
                "database_type": db.get("type"),
                "connection_string": db.get("connection_string"),
            }
        )
    if db.get("type") == "MariaDB" or db.get("type") == "PostgreSQL":
        data_source.update(
            {
                "database_type": db.get("type"),
                "database_name": db.get("name"),
                "title": db.get("title"),
                "host": db.get("host"),
                "port": db.get("port"),
                "username": db.get("username"),
                "password": db.get("password"),
                "use_ssl": db.get("useSSL"),
            }
        )
    if db.get("type") == "SQLite":
        # SQLite needs no credentials; fall back between title and name
        data_source.update(
            {
                "database_type": db.get("type"),
                "title": db.get("title") or db.get("name"),
                "database_name": db.get("name") or frappe.scrub(db.get("title")),
            }
        )
    return data_source
@insights_whitelist()
def test_database_connection(database):
    """Test connectivity of an unsaved data source built from form input."""
    data_source = get_new_datasource(database)
    return data_source.test_connection(raise_exception=True)
@insights_whitelist()
def add_database(database):
    """Persist a new data source from form input and queue a table sync."""
    data_source = get_new_datasource(database)
    data_source.save()
    data_source.enqueue_sync_tables()
@insights_whitelist()
def complete_setup():
    """Mark the Insights setup wizard as finished."""
    settings = frappe.get_single("Insights Settings")
    settings.setup_complete = 1
    settings.save()
|
2302_79757062/insights
|
insights/api/setup.py
|
Python
|
agpl-3.0
| 3,749
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.integrations.utils import make_post_request
def get_subscription_key():
    """Return the Frappe Cloud subscription secret key from site config,
    or None when unavailable."""
    try:
        return frappe.conf.sk_insights
    except Exception:
        # frappe.conf attribute access normally yields None for missing
        # keys; this guard covers contexts where conf itself is unavailable
        return None
def get_subscription_info():
    """Fetch subscription details from Frappe Cloud.

    Returns {} when no secret key is configured, None when the remote call
    fails, and the "message" payload on success — callers treat all falsy
    values as "no subscription info".
    """
    secret_key = get_subscription_key()
    if not secret_key:
        return {}
    try:
        res = make_post_request(
            "https://frappecloud.com/api/method/press.api.developer.marketplace.get_subscription_info",
            data={"secret_key": secret_key},
        )
        return res["message"]
    except Exception:
        return None
@frappe.whitelist()
def trial_expired():
    """True when the site is on a trial plan whose end date has passed;
    None when no subscription info is available."""
    info = get_subscription_info()
    if not info:
        return None
    plan = info.get("plan", "")
    if "trial" not in plan.lower():
        return False
    expiry = frappe.utils.get_datetime(info.get("end_date", "3000-01-01"))
    return expiry < frappe.utils.now_datetime()
|
2302_79757062/insights
|
insights/api/subscription.py
|
Python
|
agpl-3.0
| 1,045
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from contextlib import suppress
import frappe
from frappe.utils.data import date_diff
from frappe.utils.telemetry import POSTHOG_HOST_FIELD, POSTHOG_PROJECT_FIELD
from posthog import Posthog
from insights.decorators import insights_whitelist
@frappe.whitelist()
def is_enabled():
    """Telemetry is on only when enabled in system settings AND a posthog
    host and project id are configured."""
    enabled = frappe.get_system_settings("enable_telemetry")
    host = frappe.conf.get("posthog_host")
    project_id = frappe.conf.get("posthog_project_id")
    return bool(enabled and host and project_id)
@insights_whitelist()
def get_posthog_settings():
    """Return the posthog configuration the frontend needs to initialize
    client-side analytics, including whether session recording is active."""
    can_record_session = False
    # session recording stays enabled for 24h after it was last requested
    if start_time := frappe.db.get_default("session_recording_start"):
        time_difference = (
            frappe.utils.now_datetime() - frappe.utils.get_datetime(start_time)
        ).total_seconds()
        if time_difference < 86400:  # 1 day
            can_record_session = True
    return {
        "posthog_project_id": frappe.conf.get(POSTHOG_PROJECT_FIELD),
        "posthog_host": frappe.conf.get(POSTHOG_HOST_FIELD),
        "enable_telemetry": frappe.get_system_settings("enable_telemetry"),
        "telemetry_site_age": frappe.utils.telemetry.site_age(),
        "record_session": can_record_session,
        # events are keyed by site name rather than user
        "posthog_identifier": frappe.local.site,
    }
@frappe.whitelist()
def get_credentials():
    """Return the posthog project id and host from site config."""
    return {
        "posthog_project_id": frappe.conf.get(POSTHOG_PROJECT_FIELD),
        "posthog_host": frappe.conf.get(POSTHOG_HOST_FIELD),
    }
@insights_whitelist()
def track_active_site(is_v3=False):
    """Emit an 'active site' telemetry event at most once per day,
    skipping developer mode and unconfigured posthog projects."""
    if frappe.conf.developer_mode:
        return
    if not should_track_active_status():
        return
    if not frappe.conf.get(POSTHOG_PROJECT_FIELD):
        return
    event = "insights_v3_active_site" if is_v3 else "insights_active_site"
    capture_event(event)
    frappe.cache().set_value("last_active_at", frappe.utils.now_datetime())
def capture_event(event_name, properties=None):
    """Send a single posthog event keyed by the site name; silently a
    no-op when posthog is unconfigured or the client raises."""
    project_id = frappe.conf.get(POSTHOG_PROJECT_FIELD)
    host = frappe.conf.get(POSTHOG_HOST_FIELD)
    if not (project_id and host):
        return
    with suppress(Exception):
        client = Posthog(project_id, host=host)
        client.capture(
            distinct_id=frappe.local.site,
            event=event_name,
            properties=properties,
        )
def should_track_active_status():
    """True when no 'active' ping was recorded yet, or the last one is more
    than a day old."""
    last_active_at = frappe.cache().get_value("last_active_at")
    if not last_active_at:
        return True
    last_active_at = frappe.utils.get_datetime(last_active_at)
    return date_diff(frappe.utils.now_datetime(), last_active_at) > 1
|
2302_79757062/insights
|
insights/api/telemetry.py
|
Python
|
agpl-3.0
| 2,644
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.utils import split_emails, validate_email_address
from frappe.utils.user import get_users_with_role
from insights.decorators import insights_whitelist, validate_type
from insights.insights.doctype.insights_team.insights_team import (
get_teams as get_user_teams,
)
from insights.insights.doctype.insights_team.insights_team import is_admin
@insights_whitelist()
def get_users(search_term=None):
    """Returns full_name, email, type, teams, last_active"""
    # non-admins only see themselves
    if not is_admin(frappe.session.user):
        user_info = frappe.db.get_value(
            "User",
            frappe.session.user,
            ["name", "full_name", "email", "last_active", "user_image", "enabled"],
            as_dict=True,
        )
        user_info["type"] = "User"
        user_info["teams"] = get_user_teams(frappe.session.user)
        return [user_info]
    insights_admins = get_users_with_role("Insights Admin")
    insights_users = get_users_with_role("Insights User")
    additional_filters = {}
    if search_term:
        # NOTE(review): both keys in one filters dict are AND-ed by frappe,
        # so this requires the term in full_name AND email — presumably an
        # OR (or_filters) was intended; confirm before changing.
        additional_filters = {
            "full_name": ["like", f"%{search_term}%"],
            "email": ["like", f"%{search_term}%"],
        }
    users = frappe.get_list(
        "User",
        fields=["name", "full_name", "email", "last_active", "user_image", "enabled"],
        filters={
            "name": ["in", list(set(insights_users + insights_admins))],
            **additional_filters,
        },
    )
    for user in users:
        user["type"] = "Admin" if user.name in insights_admins else "User"
        user["teams"] = get_user_teams(user.name)
    # append pending/expired invitations as placeholder (disabled) users
    invitations = frappe.get_list(
        "Insights User Invitation",
        fields=["email", "status"],
        filters={"status": ["in", ["Pending", "Expired"]]},
    )
    for invitation in invitations:
        users.append(
            {
                "name": invitation.email,
                "full_name": invitation.email.split("@")[0],
                "email": invitation.email,
                "last_active": None,
                "user_image": None,
                "enabled": 0,
                "type": "User",
                "teams": [],
                "invitation_status": invitation.status,
            }
        )
    return users
@insights_whitelist()
def get_teams(search_term=None):
    """List teams (optionally name-filtered), each annotated with its
    members and its data-source/table permission rows enriched with
    display labels."""
    teams = frappe.get_list(
        "Insights Team",
        filters={
            "name": ["like", f"%{search_term}%"] if search_term else ["is", "set"],
        },
        fields=[
            "name",
            "team_name",
            "owner",
            "creation",
        ],
    )
    # one bulk fetch of all member rows for the listed teams
    members = frappe.get_all(
        "Insights Team Member",
        fields=["parent", "user"],
        filters={"parent": ["in", [team.name for team in teams]]},
    )
    ResourcePermission = frappe.qb.DocType("Insights Resource Permission")
    DataSource = frappe.qb.DocType("Insights Data Source v3")
    Table = frappe.qb.DocType("Insights Table v3")
    # table permissions joined to table + data source for labels
    table_permissions = (
        frappe.qb.from_(ResourcePermission)
        .left_join(Table)
        .on(ResourcePermission.resource_name == Table.name)
        .left_join(DataSource)
        .on(Table.data_source == DataSource.name)
        .select(
            ResourcePermission.parent,
            ResourcePermission.resource_name,
            ResourcePermission.resource_type,
            DataSource.title.as_("description"),
            Table.label.as_("label"),
            Table.table.as_("table"),
            Table.name.as_("value"),
        )
        .where(
            ResourcePermission.parent.isin([team.name for team in teams])
            & (ResourcePermission.resource_type == "Insights Table v3")
        )
        .run(as_dict=True)
    )
    # data-source permissions joined for labels
    source_permissions = (
        frappe.qb.from_(ResourcePermission)
        .left_join(DataSource)
        .on(ResourcePermission.resource_name == DataSource.name)
        .select(
            ResourcePermission.parent,
            ResourcePermission.resource_name,
            ResourcePermission.resource_type,
            DataSource.name.as_("value"),
            DataSource.title.as_("label"),
            DataSource.database_type.as_("description"),
        )
        .where(
            ResourcePermission.parent.isin([team.name for team in teams])
            & (ResourcePermission.resource_type == "Insights Data Source v3")
        )
        .run(as_dict=True)
    )
    # distribute the bulk-fetched rows onto each team
    for team in teams:
        team.team_members = [
            {"user": member.user} for member in members if member.parent == team.name
        ]
        team.team_permissions = [
            permission
            for permission in source_permissions
            if permission.parent == team.name
        ]
        team.team_permissions += [
            permission
            for permission in table_permissions
            if permission.parent == team.name
        ]
    return teams
@insights_whitelist()
@validate_type
def get_resource_options(team_name: str, search_term: str | None = None):
    """
    Returns the list of data sources and tables that the team doesn't have access to
    """
    frappe.only_for("Insights Admin")
    team = frappe.get_doc("Insights Team", team_name)
    allowed_data_sources = team.get_sources()
    allowed_tables = team.get_tables()
    DataSource = frappe.qb.DocType("Insights Data Source v3")
    Table = frappe.qb.DocType("Insights Table v3")
    # Data sources not already granted to the team, optionally narrowed by the
    # search term (matched against title or database type). Capped at 50.
    filter_condition = DataSource.name.isnotnull()
    if allowed_data_sources:
        filter_condition &= ~DataSource.name.isin(allowed_data_sources)
    if search_term:
        filter_condition &= (DataSource.title.like(f"%{search_term}%")) | (
            DataSource.database_type.like(f"%{search_term}%")
        )
    data_sources = (
        frappe.qb.from_(DataSource)
        .select(DataSource.name, DataSource.title, DataSource.database_type)
        .where(filter_condition)
        .limit(50)
        .run(as_dict=True)
    )
    # Tables not already granted; joined with DataSource so the search can
    # also match the source title and the result carries it as description.
    filter_condition = Table.name.isnotnull()
    if allowed_tables:
        filter_condition &= ~Table.name.isin(allowed_tables)
    if search_term:
        filter_condition &= (
            (Table.label.like(f"%{search_term}%"))
            | (Table.table.like(f"%{search_term}%"))
            | (DataSource.title.like(f"%{search_term}%"))
        )
    tables = (
        frappe.qb.from_(Table)
        .left_join(DataSource)
        .on(Table.data_source == DataSource.name)
        .select(
            Table.name, Table.table, Table.label, DataSource.title.as_("data_source")
        )
        .where(filter_condition)
        .limit(50)
        .run(as_dict=True)
    )
    # Flatten both result sets into the shape the permission picker expects.
    resources = []
    for data_source in data_sources:
        resources.append(
            {
                "resource_type": "Insights Data Source v3",
                "resource_name": data_source.name,
                "value": data_source.name,
                "label": data_source.title,
                "description": data_source.database_type,
            }
        )
    for table in tables:
        resources.append(
            {
                "resource_type": "Insights Table v3",
                "resource_name": table.name,
                "value": table.name,
                "label": table.label,
                "description": table.data_source,
            }
        )
    return resources
@insights_whitelist()
@validate_type
def create_team(team_name: str):
    """Create and return a new Insights Team (admin only)."""
    frappe.only_for("Insights Admin")
    new_team = frappe.new_doc("Insights Team")
    new_team.update({"team_name": team_name})
    new_team.insert()
    return new_team
@insights_whitelist()
@validate_type
def update_team(team: dict):
    """Replace a team's name, members and permissions with the payload.

    `team` is the serialized team from the frontend; its permissions may use
    the short resource types "Source"/"Table" or the full doctype names.
    """
    frappe.only_for("Insights Admin")
    team = frappe._dict(team)
    doc = frappe.get_doc("Insights Team", team.name)
    # The built-in "Admin" team must keep its name.
    if team.name != "Admin" and doc.team_name != team.team_name:
        doc.rename(team.team_name)

    # Members and permissions are replaced wholesale, not merged.
    doc.set("team_members", [])
    for member in team.team_members:
        doc.append(
            "team_members",
            {
                "user": member["user"],
            },
        )

    team.team_permissions = sorted(
        team.team_permissions, key=lambda x: (x["resource_type"], x["resource_name"])
    )
    doc.set("team_permissions", [])
    for permission in team.team_permissions:
        # Normalize the short names the UI sends to actual doctype names.
        resource_type = (
            "Insights Data Source v3"
            if permission["resource_type"] == "Source"
            else "Insights Table v3"
            if permission["resource_type"] == "Table"
            else permission["resource_type"]
        )
        if resource_type not in ["Insights Data Source v3", "Insights Table v3"]:
            continue
        doc.append(
            "team_permissions",
            {
                # Fixed: store the normalized doctype, not the raw value —
                # previously "Source"/"Table" were saved as-is, producing
                # permissions pointing at non-existent doctypes.
                "resource_type": resource_type,
                "resource_name": permission["resource_name"],
            },
        )
    doc.save()
@insights_whitelist()
def add_insights_user(user):
    # Not implemented: users are onboarded via invite_users/accept_invitation.
    raise NotImplementedError
@frappe.whitelist(allow_guest=True)
@validate_type
def accept_invitation(key: str):
    """Guest endpoint hit from the invitation email link: accept the
    invitation matching `key`, log the user in and redirect to Insights."""
    if not key:
        frappe.throw("Invalid or expired key")
    invitation_name = frappe.db.exists("Insights User Invitation", {"key": key})
    if not invitation_name:
        frappe.throw("Invalid or expired key")
    invitation = frappe.get_doc("Insights User Invitation", invitation_name)
    invitation.accept()
    # Reload to pick up the status change done by accept().
    invitation.reload()
    if invitation.status == "Accepted":
        frappe.local.login_manager.login_as(invitation.email)
        frappe.local.response["type"] = "redirect"
        frappe.local.response["location"] = "/insights"
@insights_whitelist()
@validate_type
def invite_users(emails: str):
    """Create an Insights User Invitation for each new address in the
    comma-separated `emails` string (admin only)."""
    frappe.only_for("Insights Admin")
    if not emails:
        return
    # NOTE(review): validate_email_address(..., throw=False) presumably returns
    # the cleaned string of valid addresses (invalid ones dropped) — confirm.
    email_string = validate_email_address(emails, throw=False)
    email_list = split_emails(email_string)
    if not email_list:
        return
    # Skip addresses that already have a pending or accepted invitation.
    existing_invites = frappe.db.get_all(
        "Insights User Invitation",
        filters={
            "email": ["in", email_list],
            "status": ["in", ["Pending", "Accepted"]],
        },
        pluck="email",
    )
    new_invites = list(set(email_list) - set(existing_invites))
    for email in new_invites:
        invite = frappe.new_doc("Insights User Invitation")
        invite.email = email
        invite.insert(ignore_permissions=True)
|
2302_79757062/insights
|
insights/api/user.py
|
Python
|
agpl-3.0
| 10,454
|
import frappe
import ibis
from ibis import _
from insights.decorators import insights_whitelist
from insights.insights.doctype.insights_data_source_v3.ibis_utils import (
IbisQueryBuilder,
execute_ibis_query,
get_columns_from_schema,
)
@insights_whitelist()
def fetch_query_results(operations, use_live_connection=True):
    """Build and execute the query described by `operations`; return its SQL,
    column metadata, rows, and the total (un-truncated) row count."""
    results = []
    ibis_query = IbisQueryBuilder().build(operations, use_live_connection)
    if ibis_query is None:
        return
    columns = get_columns_from_schema(ibis_query.schema())
    results = execute_ibis_query(ibis_query)
    results = results.to_dict(orient="records")
    # Separate aggregate query for the total count — execute_ibis_query
    # presumably limits the returned rows (it takes a `limit` elsewhere).
    count_query = ibis_query.aggregate(count=_.count())
    count_results = execute_ibis_query(count_query)
    total_count = count_results.values[0][0]
    return {
        "sql": ibis.to_sql(ibis_query),
        "columns": columns,
        "rows": results,
        "total_row_count": int(total_count),
    }
@insights_whitelist()
def download_query_results(operations, use_live_connection=True):
    """Build the query from `operations` and return its results as CSV text."""
    query = IbisQueryBuilder().build(operations, use_live_connection)
    if query is None:
        return
    # Cap the export at one million rows.
    frame = execute_ibis_query(query, limit=1_000_000)
    return frame.to_csv(index=False)
@insights_whitelist()
def get_distinct_column_values(
    operations, column_name, search_term=None, use_live_connection=True
):
    """Return up to 20 distinct values of `column_name` from the query built
    from `operations`, optionally filtered case-insensitively by search."""
    query = IbisQueryBuilder().build(operations, use_live_connection)
    values_query = (
        query.select(column_name)
        .filter(
            # Without a search term, just exclude NULLs.
            getattr(_, column_name).notnull()
            if not search_term
            else getattr(_, column_name).ilike(f"%{search_term}%")
        )
        .distinct()
        .head(20)
    )
    # cache=True: these values feed autocomplete, so repeats are common.
    result = execute_ibis_query(values_query, cache=True)
    return result[column_name].tolist()
@insights_whitelist()
def get_columns_for_selection(operations, use_live_connection=True):
    """Return the column metadata of the query built from `operations`."""
    built_query = IbisQueryBuilder().build(operations, use_live_connection)
    return get_columns_from_schema(built_query.schema())
@insights_whitelist()
def get_workbooks():
    """List Insights Workbooks visible to the current user."""
    workbook_fields = ["name", "title", "owner", "creation", "modified"]
    return frappe.get_list("Insights Workbook", fields=workbook_fields)
@insights_whitelist()
def get_share_permissions(workbook_name):
    """Return the DocShare permissions (read/write/share) for a workbook,
    plus an implicit full-access row for the workbook owner.

    Fixed: the permission-denied message called `_`, but this module imports
    `_` from ibis (the deferred-expression builder), not the translation
    function — use frappe._ explicitly.
    """
    if not frappe.has_permission("Insights Workbook", ptype="share", doc=workbook_name):
        frappe.throw(frappe._("You do not have permission to share this workbook"))
    DocShare = frappe.qb.DocType("DocShare")
    User = frappe.qb.DocType("User")
    share_permissions = (
        frappe.qb.from_(DocShare)
        .left_join(User)
        .on(DocShare.user == User.name)
        .select(
            DocShare.user,
            DocShare.read,
            DocShare.write,
            DocShare.share,
            User.full_name,
        )
        .where(DocShare.share_doctype == "Insights Workbook")
        .where(DocShare.share_name == workbook_name)
        .run(as_dict=True)
    )
    # The owner has no DocShare row; append them so the dialog always lists
    # the owner. NOTE(review): this row carries no "share" key unlike the
    # DocShare rows — confirm whether the frontend expects one.
    owner = frappe.db.get_value("Insights Workbook", workbook_name, "owner")
    share_permissions.append(
        {
            "user": owner,
            "full_name": frappe.get_value("User", owner, "full_name"),
            "read": 1,
            "write": 1,
        }
    )
    return share_permissions
@insights_whitelist()
def update_share_permissions(workbook_name, permissions):
    """Create or update one DocShare row per user for a workbook.

    Fixed: the permission-denied message called `_`, which this module
    imports from ibis (deferred expressions), not the translation function —
    use frappe._ explicitly. Also replaced the lambda assignment with a
    named helper (idiomatic, keeps tracebacks readable).
    """
    if not frappe.has_permission("Insights Workbook", ptype="share", doc=workbook_name):
        frappe.throw(frappe._("You do not have permission to share this workbook"))

    def perm_exists(user):
        # One DocShare row per (workbook, user) pair.
        return frappe.db.exists(
            "DocShare",
            {
                "share_doctype": "Insights Workbook",
                "share_name": workbook_name,
                "user": user,
            },
        )

    for permission in permissions:
        if not perm_exists(permission["user"]):
            doc = frappe.new_doc("DocShare")
            doc.update(
                {
                    "share_doctype": "Insights Workbook",
                    "share_name": workbook_name,
                    "user": permission["user"],
                    "read": permission["read"],
                    "write": permission["write"],
                }
            )
            doc.save()
        else:
            doc = frappe.get_doc(
                "DocShare",
                {
                    "share_doctype": "Insights Workbook",
                    "share_name": workbook_name,
                    "user": permission["user"],
                },
            )
            doc.read = permission["read"]
            doc.write = permission["write"]
            doc.save()
|
2302_79757062/insights
|
insights/api/workbooks.py
|
Python
|
agpl-3.0
| 4,688
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import hashlib
import frappe
EXPIRY = 60 * 10
def make_digest(*args):
    """Build a stable MD5 hex digest from the given args, used as a cache key.

    Dicts are serialized as JSON (stable representation); everything else is
    stringified with frappe.cstr. Fixed: a dict argument was previously
    appended twice (once as JSON, then again via cstr) because the cstr line
    was not in an else branch.
    """
    key = ""
    for arg in args:
        if isinstance(arg, dict):
            key += frappe.as_json(arg)
        else:
            key += frappe.cstr(arg)
    # MD5 is fine here: the digest is a cache key, not a security boundary.
    return hashlib.md5(key.encode("utf-8")).hexdigest()
def get_or_set_cache(key, func, force=False, expiry=EXPIRY):
    """Return the cached value for `key`, computing and caching it via
    `func()` when missing or when `force` is set."""
    # Namespaced so reset_insights_cache can clear everything by prefix.
    cache_key = f"insights|{key}"
    if not force:
        cached = frappe.cache().get_value(cache_key)
        if cached is not None:
            return cached
    fresh = func()
    frappe.cache().set_value(cache_key, fresh, expires_in_sec=expiry)
    return fresh
@frappe.whitelist()
def reset_insights_cache():
    # Clears every key written by get_or_set_cache (all prefixed "insights").
    frappe.only_for("System Manager")
    frappe.cache().delete_keys("insights*")
|
2302_79757062/insights
|
insights/cache_utils.py
|
Python
|
agpl-3.0
| 837
|
from frappe import _
def get_data():
    """Desk module definition shown on the Frappe desk home."""
    insights_module = {
        "module_name": "Frappe Insights",
        "color": "grey",
        "icon": "octicon octicon-file-directory",
        "type": "module",
        "label": _("Frappe Insights"),
    }
    return [insights_module]
|
2302_79757062/insights
|
insights/config/desktop.py
|
Python
|
agpl-3.0
| 280
|
"""
Configuration for docs
"""
# source_link = "https://github.com/[org_name]/insights"
# headline = "App that does everything"
# sub_heading = "Yes, you got that right the first time, everything"
def get_context(context):
    # Website context hook for the docs pages: sets the navbar brand text.
    context.brand_html = "Frappe Insights"
|
2302_79757062/insights
|
insights/config/docs.py
|
Python
|
agpl-3.0
| 269
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
INCLUSIONS = ["*.py"]
EXCLUSIONS = [
"*.js",
"*.xml",
"*.pyc",
"*.css",
"*.less",
"*.scss",
"*.vue",
"*.html",
"*/test_*",
"*/node_modules/*",
"*/patches/*",
"*/config/*",
"*/tests/*",
"*/insights/setup.py",
"*/coverage.py",
"*/patches/*",
]
class CodeCoverage:
    """Context manager that records code coverage for one app's test run,
    writing an XML report on exit. No-op when `with_coverage` is falsy."""

    def __init__(self, with_coverage, app):
        self.with_coverage = with_coverage
        self.app = app or "insights"

    def __enter__(self):
        if self.with_coverage:
            # Imported lazily so the coverage package is only required when
            # coverage is actually requested.
            import os
            from coverage import Coverage
            from frappe.utils import get_bench_path

            # Generate coverage report only for app that is being tested
            source_path = os.path.join(get_bench_path(), "apps", self.app)
            self.coverage = Coverage(
                source=[source_path], omit=EXCLUSIONS, include=INCLUSIONS
            )
            self.coverage.start()

    def __exit__(self, exc_type, exc_value, traceback):
        if self.with_coverage:
            self.coverage.stop()
            self.coverage.save()
            self.coverage.xml_report()
            print("Saved Coverage")
|
2302_79757062/insights
|
insights/coverage.py
|
Python
|
agpl-3.0
| 1,254
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
import inspect
import threading
from functools import wraps
import frappe
def check_role(role):
    """Decorator factory: block the call unless the session user has `role`.

    Administrator always passes. When permissions are disabled in Insights
    Settings, the Insights Admin/User roles are not enforced either.
    """

    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            if frappe.session.user == "Administrator":
                return function(*args, **kwargs)
            perm_disabled = not frappe.db.get_single_value(
                "Insights Settings", "enable_permissions"
            )
            if perm_disabled and role in ["Insights Admin", "Insights User"]:
                return function(*args, **kwargs)
            if not frappe.db.get_value(
                "Has Role",
                {"parent": frappe.session.user, "role": role},
                # Role assignments rarely change; skip the cache in tests.
                cache=not frappe.flags.in_test,
            ):
                frappe.throw(
                    frappe._("You do not have permission to access this resource"),
                    frappe.PermissionError,
                )
            return function(*args, **kwargs)

        return wrapper

    return decorator
def check_permission(doctype, permission_type="read"):
    # Decorator factory: assert the session user has `permission_type`
    # permission on `doctype` (frappe raises PermissionError otherwise).
    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            frappe.has_permission(doctype, permission_type, throw=True)
            return function(*args, **kwargs)

        return wrapper

    return decorator
def log_error(raise_exc=False):
    """Decorator factory: log (and optionally re-raise) any exception raised
    by the wrapped function.

    When `raise_exc` is False the exception is swallowed after logging and
    the wrapper returns None. Fixed: re-raise with a bare `raise` instead of
    `raise e`, preserving the original traceback context.
    """

    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            try:
                return function(*args, **kwargs)
            except Exception as e:
                frappe.log_error("Insights Error")
                print(f"Error in {function.__name__}", e)
                if raise_exc:
                    raise

        return wrapper

    return decorator
def debounce(wait):
    """Debounce decorator to be used on methods.

    - This decorator will ensure that the method is called only after
      `wait` seconds have passed since the last call.
    - The method will be called if the arguments are different from the
      last call.
    - Returns the result of the last call if the method is called again

    Parameters
    ----------
    wait : int
        Number of seconds to wait before calling the method again.

    Returns
    -------
    function
        The decorated function.
    """

    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            # State (last_call/last_args/last_kwargs/last_result) is stored
            # on the function object itself and shared by all callers.
            # NOTE(review): this is not synchronized — confirm whether
            # concurrent callers are a concern here.
            # check if the method is called for the first time
            if not hasattr(function, "last_call"):
                function.last_call = threading.Event()
            # check if the arguments are different from the last call
            if (
                function.last_call.is_set()
                and function.last_args == args
                and function.last_kwargs == kwargs
                and hasattr(function, "last_result")
            ):
                return function.last_result
            # set the arguments and call the method
            function.last_args = args
            function.last_kwargs = kwargs
            function.last_call.set()
            try:
                function.last_result = function(*args, **kwargs)
                return function.last_result
            finally:
                # reset the event after `wait` seconds
                threading.Timer(wait, function.last_call.clear).start()

        return wrapper

    return decorator
def validate_type(func):
@wraps(func)
def wrapper(*args, **kwargs):
sig = inspect.signature(func)
annotated_types = {
k: v.annotation
for k, v in sig.parameters.items()
if v.annotation != inspect._empty
}
bound_args = sig.bind(*args, **kwargs)
bound_args.apply_defaults()
for arg_name, arg_value in bound_args.arguments.items():
if (
arg_name in annotated_types
and arg_value is not None
and not isinstance(arg_value, annotated_types[arg_name])
):
raise TypeError(
f"{func.__name__}: Argument {arg_name} must be of type {annotated_types[arg_name]}"
)
return func(*args, **kwargs)
return wrapper
def insights_whitelist(*args, **kwargs):
    # usage:
    # @insights_whitelist()
    # def my_function():
    #     pass
    #
    # what it does:
    # @frappe.whitelist()
    # @check_role("Insights User")
    # def my_function():
    #     pass
    #
    # i.e. exposes the function over HTTP and restricts it to users with the
    # "Insights User" role; extra args/kwargs are forwarded to frappe.whitelist.
    def decorator(function):
        @wraps(function)
        @frappe.whitelist(*args, **kwargs)
        @check_role("Insights User")
        def wrapper(*args, **kwargs):
            return function(*args, **kwargs)

        return wrapper

    return decorator
|
2302_79757062/insights
|
insights/decorators.py
|
Python
|
agpl-3.0
| 4,838
|
app_name = "insights"
app_title = "Frappe Insights"
app_publisher = "Frappe Technologies Pvt. Ltd."
app_description = "Powerful Reporting Tool for Frappe Apps"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "hello@frappe.io"
app_license = "GNU GPLv3"
export_python_type_annotations = True
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/insights/css/insights.css"
# app_include_js = "insights.bundle.js"
# include js, css files in header of web template
# web_include_css = "/assets/insights/css/insights.css"
# web_include_js = "/assets/insights/js/insights.js"
# include custom scss in every website theme (without file extension ".scss")
# website_theme_scss = "insights/public/scss/website"
# include js, css files in header of web form
# webform_include_js = {"doctype": "public/js/doctype.js"}
# webform_include_css = {"doctype": "public/css/doctype.css"}
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Jinja
# ----------
# add methods and filters to jinja environment
# jinja = {
# "methods": "insights.utils.jinja_methods",
# "filters": "insights.utils.jinja_filters"
# }
# Setup
# ------------
setup_wizard_requires = "assets/insights/js/setup_wizard.js"
setup_wizard_stages = "insights.setup.setup_wizard.get_setup_stages"
# Installation
# ------------
# before_install = "insights.install.before_install"
# after_install = "insights.setup.after_install"
# after_migrate = ["insights.migrate.after_migrate"]
before_request = [
"insights.insights.doctype.insights_data_source_v3.insights_data_source_v3.before_request"
]
after_request = [
"insights.insights.doctype.insights_data_source_v3.insights_data_source_v3.after_request"
]
fixtures = [
{
"dt": "Insights Data Source",
"filters": {"name": ("in", ["Site DB", "Query Store"])},
},
{
"dt": "Insights Data Source v3",
"filters": {"name": ("in", ["Site DB"])},
},
{
"dt": "Insights Team",
"filters": {"name": ("in", ["Admin"])},
},
]
# Uninstallation
# ------------
# before_uninstall = "insights.uninstall.before_uninstall"
# after_uninstall = "insights.uninstall.after_uninstall"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "insights.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
permission_query_conditions = {
"Insights Data Source v3": "insights.permissions.get_data_source_query_conditions",
"Insights Table v3": "insights.permissions.get_table_query_conditions",
"Insights Team": "insights.permissions.get_team_query_conditions",
}
has_permission = {
"Insights Data Source v3": "insights.permissions.has_doc_permission",
"Insights Table v3": "insights.permissions.has_doc_permission",
}
# DocType Class
# ---------------
# Override standard doctype classes
# override_doctype_class = {
# "ToDo": "custom_app.overrides.CustomToDo"
# }
# Document Events
# ---------------
# Hook on document methods and events
# doc_events = {
# "*": {
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }
# }
# Scheduled Tasks
# ---------------
scheduler_events = {
"all": ["insights.insights.doctype.insights_alert.insights_alert.send_alerts"]
}
# Testing
# -------
before_tests = "insights.tests.utils.before_tests"
# Overriding Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "insights.event.get_events"
# }
#
# each overriding function accepts a `data` argument;
# generated from the base implementation of the doctype dashboard,
# along with any modifications made in other Frappe apps
# override_doctype_dashboards = {
# "Task": "insights.task.get_dashboard_data"
# }
# exempt linked doctypes from being automatically cancelled
#
# auto_cancel_exempted_doctypes = ["Auto Repeat"]
# User Data Protection
# --------------------
# user_data_fields = [
# {
# "doctype": "{doctype_1}",
# "filter_by": "{filter_by}",
# "redact_fields": ["{field_1}", "{field_2}"],
# "partial": 1,
# },
# {
# "doctype": "{doctype_2}",
# "filter_by": "{filter_by}",
# "partial": 1,
# },
# {
# "doctype": "{doctype_3}",
# "strict": False,
# },
# {
# "doctype": "{doctype_4}"
# }
# ]
# Authentication and authorization
# --------------------------------
# auth_hooks = [
# "insights.auth.validate"
# ]
page_renderer = "insights.utils.InsightsPageRenderer"
website_route_rules = [
{"from_route": "/insights/<path:app_path>", "to_route": "insights"},
{"from_route": "/insights_v2/<path:app_path>", "to_route": "insights_v2"},
]
|
2302_79757062/insights
|
insights/hooks.py
|
Python
|
agpl-3.0
| 5,398
|
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// Desk form script: adds a manual "Send Alert" button to Insights Alert.
frappe.ui.form.on("Insights Alert", {
	refresh: function (frm) {
		// Disabled alerts cannot be sent manually.
		if (frm.doc.disabled) return;
		frm.add_custom_button(__("Send Alert"), function () {
			// Freeze the UI while the server evaluates and sends the alert.
			frappe.dom.freeze(__("Sending Alert..."));
			frm.call("send_alert")
				.then(() => {
					frappe.dom.unfreeze();
					frappe.show_alert({
						message: __("Alert sent"),
						indicator: "green",
					});
				})
				.catch(() => {
					// Always unfreeze, even when sending fails.
					frappe.dom.unfreeze();
				});
		});
	},
});
|
2302_79757062/insights
|
insights/insights/doctype/insights_alert/insights_alert.js
|
JavaScript
|
agpl-3.0
| 562
|
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from datetime import datetime
import frappe
import telegram
from croniter import croniter
from frappe.model.document import Document
from frappe.utils import validate_email_address
from frappe.utils.data import get_datetime, get_datetime_str, now_datetime
from pandas import DataFrame
class InsightsAlert(Document):
    def validate(self):
        # Surface condition syntax/eval errors at save time rather than at
        # scheduled send time.
        try:
            self.evaluate_condition(for_validate=True)
        except Exception as e:
            frappe.throw(f"Invalid condition: {e}")

    @frappe.whitelist()
    def send_alert(self):
        """Evaluate the condition and, when it holds, send via the configured
        channel; last_execution is recorded only after a send."""
        if not self.evaluate_condition():
            return
        if self.channel == "Email":
            self.send_email_alert()
        if self.channel == "Telegram":
            self.send_telegram_alert()
        # update_modified=False keeps scheduler runs from bumping `modified`.
        self.db_set("last_execution", now_datetime(), update_modified=False)

    def send_telegram_alert(self):
        message = self.evaluate_message()
        tg = Telegram(self.telegram_chat_id)
        tg.send(message)

    def send_email_alert(self):
        subject = f"Insights Alert: {self.title}"
        recievers = self.get_recipients()
        message = self.evaluate_message()
        frappe.sendmail(
            recipients=recievers,
            subject=subject,
            message=message,
            now=True,
        )

    def evaluate_condition(self, for_validate=False):
        """Run the alert's query and evaluate `self.condition` against the
        results, exposed to the expression as a pandas DataFrame."""
        query = frappe.get_doc("Insights Query", self.query)
        # During validation don't trigger a fresh (possibly slow) fetch.
        results = query.retrieve_results(fetch_if_not_cached=not for_validate)
        if (hasattr(results, "empty") and results.empty) or not results:
            return False
        # First result row holds column metadata; the rest are data rows.
        column_names = [d.get("label") for d in results[0]]
        results = DataFrame(results[1:], columns=column_names)
        return frappe.safe_eval(
            self.condition, eval_locals=frappe._dict(results=results, any=any)
        )

    def evaluate_message(self):
        """Render the message template with the query (and fresh results) as
        context; email messages additionally get the alert HTML wrapper."""
        query = frappe.get_doc("Insights Query", self.query)
        query_dict = query.as_dict()
        query_dict.results = query.retrieve_results(fetch_if_not_cached=True)
        message = frappe.render_template(self.message, context=query_dict)
        if self.channel == "Telegram":
            return message
        return frappe.render_template(
            "insights/templates/alert.html", context=frappe._dict(message=message)
        )

    def get_recipients(self):
        # Comma-separated list; every entry must be a valid email address.
        recipients = self.recipients.split(",")
        for recipient in recipients:
            if not validate_email_address(recipient):
                frappe.throw(f"{recipient} is not a valid email address")
        return recipients

    @property
    def next_execution(self):
        return get_datetime_str(self.get_next_execution())

    def get_next_execution(self):
        """Next due time from the cron schedule (or the frequency preset),
        computed from the last execution (epoch 2000-01-01 when never run)."""
        CRON_MAP = {
            "Monthly": "0 0 1 * *",
            "Weekly": "0 0 * * 0",
            "Daily": "0 0 * * *",
            "Hourly": "0 * * * *",
        }
        if not self.cron_format:
            self.cron_format = CRON_MAP[self.frequency]
        start_time = get_datetime(self.last_execution or datetime(2000, 1, 1))
        return croniter(self.cron_format, start_time).get_next(datetime)

    def is_event_due(self):
        if not self.last_execution:
            return True
        next_execution = self.get_next_execution()
        return next_execution <= now_datetime()
def send_alerts():
    # Scheduler entry point (hooks.scheduler_events["all"]): send every
    # enabled alert whose schedule is due.
    alerts = frappe.get_all("Insights Alert", filters={"disabled": 0})
    for alert in alerts:
        alert_doc = frappe.get_cached_doc("Insights Alert", alert.name)
        if alert_doc.is_event_due():
            alert_doc.send_alert()
            # Commit per alert so one failure doesn't roll back earlier sends.
            frappe.db.commit()
class Telegram:
    """Thin wrapper around the Telegram bot API, configured from the
    Insights Settings single doc."""

    def __init__(self, chat_id: str = None):
        # Token is stored encrypted on Insights Settings.
        self.token = frappe.get_single("Insights Settings").get_password(
            "telegram_api_token"
        )
        if not self.token:
            frappe.throw("Telegram Bot Token not set in Insights Settings")
        # NOTE(review): when chat_id is falsy, self.chat_id is never set and
        # send() will raise AttributeError — confirm callers always pass one.
        if chat_id:
            self.chat_id = chat_id

    def send(self, message):
        # Telegram caps messages at 4096 characters; truncate to fit.
        try:
            return self.bot.send_message(chat_id=self.chat_id, text=message[:4096])
        except Exception:
            frappe.log_error("Telegram Bot Error")
            raise

    @property
    def bot(self):
        # A fresh Bot instance per access, built from the stored token.
        return telegram.Bot(token=self.token)
|
2302_79757062/insights
|
insights/insights/doctype/insights_alert/insights_alert.py
|
Python
|
agpl-3.0
| 4,384
|
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("Insights Chart", {
// refresh(frm) {
// },
// });
|
2302_79757062/insights
|
insights/insights/doctype/insights_chart/insights_chart.js
|
JavaScript
|
agpl-3.0
| 197
|
import frappe
def execute():
    """convert_bar_to_row_chart"""
    # Migration patch: bar charts saved with invertAxis=True are really
    # horizontal (row) charts — convert them to the dedicated "Row" type
    # and clear the flag.
    insights_charts = frappe.get_all(
        "Insights Chart",
        filters={"options": ["like", "%invertAxis%"]},
        fields=["name", "options"],
    )
    for insights_chart in insights_charts:
        doc = frappe.get_doc("Insights Chart", insights_chart.name)
        options = frappe.parse_json(doc.options)
        if options.get("invertAxis"):
            options["invertAxis"] = False
            doc.options = frappe.as_json(options)
            doc.chart_type = "Row"
            doc.save()
    # Same migration for dashboard items, which store their own copy of
    # the chart options.
    insights_dashboard_item = frappe.get_all(
        "Insights Dashboard Item",
        filters={"options": ["like", "%invertAxis%"]},
        fields=["name", "options"],
    )
    for insights_dashboard in insights_dashboard_item:
        doc = frappe.get_doc("Insights Dashboard Item", insights_dashboard.name)
        options = frappe.parse_json(doc.options)
        if options.get("invertAxis"):
            options["invertAxis"] = False
            doc.options = frappe.as_json(options)
            doc.item_type = "Row"
            doc.save()
|
2302_79757062/insights
|
insights/insights/doctype/insights_chart/patches/convert_bar_to_row_chart.py
|
Python
|
agpl-3.0
| 1,127
|
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// No custom desk-form behavior yet; dashboards are managed from the Insights UI.
frappe.ui.form.on('Insights Dashboard', {
	// refresh: function(frm) {
	// }
})
|
2302_79757062/insights
|
insights/insights/doctype/insights_dashboard/insights_dashboard.js
|
JavaScript
|
agpl-3.0
| 202
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import random
from contextlib import suppress
import frappe
from frappe.model.document import Document
from insights import notify
from insights.api.permissions import is_private
from insights.cache_utils import make_digest
from .utils import guess_layout_for_chart
CACHE_NAMESPACE = "insights_dashboard"
class InsightsDashboard(Document):
    @frappe.whitelist()
    def is_private(self):
        # Delegates to insights.api.permissions.is_private.
        return is_private("Insights Dashboard", self.name)

    @property
    def cache_namespace(self):
        # Per-dashboard cache prefix so clear_charts_cache can delete by glob.
        return f"{CACHE_NAMESPACE}|{self.name}"

    def add_chart(self, chart):
        """Append a chart to the dashboard, auto-placing it below the
        existing items (see guess_layout_for_chart)."""
        chart_doc = frappe.get_doc("Insights Chart", chart)
        new_layout = guess_layout_for_chart(chart_doc.chart_type, self)
        self.append(
            "items",
            {
                # Random numeric id; only needs to be unique per dashboard.
                "item_id": frappe.utils.cint(random.random() * 1000000),
                "item_type": chart_doc.chart_type,
                "options": chart_doc.options,
                "layout": new_layout,
            },
        )
        self.save()

    @frappe.whitelist()
    def clear_charts_cache(self):
        """Drop this dashboard's cached chart data and the cached results of
        every query referenced by its items."""
        frappe.cache().delete_keys(f"*{self.cache_namespace}:*")
        for row in self.items:
            # Cheap substring pre-check before parsing the JSON options.
            if '"query"' not in row.options:
                continue
            options = frappe.parse_json(row.options)
            if not options.query:
                continue
            frappe.cache().delete_keys(f"*insights_query_results:{options.query}*")
        notify(**{"type": "success", "title": "Cache Cleared"})

    @frappe.whitelist()
    def fetch_chart_data(self, item_id, query_name=None, filters=None):
        """Return the (possibly cached) query results behind a dashboard item."""
        row = next((row for row in self.items if row.item_id == item_id), None)
        if not row and not query_name:
            return frappe.throw("Item not found")
        query_name = query_name or frappe.parse_json(row.options).query
        if not query_name:
            return frappe.throw("Query not found")
        return self.run_query(query_name, additional_filters=filters)

    def run_query(self, query_name, additional_filters=None):
        # Cache key includes the query's modified timestamp, so editing the
        # query naturally invalidates stale entries.
        last_modified = frappe.db.get_value("Insights Query", query_name, "modified")
        key = make_digest(query_name, last_modified, additional_filters)
        key = f"{self.cache_namespace}:{key}"
        if frappe.cache().exists(key):
            return frappe.cache().get_value(key)
        query = frappe.get_cached_doc("Insights Query", query_name)
        # TODO: if 3 charts with same query results is fetched, it will be fetched 3 times
        new_results = query.fetch_results(additional_filters=additional_filters)
        query_result_expiry = frappe.db.get_single_value(
            "Insights Settings", "query_result_expiry"
        )
        # The setting is stored in minutes; the cache API expects seconds.
        query_result_expiry_in_seconds = query_result_expiry * 60
        frappe.cache().set_value(
            key, new_results, expires_in_sec=query_result_expiry_in_seconds
        )
        return new_results
@frappe.whitelist()
def get_queries_column(query_names):
    """Return the columns of every table used by the given queries,
    de-duplicated per (data source, table)."""
    # TODO: handle permissions
    table_by_datasource = {}
    for query_name in list(set(query_names)):
        # TODO: to further optimize, store the used tables in the query on save
        query = frappe.get_cached_doc("Insights Query", query_name)
        for table in query.get_selected_tables():
            if query.data_source not in table_by_datasource:
                table_by_datasource[query.data_source] = {}
            table_by_datasource[query.data_source][table.table] = table
    columns = []
    for data_source, tables in table_by_datasource.items():
        for table_name, table in tables.items():
            table_doc = None
            # A table may have been removed since the query was saved; skip it.
            with suppress(frappe.DoesNotExistError):
                table_doc = frappe.get_cached_doc(
                    "Insights Table",
                    {
                        "table": table_name,
                        "data_source": data_source,
                    },
                )
            if not table_doc:
                continue
            _columns = table_doc.get_columns()
            for column in _columns:
                columns.append(
                    {
                        "column": column.column,
                        "label": column.label,
                        "table": table.table,
                        "table_label": table.label,
                        "type": column.type,
                        "data_source": data_source,
                    }
                )
    return columns
@frappe.whitelist()
def get_query_columns(query):
    """Fetch the column metadata for a saved Insights Query."""
    # TODO: handle permissions
    query_doc = frappe.get_cached_doc("Insights Query", query)
    return query_doc.fetch_columns()
def get_dashboard_public_key(name):
    """Return the dashboard's public-link key, generating and persisting a
    new one if the dashboard doesn't have one yet."""
    key = frappe.db.get_value("Insights Dashboard", name, "public_key", cache=True)
    if not key:
        key = frappe.generate_hash()
        frappe.db.set_value("Insights Dashboard", name, "public_key", key)
    return key
@frappe.whitelist()
def get_dashboard_file(filename):
    """Parse an uploaded dashboard-export File doc and return its queries,
    the data sources those queries need, and the dashboard definition."""
    file = frappe.get_doc("File", filename)
    dashboard = file.get_content()
    dashboard = frappe.parse_json(dashboard)
    # Export format: {"queries": {<name>: <json-string>}, "dashboard": {...}}
    queries = [frappe.parse_json(query) for query in dashboard.get("queries").values()]
    data_sources = [query.metadata["data_source"] for query in queries]
    return {
        "data_sources": list(set(data_sources)),
        "queries": queries,
        "dashboard": dashboard.get("dashboard"),
    }
|
2302_79757062/insights
|
insights/insights/doctype/insights_dashboard/insights_dashboard.py
|
Python
|
agpl-3.0
| 5,587
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import chardet
import frappe
def get_frontend_file(file_path):
    """Read a file from the bundled frontend directory and return it as text.

    Uses chardet to sniff the encoding. FIX: chardet.detect() returns
    {"encoding": None} for empty/undetectable content, and
    bytes.decode(None) raises TypeError — fall back to UTF-8 in that case.
    """
    frontend_path = frappe.get_app_path("insights", "../frontend")
    with open(frontend_path + file_path, "rb") as f:
        data = f.read()
    encoding = chardet.detect(data)["encoding"] or "utf-8"
    return data.decode(encoding)
def guess_layout_for_chart(chart_type, dashboard):
    """Pick a default grid position and size for a new chart on a dashboard.

    Width/height come from the frontend's widgetDimensions.json (falling back
    to 4x4); the chart is placed below the lowest existing item.
    """
    dimensions = frappe.parse_json(
        get_frontend_file("/src/widgets/widgetDimensions.json")
    )
    if chart_type in dimensions:
        width = dimensions[chart_type]["defaultWidth"]
        height = dimensions[chart_type]["defaultHeight"]
    else:
        width, height = 4, 4
    # find the bottom edge of the current layout
    bottom = 0
    for item in dashboard.items:
        item_layout = frappe.parse_json(item.layout)
        bottom = max(bottom, item_layout["y"] + item_layout["h"])
    return {"x": 0, "y": bottom, "w": width, "h": height}
|
2302_79757062/insights
|
insights/insights/doctype/insights_dashboard/utils.py
|
Python
|
agpl-3.0
| 1,030
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class InsightsDashboardItem(Document):
    # Controller for the "Insights Dashboard Item" child doctype.
    # No custom server-side behavior is defined here.
    pass
|
2302_79757062/insights
|
insights/insights/doctype/insights_dashboard_item/insights_dashboard_item.py
|
Python
|
agpl-3.0
| 229
|
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// Make the built-in "Query Store" data source read-only in the form UI.
frappe.ui.form.on("Insights Data Source", {
	refresh: function (frm) {
		// FIX: the document name lives on frm.doc.name (frm.name is
		// undefined on the Form object), so this guard never triggered.
		if (frm.doc.name == "Query Store") {
			frm.set_read_only();
		}
	},
});
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/insights_data_source.js
|
JavaScript
|
agpl-3.0
| 264
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from functools import cached_property
import frappe
from frappe.model.document import Document
from frappe.utils.caching import redis_cache, site_cache
from insights import notify
from insights.insights.doctype.insights_query.insights_query import InsightsQuery
from insights.insights.doctype.insights_team.insights_team import (
check_table_permission,
get_permission_filter,
)
from .sources.base_database import BaseDatabase, DatabaseConnectionError
from .sources.frappe_db import FrappeDB, SiteDB, is_frappe_db
from .sources.mariadb import MariaDB
from .sources.postgresql import PostgresDatabase
from .sources.query_store import QueryStore
from .sources.sqlite import SQLiteDB
class InsightsDataSourceDocument:
    """Document lifecycle hooks mixed into InsightsDataSource."""

    def before_insert(self):
        # enforce a single "site database" source per site
        if self.is_site_db and frappe.db.exists(
            "Insights Data Source", {"is_site_db": 1}
        ):
            frappe.throw("Only one site database can be configured")

    def before_save(self: "InsightsDataSource"):
        # status mirrors connectivity at save time
        self.status = "Active" if self.test_connection() else "Inactive"

    def on_trash(self):
        # the site DB and Query Store are built-in and must not be deleted
        if self.is_site_db:
            frappe.throw("Cannot delete the site database. It is needed for Insights.")
        if self.name == "Query Store":
            frappe.throw("Cannot delete the Query Store. It is needed for Insights.")
        # cascade-delete dependent records
        linked_doctypes = ["Insights Table"]
        for doctype in linked_doctypes:
            for name in frappe.db.get_all(
                doctype, {"data_source": self.name}, pluck="name"
            ):
                frappe.delete_doc(doctype, name)

    def validate(self):
        # built-in sources need no connection fields
        if self.is_site_db or self.name == "Query Store":
            return
        if self.database_type == "SQLite":
            self.validate_sqlite_fields()
        else:
            self.validate_remote_db_fields()

    def validate_sqlite_fields(self):
        # SQLite only needs a database file name
        mandatory = ("database_name",)
        for field in mandatory:
            if not self.get(field):
                frappe.throw(f"{field} is mandatory for SQLite")

    def validate_remote_db_fields(self):
        # a full connection string supersedes the individual fields
        if self.connection_string:
            return
        mandatory = ("host", "port", "username", "password", "database_name")
        for field in mandatory:
            if not self.get(field):
                frappe.throw(f"{field} is mandatory for Database")
class InsightsDataSourceClient:
    """Whitelisted (client-callable) API methods mixed into InsightsDataSource."""

    @frappe.whitelist()
    @redis_cache(ttl=60 * 60 * 24)
    def get_tables(self):
        # cached for 24h; permission filter limits results to the user's teams
        return frappe.get_list(
            "Insights Table",
            filters={
                "data_source": self.name,
                **get_permission_filter("Insights Table"),
            },
            fields=[
                "name",
                "table",
                "label",
                "hidden",
                "is_query_based",
                "data_source",
            ],
            order_by="hidden asc, label asc",
        )

    @frappe.whitelist()
    def get_queries(self):
        # queries belonging to this data source, permission-filtered
        return frappe.get_list(
            "Insights Query",
            filters={
                "data_source": self.name,
                **get_permission_filter("Insights Query"),
            },
            fields=[
                "name",
                "title",
                "data_source",
            ],
        )

    @frappe.whitelist()
    def get_schema(self):
        return get_data_source_schema(self.name)

    @frappe.whitelist()
    def enqueue_sync_tables(self):
        from frappe.utils.scheduler import is_scheduler_inactive

        # warn when the scheduler is down; the job is still enqueued below
        # (now=True runs it inline anyway)
        if is_scheduler_inactive():
            notify(
                **{
                    "title": "Error",
                    "message": "Scheduler is inactive",
                    "type": "error",
                }
            )
        frappe.enqueue_doc(
            doctype=self.doctype,
            name=self.name,
            method="sync_tables",
            job_name="sync_data_source",
            queue="long",
            timeout=3600,
            now=True,
        )

    @frappe.whitelist()
    def update_table_link(self, data):
        # create or update a manual table link (join metadata) on the primary table
        data = frappe._dict(data)
        data_source = self.name
        primary_table = data.primary_table
        foreign_table = data.foreign_table
        primary_column = data.primary_column
        foreign_column = data.foreign_column
        cardinality = data.cardinality
        check_table_permission(data_source, primary_table)
        doc = frappe.get_doc(
            "Insights Table",
            {
                "data_source": data_source,
                "table": primary_table,
            },
        )
        link = {
            "primary_key": primary_column,
            "foreign_key": foreign_column,
            "foreign_table": foreign_table,
        }
        existing_link = doc.get("table_links", link)
        if not existing_link:
            link["cardinality"] = cardinality
            doc.append("table_links", link)
            doc.save()
        elif existing_link[0].cardinality != cardinality:
            # link already exists; only persist when cardinality changed
            existing_link[0].cardinality = cardinality
            doc.save()

    @frappe.whitelist()
    def delete_table_link(self, data):
        # remove the first matching link row from the primary table
        data = frappe._dict(data)
        data_source = self.name
        primary_table = data.primary_table
        foreign_table = data.foreign_table
        primary_column = data.primary_column
        foreign_column = data.foreign_column
        check_table_permission(data_source, primary_table)
        doc = frappe.get_doc(
            "Insights Table",
            {
                "data_source": data_source,
                "table": primary_table,
            },
        )
        for link in doc.table_links:
            if (
                link.primary_key == primary_column
                and link.foreign_key == foreign_column
                and link.foreign_table == foreign_table
            ):
                doc.remove(link)
                doc.save()
                break
class InsightsDataSource(
    InsightsDataSourceDocument, InsightsDataSourceClient, Document
):
    """A configured database connection; delegates all DB work to self._db."""

    @cached_property
    def _db(self) -> BaseDatabase:
        # resolve the concrete adapter once per document instance
        if self.is_site_db:
            return SiteDB(data_source=self.name)
        if self.name == "Query Store":
            return QueryStore()
        if self.database_type == "SQLite":
            return SQLiteDB(data_source=self.name, database_name=self.database_name)
        password = self.get_password(raise_exception=False)
        conn_args = {
            "data_source": self.name,
            "host": self.host,
            "port": self.port,
            "use_ssl": self.use_ssl,
            "username": self.username,
            "password": password,
            "database_name": self.database_name,
            "connection_string": self.connection_string,
        }
        # a MySQL-compatible server hosting a Frappe site gets the richer adapter
        if is_frappe_db(conn_args):
            return FrappeDB(**conn_args)
        if self.database_type == "MariaDB":
            return MariaDB(**conn_args)
        if self.database_type == "PostgreSQL":
            return PostgresDatabase(**conn_args)
        frappe.throw(f"Unsupported database type: {self.database_type}")

    def test_connection(self, raise_exception=False):
        # truthy on success; False on a known connection error; None when an
        # unexpected exception is logged and not re-raised
        try:
            return self._db.test_connection()
        except DatabaseConnectionError:
            return False
        except Exception as e:
            frappe.log_error("Testing Data Source connection failed", e)
            if raise_exception:
                raise e

    def sync_tables(self, tables=None, force=False):
        # notify before/after; the adapter does the actual schema sync
        notify(
            type="info",
            title="Syncing Data Source",
            message="This may take a while. Please wait...",
        )
        self._db.sync_tables(tables=tables, force=force)
        notify(
            type="success",
            title="Syncing Data Source",
            message="Syncing completed.",
        )

    def build_query(self, query: InsightsQuery):
        return self._db.build_query(query)

    def run_query(self, query: InsightsQuery):
        return self._db.run_query(query)

    def execute_query(self, query: str, **kwargs):
        return self._db.execute_query(query, **kwargs)

    def get_table_columns(self, table):
        # TODO: deprecate this method, used only once in insights_table.py
        return self._db.get_table_columns(table)

    def get_column_options(self, table, column, search_text=None, limit=50):
        return self._db.get_column_options(table, column, search_text, limit)

    def get_table_preview(self, table, limit=100):
        return self._db.get_table_preview(table, limit)
@site_cache(maxsize=128)
def get_data_source_schema(data_source):
    """Build {table_name: {label, is_query_based, columns: [...]}} for all
    visible tables of a data source. Results are cached per site.

    FIX: Table.label and TableColumn.label were both selected under the same
    result key "label", so with as_dict=True one silently shadowed the other
    and either the table label or every column label came back wrong. Alias
    them so both survive.
    """
    Table = frappe.qb.DocType("Insights Table")
    TableColumn = frappe.qb.DocType("Insights Table Column")
    schema_list = (
        frappe.qb.from_(Table)
        .select(
            Table.table,
            Table.label.as_("table_label"),
            Table.is_query_based,
            TableColumn.column,
            TableColumn.label.as_("column_label"),
            TableColumn.type,
        )
        .left_join(TableColumn)
        .on(Table.name == TableColumn.parent)
        .where((Table.data_source == data_source) & (Table.hidden == 0))
        .run(as_dict=True)
    )
    schema = {}
    for row in schema_list:
        schema.setdefault(
            row.table,
            {
                "label": row.table_label,
                "is_query_based": row.is_query_based,
                "columns": [],
            },
        )
        # left join: a table with no columns yields a row with null column fields
        schema[row.table]["columns"].append(
            {
                "column": row.column,
                "label": row.column_label,
                "type": row.type,
            }
        )
    return schema
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/insights_data_source.py
|
Python
|
agpl-3.0
| 9,812
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import re
import frappe
from sqlalchemy.sql import text
from insights.insights.doctype.insights_table_import.insights_table_import import (
InsightsTableImport,
)
from insights.utils import ResultColumn
from .utils import (
add_limit_to_sql,
cache_results,
compile_query,
execute_and_log,
get_cached_results,
replace_query_tables_with_cte,
)
class DatabaseConnectionError(frappe.ValidationError):
    # generic failure while opening a connection to an external database
    pass


class DatabaseCredentialsError(frappe.ValidationError):
    # authentication failure (e.g. "Access denied") — see frappe_db/mariadb
    pass


class DatabaseParallelConnectionError(frappe.ValidationError):
    # transient error seen under concurrent connections; callers may retry
    pass
class Database:
    """Abstract interface every data-source adapter must implement."""

    def test_connection(self):
        raise NotImplementedError

    def connect(self):
        raise NotImplementedError

    def build_query(self, query):
        # return the SQL string for an Insights Query document
        raise NotImplementedError

    def run_query(self, query):
        raise NotImplementedError

    def execute_query(self):
        raise NotImplementedError

    def sync_tables(self):
        # refresh the Insights Table metadata from the database schema
        raise NotImplementedError

    def get_table_columns(self):
        raise NotImplementedError

    def get_column_options(self):
        # distinct values of a column, for filter autocomplete
        raise NotImplementedError

    def get_table_preview(self):
        raise NotImplementedError

    def table_exists(self, table: str):
        """
        While importing a csv file, check if the table exists in the database
        """
        raise NotImplementedError

    def import_table(self, import_doc: InsightsTableImport):
        """
        Imports the table into the database
        """
        raise NotImplementedError
class BaseDatabase(Database):
    """Shared SQLAlchemy-backed implementation of the Database interface.

    Subclasses set self.engine, self.query_builder and self.table_factory.
    """

    def __init__(self):
        self.engine = None
        self.data_source = None
        self.connection = None
        self.query_builder = None
        self.table_factory = None

    def test_connection(self, log_errors=True):
        with self.connect(log_errors=log_errors) as connection:
            res = connection.execute(text("SELECT 1"))
            return res.fetchone()

    def connect(self, *, log_errors=True):
        try:
            return self.engine.connect()
        except Exception as e:
            log_errors and frappe.log_error("Error connecting to database")
            self.handle_db_connection_error(e)

    def handle_db_connection_error(self, e):
        # subclasses override to map driver errors onto specific exceptions
        raise DatabaseConnectionError(e) from e

    def build_query(self, query):
        """Used to update the sql in insights query"""
        query_str = self.query_builder.build(query)
        # native queries are taken verbatim; built queries may have query
        # tables rewritten into CTEs
        query_str = (
            self.process_subquery(query_str) if not query.is_native_query else query_str
        )
        return query_str

    def run_query(self, query):
        sql = self.query_builder.build(query)
        return self.execute_query(sql, return_columns=True, query_name=query.name)

    def execute_query(
        self,
        sql,  # can be a string or a sqlalchemy query object or text object
        pluck=False,
        return_columns=False,
        cached=False,
        query_name=None,
        log_errors=True,
    ):
        # empty input short-circuits to an empty result
        if sql is None:
            return []
        if isinstance(sql, str) and not sql.strip():
            return []
        # NOTE: the order of these transforms is significant — compile to a
        # string first, then rewrite query tables, cap the row count, expand
        # {{ QRY_* }} tags, and finally escape '%' for the DB-API driver
        sql = self.compile_query(sql)
        sql = self.process_subquery(sql)
        sql = self.set_row_limit(sql)
        sql = self.replace_template_tags(sql)
        sql = self.escape_special_characters(sql)
        self.validate_native_sql(sql)
        if cached:
            cached_results = get_cached_results(sql, self.data_source)
            if cached_results:
                return cached_results
        with self.connect(log_errors=log_errors) as connection:
            res = execute_and_log(connection, sql, self.data_source, query_name)
            cols = [ResultColumn.from_args(d[0]) for d in res.cursor.description]
            rows = [list(r) for r in res.fetchall()]
            rows = [r[0] for r in rows] if pluck else rows
            ret = [cols] + rows if return_columns else rows
            cached and cache_results(sql, self.data_source, ret)
            return ret

    def compile_query(self, query):
        # compile SQLAlchemy constructs to a SQL string for this dialect;
        # plain strings pass through unchanged
        if hasattr(query, "compile"):
            compiled = compile_query(query, self.engine.dialect)
            query = str(compiled) if compiled else None
        return query

    def process_subquery(self, sql):
        # optionally rewrite references to stored-query tables as CTEs
        allow_subquery = frappe.db.get_single_value(
            "Insights Settings", "allow_subquery"
        )
        if allow_subquery:
            sql = replace_query_tables_with_cte(
                sql, self.data_source, self.engine.dialect
            )
        return sql

    def escape_special_characters(self, sql):
        # to fix special characters in query like %
        if self.engine.dialect.name in ("mysql", "postgresql"):
            sql = re.sub(r"(%{1,})", r"%%", sql)
        return sql

    def replace_template_tags(self, sql):
        # replace template tags with actual values
        # {{ QRY_1203 }} -> SELECT * FROM `tabSales Invoice`
        # find all the template tags in the query
        # match all character between {{ and }}
        matches = re.findall(r"{{(.*?)}}", sql)
        if not matches:
            return sql
        context = {}
        for match in matches:
            # tag uses underscores; doc names use hyphens (QRY_1 -> QRY-1)
            query_name = match.strip().replace("_", "-")
            if (
                not query_name
                or not query_name.startswith("QRY")
                or not frappe.db.exists("Insights Query", query_name)
            ):
                continue
            query = frappe.get_doc("Insights Query", query_name)
            key = query_name.replace("-", "_")
            context[key] = self.build_query(query)
        sql = frappe.render_template(sql, context)
        return sql

    def set_row_limit(self, sql):
        # set a hard max limit to prevent long running queries
        # there's no use case to view more than 500 rows in the UI
        # TODO: while exporting as csv, we can remove this limit
        max_rows = (
            frappe.db.get_single_value("Insights Settings", "query_result_limit") or 500
        )
        return add_limit_to_sql(sql, max_rows)

    def validate_native_sql(self, query):
        # only read-only statements are allowed through this path
        select_or_with = str(query).strip().lower().startswith(("select", "with"))
        if not select_or_with:
            frappe.throw("Only SELECT and WITH queries are allowed")
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/base_database.py
|
Python
|
agpl-3.0
| 6,416
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe import _dict
from sqlalchemy import column as Column
from sqlalchemy import table as Table
from sqlalchemy import text
from sqlalchemy.engine.base import Connection
from insights.insights.query_builders.sql_builder import SQLQueryBuilder
from .base_database import (
BaseDatabase,
DatabaseCredentialsError,
DatabaseParallelConnectionError,
)
from .mariadb import MARIADB_TO_GENERIC_TYPES, MariaDB
from .utils import create_insights_table, get_sqlalchemy_engine
class FrappeTableFactory:
    """Fetches tables and columns from the database and links from doctype
    metadata (DocField / Custom Field), for Frappe-site data sources."""

    def __init__(self, data_source) -> None:
        # db_conn is injected by sync_tables (or by the owning adapter)
        self.db_conn: Connection
        self.data_source = data_source

    def sync_tables(self, connection, tablenames, force=False):
        # rebuild Insights Table docs for the given tables (all when None)
        self.db_conn = connection
        self.columns_by_tables = self.get_columns_by_tables(tablenames)
        for tablename, columns in self.columns_by_tables.items():
            table = self.get_table(tablename)
            table.columns = columns
            table.table_links = self.get_table_links(table.label)
            create_insights_table(table, force=force)

    def get_columns_by_tables(self, tablenames=None):
        # query information_schema for columns of the current database
        t = Table(
            "columns",
            Column("table_name"),
            Column("column_name"),
            Column("data_type"),
            Column("table_schema"),
            schema="information_schema",
        )
        query = t.select().where(t.c.table_schema == text("DATABASE()"))
        if tablenames:
            query = query.where(t.c.table_name.in_(tablenames))
        columns = self.db_conn.execute(query).fetchall()
        schema = {}
        for [table_name, column_name, data_type, _] in columns:
            # Frappe doctype tables are all prefixed with "tab"
            if not table_name.startswith("tab"):
                continue
            schema.setdefault(table_name, []).append(
                self.get_column(column_name, data_type)
            )
        return schema

    def get_table(self, table_name):
        # label: "tabSales Invoice" -> "Sales Invoice" (title-cased)
        return _dict(
            {
                "table": table_name,
                "label": table_name.replace("tab", "").title(),
                "data_source": self.data_source,
            }
        )

    def get_column(self, column_name, column_type):
        return _dict(
            {
                "column": column_name,
                "label": frappe.unscrub(column_name),
                "type": MARIADB_TO_GENERIC_TYPES.get(column_type, "String"),
            }
        )

    def get_table_links(self, doctype):
        # lazily build and memoize the full link map on first use
        if not hasattr(self, "_all_links") or not self._all_links:
            self._all_links = self.get_all_links()
        return self._all_links.get(doctype, [])

    def get_all_links(self):
        # derive table links from Link and Table fields, both standard
        # (DocField) and custom (Custom Field)
        doctype_links = {}
        DocField = frappe.qb.DocType("DocField")
        query = (
            frappe.qb.from_(DocField)
            .select(
                DocField.fieldname,
                DocField.fieldtype,
                DocField.options,
                DocField.parent,
            )
            .where((DocField.fieldtype == "Link") | (DocField.fieldtype == "Table"))
            .get_sql()
        )
        query = text(query)
        standard_links = self.db_conn.execute(query).fetchall()
        CustomField = frappe.qb.DocType("Custom Field")
        query = (
            frappe.qb.from_(CustomField)
            .select(
                CustomField.fieldname,
                CustomField.fieldtype,
                CustomField.options,
                CustomField.dt.as_("parent"),
            )
            .where(
                (CustomField.fieldtype == "Link") | (CustomField.fieldtype == "Table")
            )
            .get_sql()
        )
        query = text(query)
        custom_links = self.db_conn.execute(query).fetchall()
        for link_row in standard_links + custom_links:
            link = _dict(link_row._asdict())
            if link.fieldtype == "Link":
                # User is linked with ToDo by `owner` field
                # User.name = ToDo.owner
                doctype_links.setdefault(link.options, []).append(
                    {
                        "primary_key": "name",
                        "foreign_key": link.fieldname,
                        "foreign_table": "tab" + link.parent,
                        "foreign_table_label": link.parent,
                        "cardinality": "1:N",
                    }
                )
            if link.fieldtype == "Table":
                # child table rows point back via `parent`
                doctype_links.setdefault(link.parent, []).append(
                    {
                        "primary_key": "name",
                        "foreign_key": "parent",
                        "foreign_table": "tab" + link.options,
                        "foreign_table_label": link.options,
                        "cardinality": "1:N",
                    }
                )
        return doctype_links

    def get_dynamic_link_map(self):
        # copied from frappe.model.dynamic_links
        # NOTE(review): not called anywhere in this file — verify callers
        DocField = frappe.qb.DocType("DocField")
        DocType = frappe.qb.DocType("DocType")
        CustomField = frappe.qb.DocType("Custom Field")
        standard_dynamic_links_query = (
            frappe.qb.from_(DocField)
            .from_(DocType)
            .select(
                DocField.parent,
                DocField.fieldname,
                DocField.options,
                DocType.issingle,
            )
            .where(
                (DocField.fieldtype == "Dynamic Link")
                & (DocType.name == DocField.parent)
            )
            .get_sql()
        )
        custom_dynamic_links_query = (
            frappe.qb.from_(CustomField)
            .from_(DocType)
            .select(
                CustomField.dt.as_("parent"),
                CustomField.fieldname,
                CustomField.options,
                DocType.issingle,
            )
            .where(
                (CustomField.fieldtype == "Dynamic Link")
                & (DocType.name == CustomField.dt)
            )
            .get_sql()
        )
        dynamic_link_queries = [
            text(standard_dynamic_links_query),
            text(custom_dynamic_links_query),
        ]
        dynamic_link_map = {}
        dynamic_links = []
        for query in dynamic_link_queries:
            dynamic_links += self.db_conn.execute(query).fetchall()
        for df_row in dynamic_links:
            df = _dict(df_row._asdict())
            if df.issingle:
                dynamic_link_map.setdefault(df.parent, []).append(df)
            else:
                try:
                    links = self.db_conn.execute(
                        text(f"""select distinct {df.options} from `tab{df.parent}`""")
                    ).fetchall()
                except Exception:
                    # table may not exist for this doctype; skip it
                    continue
                links = [l[0] for l in links]
                for doctype in links:
                    dynamic_link_map.setdefault(doctype, []).append(df)
        return dynamic_link_map
class FrappeDB(MariaDB):
    """MariaDB adapter specialised for databases hosting a Frappe site."""

    def __init__(
        self, data_source, host, port, username, password, database_name, use_ssl, **_
    ):
        self.data_source = data_source
        # aggressive 1s timeouts: connection probing must fail fast
        self.engine = get_sqlalchemy_engine(
            dialect="mysql",
            driver="pymysql",
            username=username,
            password=password,
            database=database_name,
            host=host,
            port=port,
            ssl=use_ssl,
            ssl_verify_cert=True,
            charset="utf8mb4",
            use_unicode=True,
            connect_args={"connect_timeout": 1, "read_timeout": 1, "write_timeout": 1},
        )
        self.query_builder: SQLQueryBuilder = SQLQueryBuilder(self.engine)
        self.table_factory: FrappeTableFactory = FrappeTableFactory(data_source)

    def test_connection(self, log_errors=True):
        # tabDocType exists on every Frappe site; also used by is_frappe_db()
        return self.execute_query(
            "select name from tabDocType limit 1", pluck=True, log_errors=log_errors
        )

    def handle_db_connection_error(self, e):
        # map driver messages to typed exceptions before the generic fallback
        if "Access denied" in str(e):
            raise DatabaseCredentialsError()
        if "Packet sequence number wrong" in str(e):
            raise DatabaseParallelConnectionError()
        super().handle_db_connection_error(e)

    def sync_tables(self, tables=None, force=False):
        # "begin" ensures that the connection is committed and closed
        with self.engine.begin() as connection:
            self.table_factory.sync_tables(connection, tables, force)

    def get_table_preview(self, table, limit=100):
        data = self.execute_query(
            f"""select * from `{table}` limit {limit}""", cached=True
        )
        length = self.execute_query(f"""select count(*) from `{table}`""", cached=True)[
            0
        ][0]
        return {
            "data": data or [],
            "length": length or 0,
        }

    def get_table_columns(self, table):
        # NOTE(review): FrappeTableFactory does not define get_table_columns
        # in this file — verify it exists elsewhere or this raises AttributeError
        with self.connect() as connection:
            self.table_factory.db_conn = connection
            return self.table_factory.get_table_columns(table)

    def get_column_options(self, table, column, search_text=None, limit=50):
        t = Table(table, Column(column))
        query = t.select().distinct().limit(limit)
        if search_text:
            query = query.where(Column(column).like(f"%{search_text}%"))
        query = self.compile_query(query)
        return self.execute_query(query, pluck=True)
class SiteDB(FrappeDB):
    """FrappeDB adapter wired to the current site's own database, using the
    connection settings from site config."""

    def __init__(self, data_source):
        self.data_source = data_source
        conn_kwargs = dict(
            dialect="mysql",
            driver="pymysql",
            username=frappe.conf.db_name,
            password=frappe.conf.db_password,
            database=frappe.conf.db_name,
            host=frappe.conf.db_host or "127.0.0.1",
            port=frappe.conf.db_port or "3306",
            ssl=False,
            ssl_verify_cert=True,
            charset="utf8mb4",
            use_unicode=True,
        )
        self.engine = get_sqlalchemy_engine(**conn_kwargs)
        self.query_builder: SQLQueryBuilder = SQLQueryBuilder(self.engine)
        self.table_factory: FrappeTableFactory = FrappeTableFactory(data_source)
from insights.cache_utils import get_or_set_cache, make_digest
def is_frappe_db(db_params):
    """Return whether the given connection params point at a database hosting
    a Frappe site. The probe result is cached indefinitely per param set."""

    def probe():
        # success == connecting and querying tabDocType without raising
        try:
            FrappeDB(**db_params).test_connection(log_errors=False)
            return True
        except Exception:
            return False

    cache_key = make_digest("is_frappe_db", db_params)
    return get_or_set_cache(cache_key, probe, expiry=None)
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/frappe_db.py
|
Python
|
agpl-3.0
| 10,698
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from sqlalchemy import column as Column
from sqlalchemy import select as Select
from sqlalchemy import table as Table
from sqlalchemy import text
from sqlalchemy.engine.base import Connection
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
from insights.insights.query_builders.sql_builder import SQLQueryBuilder
from .base_database import (
BaseDatabase,
DatabaseCredentialsError,
DatabaseParallelConnectionError,
)
from .utils import create_insights_table, get_sqlalchemy_engine
# Map MariaDB/MySQL information_schema data types to Insights' generic
# column types; unmapped types fall back to "String" at the lookup site.
MARIADB_TO_GENERIC_TYPES = {
    "int": "Integer",
    "bigint": "Long Int",
    "decimal": "Decimal",
    "text": "Text",
    "longtext": "Long Text",
    "date": "Date",
    "datetime": "Datetime",
    "time": "Time",
    "varchar": "String",
}
class MariaDBTableFactory:
    """Fetches tables and columns from a generic MariaDB database (no
    doctype-based link inference, unlike FrappeTableFactory)."""

    def __init__(self, data_source) -> None:
        # db_conn is injected by sync_tables
        self.db_conn: Connection
        self.data_source = data_source

    def sync_tables(self, connection, tablenames, force=False):
        self.db_conn = connection
        self.columns_by_tables = self.get_columns_by_tables(tablenames)
        for tablename, columns in self.columns_by_tables.items():
            table = self.get_table(tablename)
            table.columns = columns
            # infer table links from foreign key constraints
            # table.table_links = self.get_table_links(table.label)
            create_insights_table(table, force=force)

    def get_table(self, table_name):
        return frappe._dict(
            {
                "table": table_name,
                "label": frappe.unscrub(table_name),
                "data_source": self.data_source,
            }
        )

    def get_columns_by_tables(self, tablenames=None):
        # query information_schema for the current database's columns
        t = Table(
            "columns",
            Column("table_name"),
            Column("column_name"),
            Column("data_type"),
            Column("table_schema"),
            schema="information_schema",
        )
        query = t.select().where(t.c.table_schema == text("DATABASE()"))
        if tablenames:
            query = query.where(t.c.table_name.in_(tablenames))
        columns = self.db_conn.execute(query).fetchall()
        schema = {}
        for [table_name, column_name, data_type, _] in columns:
            schema.setdefault(table_name, []).append(
                self.get_column(column_name, data_type)
            )
        return schema

    def get_column(self, column_name, column_type):
        return frappe._dict(
            {
                "column": column_name,
                "label": frappe.unscrub(column_name),
                "type": MARIADB_TO_GENERIC_TYPES.get(column_type, "String"),
            }
        )
class MariaDB(BaseDatabase):
    """Adapter for generic MariaDB/MySQL data sources."""

    def __init__(
        self, data_source, host, port, username, password, database_name, use_ssl, **_
    ):
        self.data_source = data_source
        # 1s connect timeout: probing a data source must fail fast
        self.engine = get_sqlalchemy_engine(
            dialect="mysql",
            driver="pymysql",
            username=username,
            password=password,
            database=database_name,
            host=host,
            port=port,
            ssl=use_ssl,
            ssl_verify_cert=use_ssl,
            charset="utf8mb4",
            use_unicode=True,
            connect_args={"connect_timeout": 1},
        )
        self.query_builder: SQLQueryBuilder = SQLQueryBuilder(self.engine)
        self.table_factory: MariaDBTableFactory = MariaDBTableFactory(data_source)

    # retry transient parallel-connection errors up to 3 times, 1s apart
    @retry(
        retry=retry_if_exception_type((DatabaseParallelConnectionError,)),
        stop=stop_after_attempt(3),
        wait=wait_fixed(1),
        reraise=True,
    )
    def connect(self, *args, **kwargs):
        return super().connect(*args, **kwargs)

    def handle_db_connection_error(self, e):
        # map driver messages to typed exceptions before the generic fallback
        if "Access denied" in str(e):
            raise DatabaseCredentialsError()
        if "Packet sequence number wrong" in str(e):
            raise DatabaseParallelConnectionError()
        super().handle_db_connection_error(e)

    def sync_tables(self, tables=None, force=False):
        # "begin" commits and closes the connection when the block exits
        with self.engine.begin() as connection:
            self.table_factory.sync_tables(connection, tables, force)

    def get_table_preview(self, table, limit=100):
        data = self.execute_query(
            f"""select * from `{table}` limit {limit}""", cached=True
        )
        length = self.execute_query(f"""select count(*) from `{table}`""", cached=True)[
            0
        ][0]
        return {
            "data": data or [],
            "length": length or 0,
        }

    def get_table_columns(self, table):
        # NOTE(review): MariaDBTableFactory does not define get_table_columns
        # in this file — verify it exists elsewhere or this raises AttributeError
        with self.connect() as connection:
            self.table_factory.db_conn = connection
            return self.table_factory.get_table_columns(table)

    def get_column_options(self, table, column, search_text=None, limit=50):
        query = Select(Column(column)).select_from(Table(table)).distinct().limit(limit)
        if search_text:
            query = query.where(Column(column).like(f"%{search_text}%"))
        query = self.compile_query(query)
        return self.execute_query(query, pluck=True)
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/mariadb.py
|
Python
|
agpl-3.0
| 5,331
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import re
import frappe
from sqlalchemy import column as Column
from sqlalchemy import inspect
from sqlalchemy import select as Select
from sqlalchemy import table as Table
from sqlalchemy import text
from sqlalchemy.engine.base import Connection
from insights.insights.query_builders.postgresql.builder import PostgresQueryBuilder
from .base_database import BaseDatabase
from .utils import create_insights_table, get_sqlalchemy_engine
# Regex patterns for table names to skip during sync (internal tables).
IGNORED_TABLES = ["__.*"]

# Map PostgreSQL type names to Insights' generic column types; unmapped
# types fall back to "String" at the lookup site.
POSTGRESQL_TO_GENERIC_TYPES = {
    "integer": "Integer",
    "bigint": "Long Int",
    "numeric": "Decimal",
    "text": "Text",
    "varchar": "String",
    "date": "Date",
    "timestamp": "Datetime",
    "time": "Time",
    "longtext": "Long Text",
    "boolean": "String",  # TODO: change to boolean
}
class PostgresTableFactory:
    """Fetches tables and columns from a PostgreSQL database via the
    SQLAlchemy inspector."""

    def __init__(self, data_source) -> None:
        # db_conn is injected by sync_tables
        self.db_conn: Connection
        self.data_source = data_source

    def sync_tables(self, connection, tables, force=False):
        self.db_conn = connection
        for table in self.get_tables(table_names=tables):
            # when force is true, it will overwrite the existing columns & links
            create_insights_table(table, force=force)

    def get_tables(self, table_names=None):
        tables = []
        for table in self.get_db_tables(table_names):
            table.columns = self.get_table_columns(table.table)
            # TODO: process foreign keys as links
            tables.append(table)
        return tables

    def get_db_tables(self, table_names=None):
        # include foreign (FDW) tables alongside regular ones
        inspector = inspect(self.db_conn)
        tables = set(inspector.get_table_names()) | set(
            inspector.get_foreign_table_names()
        )
        if table_names:
            tables = [table for table in tables if table in table_names]
        return [
            self.get_table(table) for table in tables if not self.should_ignore(table)
        ]

    def should_ignore(self, table_name):
        return any(re.match(pattern, table_name) for pattern in IGNORED_TABLES)

    def get_table(self, table_name):
        return frappe._dict(
            {
                "table": table_name,
                "label": frappe.unscrub(table_name),
                "data_source": self.data_source,
            }
        )

    def get_all_columns(self):
        inspector = inspect(self.db_conn)
        tables = inspector.get_table_names()
        columns_by_table = {}
        for table in tables:
            columns = inspector.get_columns(table)
            for col in columns:
                columns_by_table.setdefault(table, []).append(
                    self.get_column(col["name"], col["type"])
                )
        return columns_by_table

    def get_table_columns(self, table):
        # lazily build and memoize the full column map on first use
        if not hasattr(self, "_all_columns") or not self._all_columns:
            self._all_columns = self.get_all_columns()
        return self._all_columns.get(table, [])

    def get_column(self, column_name, column_type):
        # NOTE(review): inspector.get_columns() yields SQLAlchemy type
        # *objects* for "type", while this dict is keyed by strings — the
        # lookup likely always falls back to "String"; verify.
        return frappe._dict(
            {
                "column": column_name,
                "label": frappe.unscrub(column_name),
                "type": POSTGRESQL_TO_GENERIC_TYPES.get(column_type, "String"),
            }
        )
class PostgresDatabase(BaseDatabase):
    """Adapter for PostgreSQL data sources."""

    def __init__(self, **kwargs):
        # 1s connect timeout: probing a data source must fail fast
        connect_args = {"connect_timeout": 1}
        self.data_source = kwargs.pop("data_source")
        # a full connection string takes precedence over individual fields
        if connection_string := kwargs.pop("connection_string", None):
            self.engine = get_sqlalchemy_engine(
                connection_string=connection_string, connect_args=connect_args
            )
        else:
            self.engine = get_sqlalchemy_engine(
                dialect="postgresql",
                driver="psycopg2",
                username=kwargs.pop("username"),
                password=kwargs.pop("password"),
                database=kwargs.pop("database_name"),
                host=kwargs.pop("host"),
                port=kwargs.pop("port"),
                sslmode="require" if kwargs.pop("use_ssl") else "disable",
                connect_args=connect_args,
            )
        self.query_builder: PostgresQueryBuilder = PostgresQueryBuilder(self.engine)
        self.table_factory: PostgresTableFactory = PostgresTableFactory(
            self.data_source
        )

    def sync_tables(self, tables=None, force=False):
        # "begin" commits and closes the connection when the block exits
        with self.engine.begin() as connection:
            self.table_factory.sync_tables(connection, tables, force)

    def get_table_preview(self, table, limit=100):
        data = self.execute_query(
            f"""select * from "{table}" limit {limit}""", cached=True
        )
        length = self.execute_query(f'''select count(*) from "{table}"''', cached=True)[
            0
        ][0]
        return {
            "data": data or [],
            "length": length or 0,
        }

    def get_table_columns(self, table):
        with self.connect() as connection:
            self.table_factory.db_conn = connection
            return self.table_factory.get_table_columns(table)

    def get_column_options(self, table, column, search_text=None, limit=50):
        query = Select(Column(column)).select_from(Table(table)).distinct().limit(limit)
        if search_text:
            query = query.where(Column(column).like(f"%{search_text}%"))
        query = self.compile_query(query)
        return self.execute_query(query, pluck=True)
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/postgresql.py
|
Python
|
agpl-3.0
| 5,611
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
import pandas as pd
from sqlalchemy import create_engine, text
from insights.insights.doctype.insights_data_source.sources.sqlite import SQLiteDB
from insights.insights.query_builders.sqlite.sqlite_query_builder import (
SQLiteQueryBuilder,
)
from .utils import create_insights_table
class StoredQueryTableFactory:
    # a factory for creating table objects
    # creates a list of tables objects from a list of queries that are marked as stored
    def __init__(self) -> None:
        self.data_source = "Query Store"

    def sync_tables(self, connection, tables=None, force=False):
        # `tables` here is a list of Insights Query docnames, not DB tables
        self.connection = connection
        to_sync = self.get_stored_queries() if tables is None else tables
        for docname in to_sync:
            if not frappe.db.exists("Insights Query", docname):
                continue
            doc = frappe.get_doc("Insights Query", docname)
            # fetch results internally imports them into the db
            # also updates the insights table
            doc.fetch_results()
            force and create_insights_table(self.make_table(doc), force=True)

    def make_table(self, query):
        return frappe._dict(
            {
                "table": query.name,
                "label": query.title,
                "data_source": self.data_source,
                "columns": self.make_columns(query.get_columns()),
            }
        )

    def get_stored_queries(self):
        # get all queries that are marked as stored
        return frappe.get_all("Insights Query", filters={"is_stored": 1}, pluck="name")

    def make_columns(self, columns):
        return [
            frappe._dict(
                {
                    "column": column.label,  # use label as column name
                    "label": column.label,
                    "type": column.type,
                }
            )
            for column in columns
        ]
class QueryStore(SQLiteDB):
    """SQLite-backed store holding the results of "stored" Insights queries."""

    def __init__(self) -> None:
        self.data_source = "Query Store"
        db_file = frappe.get_site_path(
            "private", "files", "insights_query_store.sqlite"
        )
        self.engine = create_engine(f"sqlite:///{db_file}")
        self.table_factory = StoredQueryTableFactory()
        self.query_builder = SQLiteQueryBuilder(self.engine)

    def sync_tables(self, tables=None, force=False):
        with self.engine.begin() as conn:
            self.table_factory.sync_tables(conn, tables, force=force)

    def get_table_columns(self, table):
        # column metadata comes from the query doc, not the sqlite schema
        return frappe.get_doc("Insights Query", table).get_columns()

    def store_query(self, query, results):
        """Persist `results` for `query`; empty results drop the stored table."""
        if not results:
            with self.engine.begin() as conn:
                conn.execute(text(f"DROP TABLE IF EXISTS '{query.name}'"))
            return
        create_insights_table(self.table_factory.make_table(query))
        # first row carries the column headers, the rest are data rows
        header = [col["label"] for col in results[0]]
        frame = pd.DataFrame(results[1:], columns=header, dtype=str)
        frame.to_sql(query.name, self.engine, if_exists="replace", index=False)
def sync_query_store(tables=None, force=False):
    """Sync stored-query tables into the Query Store database."""
    QueryStore().sync_tables(tables, force)
def store_query(query, results):
    """Persist `results` of `query` into the Query Store database."""
    QueryStore().store_query(query, results)
def remove_stored_query(query):
    """Drop the stored table for `query` and delete its Insights Table record."""
    store = QueryStore()
    store.store_query(query, [])  # empty results → drop the table
    frappe.db.delete(
        "Insights Table", {"table": query.name, "data_source": "Query Store"}
    )
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/query_store.py
|
Python
|
agpl-3.0
| 3,686
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
import pandas as pd
from sqlalchemy import column as Column
from sqlalchemy import create_engine
from sqlalchemy import table as Table
from sqlalchemy import text
from sqlalchemy.engine.base import Connection
from insights.insights.query_builders.sqlite.sqlite_query_builder import (
SQLiteQueryBuilder,
)
from insights.utils import detect_encoding
from ...insights_table_import.insights_table_import import InsightsTableImport
from .base_database import BaseDatabase
from .utils import create_insights_table
class SQLiteTableFactory:
    """Discovers tables and columns of a SQLite database and maps them to
    Insights Table metadata dicts."""

    # SQLite storage classes → Insights column types.
    # Class-level constant so the dict is built once, not on every
    # get_column_type call (was a function-local literal before).
    TYPE_MAP = {
        "NULL": "Integer",
        "INTEGER": "Integer",
        "REAL": "Decimal",
        "TEXT": "String",
        "BLOB": "String",
    }

    def __init__(self, data_source) -> None:
        self.db_conn: Connection
        self.data_source = data_source

    def sync_tables(self, connection, tables, force=False):
        """Create/update Insights Table docs for the given tables."""
        self.db_conn = connection
        for table in self.get_tables(table_names=tables):
            # when force is true, it will overwrite the existing columns & links
            create_insights_table(table, force=force)

    def get_tables(self, table_names=None):
        """Return table dicts (with columns attached) for the given names,
        or for every table when `table_names` is None."""
        tables = []
        for table in self.get_db_tables(table_names):
            table.columns = self.get_table_columns(table.table)
            tables.append(table)
        return tables

    def get_db_tables(self, table_names=None):
        # list user tables by querying sqlite_master
        t = Table(
            "sqlite_master",
            Column("name"),
            Column("type"),
        )
        query = t.select().where(t.c.type == "table")
        if table_names:
            query = query.where(t.c.name.in_(table_names))
        tables = self.db_conn.execute(query).fetchall()
        return [self.get_table(table[0]) for table in tables]

    def get_table(self, table_name):
        """Bare table metadata dict (no columns yet)."""
        return frappe._dict(
            {
                "table": table_name,
                "label": frappe.unscrub(table_name),
                "data_source": self.data_source,
            }
        )

    def get_table_columns(self, table_name):
        """Read column names and types via PRAGMA table_info."""
        columns = self.db_conn.execute(
            text(f"PRAGMA table_info({table_name})")
        ).fetchall()
        return [
            frappe._dict(
                {
                    "column": column[1],
                    "label": frappe.unscrub(column[1]),
                    "type": self.get_column_type(column[2]),
                }
            )
            for column in columns
        ]

    def get_column_type(self, column_type):
        """Map a SQLite storage class to an Insights type (default: String)."""
        return self.TYPE_MAP.get(column_type, "String")
class SQLiteDB(BaseDatabase):
    """BaseDatabase implementation backed by a site-local SQLite file."""

    def __init__(self, data_source, database_name) -> None:
        db_file = frappe.get_site_path(
            "private", "files", f"{database_name}.sqlite"
        )
        self.engine = create_engine(f"sqlite:///{db_file}")
        self.data_source = data_source
        self.table_factory = SQLiteTableFactory(data_source)
        self.query_builder = SQLiteQueryBuilder(self.engine)

    def sync_tables(self, tables=None, force=False):
        with self.engine.begin() as conn:
            self.table_factory.sync_tables(conn, tables, force)

    def get_table_preview(self, table, limit=100):
        """First `limit` rows of `table` plus its total row count."""
        rows = self.execute_query(f"""select * from `{table}` limit {limit}""")
        total = self.execute_query(f"""select count(*) from `{table}`""")[0][0]
        return {"data": rows or [], "length": total or 0}

    def get_table_columns(self, table):
        with self.connect() as conn:
            self.table_factory.db_conn = conn
            return self.table_factory.get_table_columns(table)

    def get_column_options(self, table, column, search_text=None, limit=50):
        """Distinct values of `column`, optionally filtered by LIKE match."""
        t = Table(table, Column(column))
        stmt = t.select().distinct().limit(limit)
        if search_text:
            stmt = stmt.where(Column(column).like(f"%{search_text}%"))
        return self.execute_query(self.compile_query(stmt), pluck=True)

    def table_exists(self, table):
        return self.execute_query(
            f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table}'",
        )

    def import_table(self, import_doc: InsightsTableImport):
        """Load the CSV behind `import_doc` into the database (replacing any
        existing table) and register the corresponding Insights Table."""
        encoding = detect_encoding(import_doc._filepath)
        frame = pd.read_csv(import_doc._filepath, encoding=encoding)
        # normalise headers so they match the configured column names
        frame.columns = [frappe.scrub(c) for c in frame.columns]
        wanted = [c.column for c in import_doc.columns]
        frame = frame[wanted]
        frame.to_sql(
            name=import_doc.table_name,
            con=self.engine,
            index=False,
            if_exists="replace",
        )
        column_dicts = [
            frappe._dict(
                {
                    "column": column.column,
                    "label": column.label,
                    "type": column.type,
                }
            )
            for column in import_doc.columns
        ]
        create_insights_table(
            frappe._dict(
                {
                    "table": import_doc.table_name,
                    "label": import_doc.table_label,
                    "data_source": import_doc.data_source,
                    "columns": column_dicts,
                }
            ),
            force=True,
        )
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/sqlite.py
|
Python
|
agpl-3.0
| 5,593
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import time
from typing import TYPE_CHECKING, Callable, Optional
from urllib import parse
import frappe
import sqlparse
from frappe.utils.data import flt
from sqlalchemy import NullPool, create_engine
from sqlalchemy.engine.base import Engine
from insights.cache_utils import make_digest
if TYPE_CHECKING:
from sqlalchemy.engine.interfaces import Dialect
def get_sqlalchemy_engine(connect_args=None, **kwargs) -> Engine:
    """Build a SQLAlchemy engine, either from a raw `connection_string` or
    from individual parameters (dialect, driver, username, password, database,
    host, port); remaining kwargs become URI query parameters.

    Uses NullPool so no connections are held between requests.
    """
    connect_args = connect_args or {}
    if kwargs.get("connection_string"):
        return create_engine(
            kwargs.pop("connection_string"),
            poolclass=NullPool,
            connect_args=connect_args,
            **kwargs,
        )
    dialect = kwargs.pop("dialect")
    driver = kwargs.pop("driver")
    user = kwargs.pop("username")
    # quote the password so special characters don't break the URI
    password = parse.quote(kwargs.pop("password"))
    database = kwargs.pop("database")
    host = kwargs.pop("host", "localhost")
    port = kwargs.pop("port") or 3306
    extra_params = "&".join(f"{k}={v}" for k, v in kwargs.items())
    uri = f"{dialect}+{driver}://{user}:{password}@{host}:{port}/{database}?{extra_params}"
    # TODO: cache the engine by uri
    # FIX: previously passed connect_args={} here, silently discarding the
    # caller-supplied connect_args on this code path
    return create_engine(uri, poolclass=NullPool, connect_args=connect_args)
def create_insights_table(table, force=False):
    """Create or update the Insights Table doc described by `table`.

    table: dict-like with data_source, table, label and optionally
        is_query_based, table_links, columns.
    force: drop and recreate the columns/links child tables.
    Returns the docname of the saved (or unchanged) Insights Table.
    """
    exists = frappe.db.exists(
        "Insights Table",
        {
            "data_source": table.data_source,
            "table": table.table,
            "is_query_based": table.is_query_based or 0,
        },
    )
    doc_before = None
    if docname := exists:
        doc = frappe.get_doc("Insights Table", docname)
        # using doc.get_doc_before_save() doesn't work here
        doc_before = frappe.get_cached_doc("Insights Table", docname)
    else:
        doc = frappe.get_doc(
            {
                "doctype": "Insights Table",
                "data_source": table.data_source,
                "table": table.table,
                "label": table.label,
                "is_query_based": table.is_query_based or 0,
            }
        )
    doc.label = table.label
    if force:
        doc.columns = []
        doc.table_links = []
    for table_link in table.table_links or []:
        if not doc.get("table_links", table_link):
            doc.append("table_links", table_link)
    column_added = False
    for column in table.columns or []:
        # do not overwrite existing columns, since type or label might have been changed
        if any(doc_column.column == column.column for doc_column in doc.columns):
            continue
        doc.append("columns", column)
        column_added = True
    column_removed = False
    column_names = [c.column for c in table.columns]
    # FIX: iterate over a copy — removing items from the list being iterated
    # skips the element after each removal, leaving stale columns behind
    for column in list(doc.columns):
        if column.column not in column_names:
            doc.columns.remove(column)
            column_removed = True
    version = frappe.new_doc("Version")
    # if there's some update to store only then save the doc
    doc_changed = (
        version.update_version_info(doc_before, doc) or column_added or column_removed
    )
    is_new = not exists
    if is_new or doc_changed or force:
        # need to ignore permissions when creating/updating a table in query store
        # a user may have access to create a query and store it, but not to create a table
        doc.save(ignore_permissions=True)
    return doc.name
def parse_sql_tables(sql: str):
    """Extract the table names referenced by FROM/JOIN clauses of `sql`,
    with surrounding quotes stripped."""
    tables = []
    pending = None  # set when a FROM/JOIN keyword was just seen
    for statement in sqlparse.parse(sql):
        for token in statement.tokens:
            if token.ttype is sqlparse.tokens.Keyword:
                lowered = token.value.lower()
                if lowered == "from" or "join" in lowered:
                    pending = lowered
            # the identifier(s) right after FROM/JOIN are the table names
            if pending and isinstance(token, sqlparse.sql.Identifier):
                tables.append(token.get_real_name())
                pending = None
            if pending and isinstance(token, sqlparse.sql.IdentifierList):
                tables.extend(
                    item.get_real_name() for item in token.get_identifiers()
                )
                pending = None
    return [strip_quotes(name) for name in tables]
def get_stored_query_sql(
    sql: str, data_source: Optional[str] = None, dialect: Optional["Dialect"] = None
):
    """
    Takes a native sql query and returns a map of table name to the query along with the subqueries
    For example, if the query is
    SELECT * FROM `QRY-001`
    LEFT JOIN `QRY-002` ON `QRY-001`.`name` = `QRY-002`.`name`
    LEFT JOIN `QRY-003` ON `QRY-001`.`name` = `QRY-003`.`name`
    and QRY-001 = SELECT name FROM `QRY-004`
    and QRY-002 = SELECT name FROM `Customer`
    and QRY-003 = SELECT name FROM `Supplier`
    and QRY-004 = SELECT name FROM `Item`
    Then the returned map will be
    {
        'QRY-001': 'WITH `QRY-004` AS (SELECT name FROM `Item`) SELECT name FROM `QRY-004`',
        'QRY-002': 'SELECT name FROM `Customer`',
        'QRY-003': 'SELECT name FROM `Supplier`'
    }
    If any one of the table belongs to any other data source
    then stop and return None
    """
    # parse the sql to get the table names it references
    sql_tables = parse_sql_tables(sql)
    if not sql_tables:
        return None
    # fetch the stored queries whose names match those tables
    queries = frappe.get_all(
        "Insights Query",
        filters={
            "name": ("in", set(sql_tables)),
            "data_source": data_source,
        },
        fields=["name", "sql", "data_source", "is_native_query"],
    )
    if not queries:
        return None
    # queries = [
    #   { "name": "QRY-001", "sql": "SELECT name FROM `QRY-004`", "data_source": "Query Store" },
    #   { "name": "QRY-002","sql": "SELECT name FROM `Customer`","data_source": "Demo" },
    #   { "name": "QRY-003","sql": "SELECT name FROM `Supplier`","data_source": "Demo" },
    # ]
    stored_query_sql = {}
    # NOTE: The following works because we don't support multiple data sources in a single query
    quoted = make_wrap_table_fn(dialect=dialect, data_source=data_source)
    # NOTE(review): the loop variable shadows the `sql` parameter; the original
    # query string is not needed past this point, but the shadowing is fragile.
    for sql in queries:
        if data_source is None:
            # adopt the data source of the first query encountered
            data_source = sql.data_source
        if data_source and sql.data_source != data_source:
            frappe.throw(
                "Cannot use queries from different data sources in a single query"
            )
        stored_query_sql[sql.name] = sql.sql
        if not sql.is_native_query:
            # non native queries are already processed and stored in the db
            continue
        # native queries may themselves reference stored queries — recurse
        sub_stored_query_sql = get_stored_query_sql(
            sql.sql, data_source, dialect=dialect
        )
        # sub_stored_query_sql = { 'QRY-004': 'SELECT name FROM `Item`' }
        if not sub_stored_query_sql:
            continue
        # prepend a WITH clause defining each sub-query before this query's sql
        cte = "WITH"
        for table, sub_query in sub_stored_query_sql.items():
            cte += f" {quoted(table)} AS ({sub_query}),"
        cte = cte[:-1]  # drop the trailing comma
        stored_query_sql[sql.name] = f"{cte} {sql.sql}"
    return stored_query_sql
def make_wrap_table_fn(
    dialect: Optional["Dialect"] = None, data_source: Optional[str] = None
) -> Callable[[str], str]:
    """Return a function that quotes a table name for the target database.

    Preference order: the SQLAlchemy dialect's own identifier quoting, then a
    backtick/double-quote based on the data source's database type, else the
    identity function.
    """
    if dialect:
        return dialect.identifier_preparer.quote_identifier
    if data_source:
        db_type = frappe.get_cached_value(
            "Insights Data Source", data_source, "database_type"
        )
        quote = "`" if db_type == "MariaDB" else '"'
        return lambda table: f"{quote}{table}{quote}"
    # no context available — leave the name untouched
    return lambda table: table
def process_cte(main_query, data_source=None, dialect=None):
    """
    Replaces stored queries in the main query with the actual query using CTE
    """
    stored_query_sql = get_stored_query_sql(main_query, data_source, dialect=dialect)
    if not stored_query_sql:
        return main_query
    # stored_query_sql maps table name -> SQL, e.g.
    # {
    #   'QRY-001': 'WITH `QRY-004` AS (SELECT name FROM `Item`) SELECT name FROM `QRY-004`',
    #   'QRY-002': 'SELECT name FROM `Customer`',
    # }
    # Prepend a WITH clause that defines each stored query, so the main query
    # can keep referencing them like ordinary tables:
    # WITH `QRY-001` AS (...), `QRY-002` AS (...) SELECT * FROM `QRY-001` ...
    quoted = make_wrap_table_fn(dialect=dialect, data_source=data_source)
    definitions = ", ".join(
        f"{quoted(name)} AS ({sql})" for name, sql in stored_query_sql.items()
    )
    return f"WITH {definitions} {main_query}"
def strip_quotes(table):
    """Remove one pair of matching surrounding quotes (backtick, double or
    single); anything else is returned unchanged."""
    for quote in ("`", '"', "'"):
        if table.startswith(quote) and table.endswith(quote):
            return table[1:-1]
    return table
def add_limit_to_sql(sql, limit=1000):
    """Wrap `sql` in a CTE and cap the number of returned rows at `limit`."""
    inner = str(sql).strip().rstrip(";")
    return f"WITH limited AS ({inner}) SELECT * FROM limited LIMIT {limit};"
def replace_query_tables_with_cte(sql, data_source, dialect=None):
    """Rewrite `sql` so stored-query references become CTEs; logs and throws
    a user-facing error on failure."""
    try:
        cleaned = str(sql).strip().rstrip(";")
        return process_cte(cleaned, data_source=data_source, dialect=dialect)
    except Exception:
        frappe.log_error(title="Failed to process CTE")
        frappe.throw("Failed to replace query tables with CTE")
def compile_query(query, dialect=None):
    """Compile a SQLAlchemy query for `dialect` with literal values inlined."""
    return query.compile(compile_kwargs={"literal_binds": True}, dialect=dialect)
def execute_and_log(conn, sql, data_source, query_name):
    """Run `sql` on `conn`, timing the execution and recording a log entry."""
    with Timer() as timer:
        try:
            result = conn.exec_driver_sql(sql)
        except Exception as e:
            # translates the error and raises a user-facing message;
            # presumably no log entry is written in that case — by design?
            handle_query_execution_error(e)
    create_execution_log(sql, data_source, timer.elapsed, query_name)
    return result
def handle_query_execution_error(e):
    """Translate a database error into a friendlier message via frappe.throw."""
    message = str(e).lower()
    if "duplicate column name" in message:
        frappe.throw(
            "Duplicate column name. Please make sure the column labels are unique."
        )
    if "syntax" in message and "error" in message:
        frappe.throw(
            "Syntax error in the query. Please check the browser console for more details."
        )
    # fall back to the first line of the raw error text
    frappe.throw(str(e).split("\n", 1)[0])
def cache_results(sql, data_source, results):
    """Cache `results` for 5 minutes, keyed by a digest of sql + data source."""
    digest = make_digest(sql, data_source)
    cache_key = f"insights_query_result:{data_source}:{digest}"
    frappe.cache().set_value(
        cache_key,
        frappe.as_json(results),
        expires_in_sec=60 * 5,
    )
def get_cached_results(sql, data_source):
    """Fetch previously cached results for this sql + data source, if any."""
    digest = make_digest(sql, data_source)
    cache_key = f"insights_query_result:{data_source}:{digest}"
    return frappe.parse_json(frappe.cache().get_value(cache_key))
def create_execution_log(sql, data_source, time_taken=0, query_name=None):
    """Insert an Insights Query Execution Log entry with pretty-printed SQL."""
    formatted_sql = sqlparse.format(str(sql), reindent=True, keyword_case="upper")
    log = frappe.get_doc(
        {
            "doctype": "Insights Query Execution Log",
            "data_source": data_source,
            "query": query_name,
            "sql": formatted_sql,
            "time_taken": time_taken,
        }
    )
    log.insert(ignore_permissions=True)
class Timer:
    """Context manager measuring the wall-clock time of its body.

    Usage:
        with Timer() as t:
            ...  # do something
        print(t.elapsed)  # seconds, rounded to 3 decimal places
    """

    def __init__(self):
        self.elapsed = None

    def __enter__(self):
        self.start = time.monotonic()
        return self

    def __exit__(self, *exc_info):
        self.end = time.monotonic()
        # flt() rounds to 3 decimal places
        self.elapsed = flt(self.end - self.start, 3)
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source/sources/utils.py
|
Python
|
agpl-3.0
| 12,192
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import os
from frappe.utils import get_files_path
def get_duckdb_connection_string(data_source):
    """Build a duckdb:/// URI pointing at the data source's private db file."""
    filename = f"{data_source.database_name}.duckdb"
    path = os.path.abspath(os.path.join(get_files_path(is_private=1), filename))
    # drop the leading slash — the three slashes in the scheme supply it
    path = path.lstrip("/")
    return f"duckdb:///{path}"
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/connectors/duckdb.py
|
Python
|
agpl-3.0
| 493
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
import ibis
from frappe import _dict
from ibis import _
from .mariadb import get_mariadb_connection_string
from .postgresql import get_postgres_connection_string
def get_frappedb_connection_string(data_source):
    """Pick the connection-string builder matching the database type."""
    if data_source.database_type == "PostgreSQL":
        return get_postgres_connection_string(data_source)
    return get_mariadb_connection_string(data_source)
def get_sitedb_connection_string():
    """Build a connection string for the current Frappe site's own database."""
    ds = frappe.new_doc("Insights Data Source v3")
    ds.database_type = (
        "PostgreSQL" if frappe.conf.db_type == "postgres" else "MariaDB"
    )
    ds.host = frappe.conf.db_host
    ds.port = frappe.conf.db_port
    ds.database_name = frappe.conf.db_name
    # NOTE(review): username is set to db_name — matches the Frappe convention
    # where the database user equals the database name; confirm for
    # non-standard setups.
    ds.username = frappe.conf.db_name
    ds.password = frappe.conf.db_password
    ds.use_ssl = False
    return get_frappedb_connection_string(ds)
def is_frappe_db(data_source):
    """Heuristic: a Frappe-managed database always has a `tabDocType` table."""
    connection_string = get_frappedb_connection_string(data_source)
    try:
        db = ibis.connect(connection_string)
        # NOTE(review): these SET statements are MySQL/MariaDB syntax; on a
        # PostgreSQL source they would raise and make this return False —
        # confirm whether that is intended.
        db.raw_sql("SET SESSION time_zone='+00:00'")
        db.raw_sql("SET collation_connection = 'utf8mb4_unicode_ci'")
        res = db.raw_sql("SELECT name FROM tabDocType LIMIT 1").fetchall()
        db.con.close()
        return len(res) > 0
    except Exception:
        # any connection/query failure means "not a Frappe database"
        return False
def get_frappedb_table_links(data_source):
    """Derive table links (join hints) from Link and Table fields in both
    standard DocFields and Custom Fields of a Frappe database."""
    db = data_source._get_ibis_backend()
    docfield = db.table("tabDocField")
    custom_field = db.table("tabCustom Field")
    standard_links = (
        docfield.select(_.fieldname, _.fieldtype, _.options, _.parent)
        .filter((_.fieldtype == "Link") | (_.fieldtype == "Table"))
        .execute()
    )
    custom_links = (
        custom_field.select(_.fieldname, _.fieldtype, _.options, _.dt.name("parent"))
        .filter((_.fieldtype == "Link") | (_.fieldtype == "Table"))
        .execute()
    )
    records = standard_links.to_dict(orient="records") + custom_links.to_dict(
        orient="records"
    )
    all_links = []
    for row in records:
        link = _dict(row)
        if link.fieldtype == "Link":
            # Link field: the owning doc's field points at the linked
            # doctype's `name`
            all_links.append(
                _dict(
                    {
                        "left_table": link.options,
                        "left_column": "name",
                        "right_table": link.parent,
                        "right_column": link.fieldname,
                    }
                )
            )
        if link.fieldtype == "Table":
            # Table field: child rows point back via their `parent` column
            all_links.append(
                _dict(
                    {
                        "left_table": link.parent,
                        "left_column": "name",
                        "right_table": link.options,
                        "right_column": "parent",
                    }
                )
            )
    return all_links
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/connectors/frappe_db.py
|
Python
|
agpl-3.0
| 3,173
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
def get_mariadb_connection_string(data_source):
    """Build a mysql:// connection string (utf8mb4), appending SSL parameters
    when the data source has SSL enabled."""
    password = data_source.get_password(raise_exception=False)
    base = (
        f"mysql://{data_source.username}:{password}"
        f"@{data_source.host}:{data_source.port}/{data_source.database_name}"
        "?charset=utf8mb4&use_unicode=true"
    )
    if not data_source.use_ssl:
        return base
    return base + "&ssl=true&ssl_verify_cert=true"
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/connectors/mariadb.py
|
Python
|
agpl-3.0
| 558
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
def get_postgres_connection_string(data_source):
    """Return the explicit connection string when configured, otherwise build
    a postgresql:// URI (with sslmode=require when SSL is enabled)."""
    if data_source.connection_string:
        return data_source.connection_string
    password = data_source.get_password(raise_exception=False)
    uri = (
        f"postgresql://{data_source.username}:{password}"
        f"@{data_source.host}:{data_source.port}/{data_source.database_name}"
    )
    if data_source.use_ssl:
        uri += "?sslmode=require"
    return uri
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/connectors/postgresql.py
|
Python
|
agpl-3.0
| 631
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import os
import frappe
import pandas as pd
from frappe.utils import get_files_path
from insights.utils import detect_encoding
from ...insights_table_import.insights_table_import import InsightsTableImport
def import_table(self, import_doc: InsightsTableImport):
    """Load the CSV behind `import_doc` into `self.engine`, replacing the table.

    NOTE(review): this is a module-level function with a `self` parameter —
    it looks copied from SQLiteDB.import_table and appears intended to be
    bound to a class that provides `self.engine`; confirm before use.
    """
    # detect encoding so non-UTF-8 CSV exports still load
    encoding = detect_encoding(import_doc._filepath)
    df = pd.read_csv(import_doc._filepath, encoding=encoding)
    # normalise headers to match the configured column names
    df.columns = [frappe.scrub(c) for c in df.columns]
    columns_to_import = [c.column for c in import_doc.columns]
    df = df[columns_to_import]
    table = import_doc.table_name
    df.to_sql(
        name=table,
        con=self.engine,
        index=False,
        if_exists="replace",
    )
    # create_insights_table()  # left disabled in the original — TODO confirm
def get_sqlite_connection_string(data_source):
    """Build a sqlite:/// URI pointing at the data source's private db file."""
    filename = f"{data_source.database_name}.sqlite"
    path = os.path.abspath(os.path.join(get_files_path(is_private=1), filename))
    # drop the leading slash — the three slashes in the scheme supply it
    path = path.lstrip("/")
    return f"sqlite:///{path}"
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/connectors/sqlite.py
|
Python
|
agpl-3.0
| 1,156
|
import os
import frappe
import frappe.utils
import ibis
from frappe.utils import get_files_path
from ibis import BaseBackend
WAREHOUSE_DB_NAME = "insights.duckdb"
class DataWarehouse:
    """Local parquet-backed cache of remote tables, queried through DuckDB.

    Remote tables are snapshotted into parquet files under the private files
    folder and exposed through a shared read-only DuckDB connection.
    """

    def __init__(self):
        self.warehouse_path = get_warehouse_folder_path()
        self.db_path = os.path.join(self.warehouse_path, WAREHOUSE_DB_NAME)

    @property
    def db(self) -> BaseBackend:
        # first access: create the database file, then close the write handle
        if not os.path.exists(self.db_path):
            ddb = ibis.duckdb.connect(self.db_path)
            ddb.disconnect()
        # cache one read-only connection on frappe.local so it is reused
        # for the rest of the current request/job
        if WAREHOUSE_DB_NAME not in frappe.local.insights_db_connections:
            ddb = ibis.duckdb.connect(self.db_path, read_only=True)
            frappe.local.insights_db_connections[WAREHOUSE_DB_NAME] = ddb
        return frappe.local.insights_db_connections[WAREHOUSE_DB_NAME]

    def get_table(self, data_source, table_name, use_live_connection=True):
        # live → query the remote database directly,
        # otherwise serve the locally synced parquet copy
        if use_live_connection:
            return self.get_remote_table(data_source, table_name)
        else:
            return self.get_warehouse_table(data_source, table_name)

    def get_warehouse_table(self, data_source, table_name, sync=True):
        """Serve the parquet copy, importing it first when missing (sync=True)."""
        parquet_file = get_parquet_filepath(data_source, table_name)
        warehouse_table = get_warehouse_table_name(data_source, table_name)
        if not os.path.exists(parquet_file):
            if sync:
                self.import_remote_table(data_source, table_name)
                return self.db.read_parquet(parquet_file, table_name=warehouse_table)
            else:
                frappe.throw(
                    f"{table_name} of {data_source} is not synced to the data warehouse."
                )
        if not self.db.list_tables(warehouse_table):
            # register the parquet file with DuckDB on first use
            return self.db.read_parquet(parquet_file, table_name=warehouse_table)
        else:
            return self.db.table(warehouse_table)

    def get_remote_table(self, data_source, table_name):
        # table expression bound to the live remote database
        ds = frappe.get_doc("Insights Data Source v3", data_source)
        remote_db = ds._get_ibis_backend()
        return remote_db.table(table_name)

    def import_remote_table(self, data_source, table_name, force=False):
        """Snapshot a remote table into a parquet file (skipped when the file
        already exists, unless `force`)."""
        path = get_parquet_filepath(data_source, table_name)
        if os.path.exists(path) and not force:
            print(
                f"Skipping creation of parquet file for {table_name} of {data_source} as it already exists. "
                "Skipping insights table creation as well."
            )
            return
        ds = frappe.get_doc("Insights Data Source v3", data_source)
        remote_db = ds._get_ibis_backend()
        table = remote_db.table(table_name)
        if hasattr(table, "creation"):
            # cap the snapshot at the configured limit, keeping the most
            # recently created records; 10_00_000 is 1,000,000 (lakh grouping)
            max_records_to_sync = frappe.db.get_single_value(
                "Insights Settings", "max_records_to_sync"
            )
            max_records_to_sync = max_records_to_sync or 10_00_000
            table = table.order_by(ibis.desc("creation")).limit(max_records_to_sync)
        table.to_parquet(path, compression="snappy")
def get_warehouse_folder_path():
    """Return (creating it if needed) the private folder for warehouse files."""
    folder = os.path.join(
        os.path.realpath(get_files_path(is_private=1)), "insights_data_warehouse"
    )
    if not os.path.exists(folder):
        os.makedirs(folder)
    return folder
def get_warehouse_table_name(data_source, table_name):
    """Namespaced warehouse name: '<scrubbed source>.<scrubbed table>'."""
    return ".".join((frappe.scrub(data_source), frappe.scrub(table_name)))
def get_parquet_filepath(data_source, table_name):
    """Path of the parquet file caching `table_name` of `data_source`."""
    table = get_warehouse_table_name(data_source, table_name)
    return os.path.join(get_warehouse_folder_path(), f"{table}.parquet")
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/data_warehouse.py
|
Python
|
agpl-3.0
| 3,607
|
import frappe
import ibis
from ibis import _
from ibis import selectors as s
from ibis.expr.types import Column, NumericColumn, StringColumn, TimestampColumn, Value
# Aliases exposing ibis operations under short names; get_functions() collects
# every `f_`-prefixed name below into the expression-evaluation context.

# generic functions
f_count = Column.count
f_min = Column.min
f_max = Column.max
f_group_concat = Column.group_concat
f_is_in = Value.isin
f_is_not_in = Value.notin
f_is_set = Value.notnull
f_is_not_set = Value.isnull
f_is_between = Value.between
f_coalesce = Value.coalesce
f_distinct_count = Column.nunique
# conditional aggregates — note the (condition, column) argument order;
# f_sum/f_count resolve at call time, so the forward reference is fine
f_sum_if = lambda condition, column: f_sum(column, where=condition)
f_count_if = lambda condition, column: f_count(column, where=condition)
f_if_else = (
    lambda condition, true_value, false_value: ibis.case()
    .when(condition, true_value)
    .else_(false_value)
    .end()
)
f_case = lambda *args: ibis.case().when(*args).end()
f_sql = lambda query: _.sql(query)
f_asc = ibis.asc
f_desc = ibis.desc

# numeric functions
f_abs = NumericColumn.abs
f_sum = NumericColumn.sum
f_avg = NumericColumn.mean
f_round = NumericColumn.round
f_floor = NumericColumn.floor
f_ceil = NumericColumn.ceil

# string functions
f_lower = StringColumn.lower
f_upper = StringColumn.upper
f_concat = StringColumn.concat
f_replace = StringColumn.replace
f_substring = StringColumn.substr
f_contains = StringColumn.contains
# NOTE(review): parameters are named args/kwargs but are just two positionals
# forwarded to contains — same calling convention as f_contains
f_not_contains = lambda args, kwargs: ~f_contains(args, kwargs)
f_starts_with = StringColumn.startswith
f_ends_with = StringColumn.endswith

# date functions
f_year = TimestampColumn.year
f_quarter = TimestampColumn.quarter
f_month = TimestampColumn.month
f_week_of_year = TimestampColumn.week_of_year
f_day_of_year = TimestampColumn.day_of_year
f_day_of_week = TimestampColumn.day_of_week
f_day = TimestampColumn.day
f_hour = TimestampColumn.hour
f_minute = TimestampColumn.minute
f_second = TimestampColumn.second
f_microsecond = TimestampColumn.microsecond
f_now = ibis.now
f_today = ibis.today
f_format_date = TimestampColumn.strftime
f_date_diff = TimestampColumn.delta
f_start_of = lambda unit, date: None  # TODO: not implemented — returns None
f_is_within = lambda args, kwargs: None  # TODO: not implemented — returns None

# utility functions
# currency helpers with a hard-coded default USD/INR rate of 83
f_to_inr = lambda curr, amount, rate=83: f_if_else(curr == "USD", amount * rate, amount)
f_to_usd = lambda curr, amount, rate=83: f_if_else(curr == "INR", amount / rate, amount)
f_literal = ibis.literal
f_row_number = ibis.row_number
# window helpers: previous/next period value per group; the window groups by
# every non-numeric column except the date column and orders by the date
f_previous_period_value = lambda column, date_column, offset=1: column.lag(offset).over(
    group_by=(~s.numeric() & ~s.matches(date_column)),
    order_by=ibis.asc(date_column),
)
f_next_period_value = lambda column, date_column, offset=1: column.lead(offset).over(
    group_by=(~s.numeric() & ~s.matches(date_column)),
    order_by=ibis.asc(date_column),
)
def get_functions():
    """Build the expression-evaluation context: every module-level `f_`-prefixed
    callable (minus the prefix) plus the whitelisted ibis selectors."""
    context = frappe._dict()
    for name, obj in globals().items():
        if name.startswith("f_"):
            context[name[2:]] = obj
    selectors = frappe._dict()
    for name in get_whitelisted_selectors():
        selectors[name] = getattr(s, name)
    # expose the selectors under both a short and a long alias
    context["s"] = selectors
    context["selectors"] = selectors
    return context
@frappe.whitelist()
def get_function_list():
    """Names available in the expression context, minus private ones."""
    return [name for name in get_functions() if not name.startswith("_")]
def get_whitelisted_selectors():
    """Names of the ibis selectors exported via the module's __all__.

    Selectors decorated with @public are added to __all__ in ibis.selectors
    (see ibis/selectors.py and public.py). Returns an empty list when the
    module defines no __all__.
    """
    # getattr with a default replaces the previous try/except KeyError
    # around s.__dict__["__all__"] — equivalent for module attributes
    return getattr(s, "__all__", [])
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/ibis_functions.py
|
Python
|
agpl-3.0
| 3,509
|
import ast
import time
from io import StringIO
import frappe
import ibis
import numpy as np
import pandas as pd
from frappe.utils.data import flt
from frappe.utils.safe_exec import safe_eval, safe_exec
from ibis import _
from ibis import selectors as s
from ibis.expr.datatypes import DataType
from ibis.expr.operations.relations import DatabaseTable, Field
from ibis.expr.types import Expr
from ibis.expr.types import Table as IbisQuery
from insights.cache_utils import make_digest
from insights.insights.doctype.insights_table_v3.insights_table_v3 import (
InsightsTablev3,
)
from insights.insights.query_builders.sql_functions import handle_timespan
from insights.utils import create_execution_log
from insights.utils import deep_convert_dict_to_dict as _dict
from .ibis_functions import get_functions
class IbisQueryBuilder:
    """Translate a list of Insights query "operations" (source, join, filter,
    summarize, ...) into a single ibis table expression.

    Each operation is applied in order; `self.query` always holds the ibis
    expression built so far. Column references use ibis's deferred `_`
    placeholder, which resolves against the current table at execution time.
    """
    def build(self, operations: list, use_live_connection=True) -> IbisQuery:
        """Apply `operations` sequentially and return the resulting ibis expression."""
        self.query = None
        self.use_live_connection = use_live_connection
        for operation in operations:
            self.query = self.perform_operation(operation)
        return self.query
    def get_table(self, table):
        # Resolve a physical table reference to an ibis table, either against
        # the live data source or the local warehouse copy.
        return InsightsTablev3.get_ibis_table(
            table.data_source,
            table.table_name,
            use_live_connection=self.use_live_connection,
        )
    def perform_operation(self, operation):
        # Dispatch a single operation dict to its handler; unknown types are
        # silently ignored (the current query is returned unchanged).
        operation = _dict(operation)
        if operation.type == "source":
            return self.apply_source(operation)
        elif operation.type == "join":
            return self.apply_join(operation)
        elif operation.type == "union":
            return self.apply_union(operation)
        elif operation.type == "filter":
            return self.apply_filter(operation)
        elif operation.type == "filter_group":
            return self.apply_filter_group(operation)
        elif operation.type == "select":
            return self.apply_select(operation)
        elif operation.type == "rename":
            return self.apply_rename(operation)
        elif operation.type == "remove":
            return self.apply_remove(operation)
        elif operation.type == "mutate":
            return self.apply_mutate(operation)
        elif operation.type == "cast":
            return self.apply_cast(operation)
        elif operation.type == "summarize":
            return self.apply_summary(operation)
        elif operation.type == "order_by":
            return self.apply_order_by(operation)
        elif operation.type == "limit":
            return self.apply_limit(operation)
        elif operation.type == "pivot_wider":
            return self.apply_pivot(operation, "wider")
        elif operation.type == "custom_operation":
            return self.apply_custom_operation(operation)
        return self.query
    def apply_source(self, source_args):
        # First operation of a query: load the base table.
        return self.apply_join(self, join_args) if False else self.get_table(source_args.table)
    def apply_join(self, join_args):
        right_table = self.get_right_table(join_args)
        join_condition = self.translate_join_condition(join_args, right_table)
        # ibis calls a full join "outer"; all other join types map 1:1.
        join_type = "outer" if join_args.join_type == "full" else join_args.join_type
        return self.query.join(
            right_table,
            join_condition,
            how=join_type,
        ).select(~s.endswith("right"))
    def get_table_or_query(self, table_args):
        # A join/union target can be either a physical table or another saved
        # query, which is built recursively with a fresh builder.
        _table = None
        if table_args.type == "table":
            _table = self.get_table(table_args)
        if table_args.type == "query":
            _table = IbisQueryBuilder().build(
                table_args.operations,
                use_live_connection=self.use_live_connection,
            )
        if _table is None:
            frappe.throw("Invalid join table")
        return _table
    def get_right_table(self, join_args):
        """Return the right-hand join table, trimmed to the selected columns
        plus any columns the join condition itself references."""
        right_table = self.get_table_or_query(join_args.table)
        if not join_args.select_columns:
            return right_table
        select_columns = [col.column_name for col in join_args.select_columns]
        # Always keep the join key column(s), even if the user didn't select them.
        if join_args.right_column:
            select_columns.append(join_args.right_column.column_name)
        if join_args.join_condition and join_args.join_condition.right_column:
            select_columns.append(join_args.join_condition.right_column.column_name)
        if join_args.join_condition and join_args.join_condition.join_expression:
            # For expression-based joins, inspect the expression to find which
            # right-table columns it touches and keep those too.
            expression = self.evaluate_expression(
                join_args.join_condition.join_expression.expression,
                additonal_context={
                    "t1": self.query,
                    "t2": right_table,
                },
            )
            right_table_columns = self.get_columns_from_expression(
                expression, table=join_args.table.table_name
            )
            select_columns.extend(right_table_columns)
        return right_table.select(select_columns)
    def get_columns_from_expression(
        self,
        expression: Expr,
        table: str | None = None,
    ):
        # Walk the ibis expression tree for column (Field) references; when
        # `table` is given, only return columns belonging to that table.
        exp_columns = expression.op().find_topmost(Field)
        if not table:
            return list({col.name for col in exp_columns})
        columns = set()
        for col in exp_columns:
            col_table = col.rel.find_topmost(DatabaseTable)[0]
            if col_table and col_table.name == table:
                columns.add(col.name)
        return list(columns)
    def translate_join_condition(self, join_args, right_table):
        """Build the ibis join predicate: either a user-supplied expression
        (with t1/t2 bound to the two tables) or a simple left == right match."""
        def left_eq_right_condition(left_column, right_column):
            if (
                left_column
                and right_column
                and left_column.column_name
                and right_column.column_name
            ):
                rt = right_table
                lc = getattr(_, left_column.column_name)
                rc = getattr(rt, right_column.column_name)
                # Cast left to right's dtype so mismatched column types still join.
                return lc.cast(rc.type()) == rc
            frappe.throw("Join condition is not valid")
        if join_args.join_condition and join_args.join_condition.join_expression:
            return self.evaluate_expression(
                join_args.join_condition.join_expression.expression,
                {
                    "t1": _,
                    "t2": right_table,
                },
            )
        else:
            return left_eq_right_condition(
                join_args.left_column or join_args.join_condition.left_column,
                join_args.right_column or join_args.join_condition.right_column,
            )
    def apply_union(self, union_args):
        other_table = self.get_table_or_query(union_args.table)
        # Ensure both tables have the same columns
        # Add missing columns with None values
        for col, dtype in self.query.schema().items():
            if col not in other_table.columns:
                other_table = other_table.mutate(
                    **{
                        col: ibis.literal(None).cast(dtype).name(col),
                    }
                )
        for col, dtype in other_table.schema().items():
            if col not in self.query.columns:
                self.query = self.query.mutate(
                    **{
                        col: ibis.literal(None).cast(dtype).name(col),
                    }
                )
        return self.query.union(other_table, distinct=union_args.distinct)
    def apply_filter(self, filter_args):
        condition = self.make_filter_condition(filter_args)
        return self.query.filter(condition)
    def make_filter_condition(self, filter_args):
        # Expression filters are evaluated as-is; otherwise build
        # column <operator> value, where value may itself be another column.
        if hasattr(filter_args, "expression") and filter_args.expression:
            return self.evaluate_expression(filter_args.expression.expression)
        filter_column = filter_args.column
        filter_operator = filter_args.operator
        filter_value = filter_args.value
        left = getattr(_, filter_column.column_name)
        operator_fn = self.get_operator(filter_operator)
        if operator_fn is None:
            frappe.throw(f"Operator {filter_operator} is not supported")
        right_column = (
            getattr(_, filter_value.column_name)
            if hasattr(filter_value, "column_name")
            else None
        )
        right_value = right_column or filter_value
        return operator_fn(left, right_value)
    def get_operator(self, operator):
        # Map an operator label to a two-argument ibis predicate builder.
        # NOTE(review): "contains"/"not_contains" pass `y` to LIKE without
        # adding % wildcards — presumably callers supply them; confirm.
        return {
            ">": lambda x, y: x > y,
            "<": lambda x, y: x < y,
            "=": lambda x, y: x == y,
            "!=": lambda x, y: x != y,
            ">=": lambda x, y: x >= y,
            "<=": lambda x, y: x <= y,
            "in": lambda x, y: x.isin(y),
            "not_in": lambda x, y: ~x.isin(y),
            "is_set": lambda x, y: (x.notnull()) & (x != ""),
            "is_not_set": lambda x, y: (x.isnull()) | (x == ""),
            "contains": lambda x, y: x.like(y),
            "not_contains": lambda x, y: ~x.like(y),
            "starts_with": lambda x, y: x.like(f"{y}%"),
            "ends_with": lambda x, y: x.like(f"%{y}"),
            "between": lambda x, y: x.between(y[0], y[1]),
            "within": lambda x, y: handle_timespan(x, y),
        }[operator]
    def apply_filter_group(self, filter_group_args):
        # AND/OR a list of individual filter conditions together.
        filters = filter_group_args.filters
        if not filters:
            return self.query
        logical_operator = filter_group_args.logical_operator
        conditions = [self.make_filter_condition(filter) for filter in filters]
        if logical_operator == "And":
            return self.query.filter(ibis.and_(*conditions))
        elif logical_operator == "Or":
            return self.query.filter(ibis.or_(*conditions))
        frappe.throw(f"Logical operator {logical_operator} is not supported")
    def apply_select(self, select_args):
        select_args = _dict(select_args)
        return self.query.select(select_args.column_names)
    def apply_rename(self, rename_args):
        old_name = rename_args.column.column_name
        # scrub() normalizes the label into a snake_case identifier.
        new_name = frappe.scrub(rename_args.new_name)
        return self.query.rename(**{new_name: old_name})
    def apply_remove(self, remove_args):
        return self.query.drop(*remove_args.column_names)
    def apply_cast(self, cast_args):
        col_name = cast_args.column.column_name
        dtype = self.get_ibis_dtype(cast_args.data_type)
        return self.query.cast({col_name: dtype})
    def get_ibis_dtype(self, data_type):
        # Insights type label -> ibis dtype string.
        return {
            "String": "string",
            "Integer": "int64",
            "Decimal": "float64",
            "Date": "date",
            "Datetime": "timestamp",
            "Time": "time",
            "Text": "string",
        }[data_type]
    def apply_mutate(self, mutate_args):
        # Add a computed column from a user expression, cast to the declared type.
        new_name = frappe.scrub(mutate_args.new_name)
        dtype = self.get_ibis_dtype(mutate_args.data_type)
        new_column = self.evaluate_expression(mutate_args.expression.expression)
        new_column = new_column.cast(dtype)
        return self.query.mutate(**{new_name: new_column})
    def apply_summary(self, summarize_args):
        # GROUP BY dimensions, aggregating each measure.
        aggregates = {
            frappe.scrub(measure.measure_name): self.translate_measure(measure)
            for measure in summarize_args.measures
        }
        group_bys = [
            self.translate_dimension(dimension)
            for dimension in summarize_args.dimensions
        ]
        return self.query.aggregate(**aggregates, by=group_bys)
    def apply_order_by(self, order_by_args):
        order_fn = ibis.asc if order_by_args.direction == "asc" else ibis.desc
        return self.query.order_by(order_fn(order_by_args.column.column_name))
    def apply_limit(self, limit_args):
        return self.query.limit(limit_args.limit)
    def apply_pivot(self, pivot_args, pivot_type):
        rows = {
            dimension.column_name: self.translate_dimension(dimension)
            for dimension in pivot_args["rows"]
        }
        columns = {
            dimension.column_name: self.translate_dimension(dimension)
            for dimension in pivot_args["columns"]
        }
        values = {
            frappe.scrub(measure.measure_name): self.translate_measure(measure)
            for measure in pivot_args["values"]
        }
        if pivot_type == "wider":
            # Cap the pivot at the first 10 distinct column values to keep the
            # resulting column count bounded (requires executing a sub-query).
            names = self.query.select(columns.keys()).distinct().limit(10).execute()
            return (
                self.query.group_by(*rows.values(), *columns.values())
                .aggregate(**values)
                .filter(
                    ibis.or_(
                        *[getattr(_, col).isin(names[col]) for col in columns.keys()]
                    )
                )
                .pivot_wider(
                    id_cols=rows.keys(),
                    names_from=columns.keys(),
                    values_from=values.keys(),
                    values_agg="sum",
                )
            )
        return self.query
    def apply_custom_operation(self, operation):
        # Evaluate an arbitrary user expression with `q` bound to the current query.
        return self.evaluate_expression(
            operation.expression.expression,
            additonal_context={
                "q": self.query,
            },
        )
    def translate_measure(self, measure):
        """Turn a measure spec into an ibis aggregate expression."""
        if measure.column_name == "count" and measure.aggregation == "count":
            return _.count()
        if "expression" in measure:
            column = self.evaluate_expression(measure.expression.expression)
            dtype = self.get_ibis_dtype(measure.data_type)
            measure_name = frappe.scrub(measure.measure_name)
            return column.cast(dtype).name(measure_name)
        column = getattr(_, measure.column_name)
        return self.apply_aggregate(column, measure.aggregation)
    def translate_dimension(self, dimension):
        """Turn a dimension spec into an ibis group-by column, applying date
        granularity bucketing for temporal columns."""
        col = getattr(_, dimension.column_name)
        if (
            dimension.data_type in ["Date", "Time", "Datetime"]
            and dimension.granularity
        ):
            col = self.apply_granularity(col, dimension.granularity)
            # Granularity formatting yields strings; cast back to the declared type.
            col = col.cast(self.get_ibis_dtype(dimension.data_type))
        col = col.name(dimension.column_name)
        return col
    def apply_aggregate(self, column, aggregate_function):
        # NOTE: all aggregates are built eagerly; unknown names raise KeyError.
        return {
            "sum": column.sum(),
            "avg": column.mean(),
            "count": column.count(),
            "min": column.min(),
            "max": column.max(),
            "count_distinct": column.nunique(),
        }[aggregate_function]
    def apply_granularity(self, column, granularity):
        # Truncate a temporal column to the start of its period, formatted
        # as a YYYY-MM-DD string (cast back by translate_dimension).
        if granularity == "week":
            week_starts_on = 6  # assumes index 6 maps to the desired week start — TODO confirm
            day_of_week = column.day_of_week.index().cast("int32")
            adjusted_week_start = (day_of_week - week_starts_on + 7) % 7
            week_start = column - adjusted_week_start.to_interval(unit="D")
            return week_start.strftime("%Y-%m-%d").name(column.get_name())
        if granularity == "quarter":
            year = column.year()
            quarter = column.quarter()
            month = (quarter * 3) - 2  # first month of the quarter
            quarter_start = ibis.date(year, month, 1)
            return quarter_start.strftime("%Y-%m-%d").name(column.get_name())
        format_str = {
            "day": "%Y-%m-%d",
            "month": "%Y-%m-01",
            "year": "%Y-01-01",
        }
        if not format_str.get(granularity):
            frappe.throw(f"Granularity {granularity} is not supported")
        return column.strftime(format_str[granularity]).name(column.get_name())
    def evaluate_expression(self, expression, additonal_context=None):
        """Evaluate a user expression string in a sandboxed context exposing
        `q` (the deferred table), current column names, and helper functions.
        (Parameter spelling `additonal_context` is preserved for callers.)"""
        if not expression or not expression.strip():
            raise ValueError(f"Invalid expression: {expression}")
        context = frappe._dict()
        context.q = _
        context.update(self.get_current_columns())
        context.update(get_functions())
        context.update(additonal_context or {})
        return exec_with_return(expression, context)
    def get_current_columns(self):
        # TODO: handle collisions with function names
        return {col: getattr(_, col) for col in self.query.schema().names}
def execute_ibis_query(
    query: IbisQuery, query_name=None, limit=100, cache=False
) -> pd.DataFrame:
    """Execute an ibis query and return its result as a DataFrame.

    Applies `limit` (when truthy), optionally serves/stores a cached result
    keyed by the generated SQL, logs execution time, and normalizes
    NaT/NaN cells to None.
    """
    if limit:
        query = query.head(limit)
    sql = ibis.to_sql(query)
    if cache and has_cached_results(sql):
        return get_cached_results(sql)
    started_at = time.monotonic()
    result: pd.DataFrame = query.execute()
    elapsed = flt(time.monotonic() - started_at, 3)
    create_execution_log(sql, elapsed, query_name)
    # Replace pandas sentinels with plain None for JSON-friendly output.
    result = result.replace({pd.NaT: None, np.nan: None})
    if cache:
        cache_results(sql, result)
    return result
def get_columns_from_schema(schema: ibis.Schema):
    """Convert an ibis schema into a list of {name, type} column descriptors
    using Insights type names."""
    columns = []
    for column_name, dtype in schema.items():
        columns.append(
            {
                "name": column_name,
                "type": to_insights_type(dtype),
            }
        )
    return columns
def to_insights_type(dtype: DataType):
    """Map an ibis DataType to its Insights type label.

    Checks are ordered; floating and decimal dtypes both map to "Decimal".
    Throws for dtypes with no mapping.
    """
    checks = (
        (dtype.is_string, "String"),
        (dtype.is_integer, "Integer"),
        (dtype.is_floating, "Decimal"),
        (dtype.is_decimal, "Decimal"),
        (dtype.is_timestamp, "Datetime"),
        (dtype.is_date, "Date"),
        (dtype.is_time, "Time"),
        (dtype.is_boolean, "Boolean"),
        (dtype.is_uuid, "UUID"),
    )
    for predicate, type_name in checks:
        if predicate():
            return type_name
    frappe.throw(f"Cannot infer data type for: {dtype}")
def cache_results(sql, result: pd.DataFrame):
    """Cache a query result (as JSON) for one hour, keyed by the SQL digest."""
    key = "insights:query_results:" + make_digest(sql)
    frappe.cache().set_value(key, result.to_json(), expires_in_sec=3600)
def get_cached_results(sql) -> pd.DataFrame:
    """Return the cached DataFrame for this SQL, or None on a cache miss."""
    key = "insights:query_results:" + make_digest(sql)
    cached = frappe.cache().get_value(key)
    if not cached:
        return None
    return pd.read_json(StringIO(cached))
def has_cached_results(sql):
    """True when a cached result exists for this SQL statement."""
    key = "insights:query_results:" + make_digest(sql)
    return frappe.cache().get_value(key) is not None
def exec_with_return(
    code: str,
    _globals: dict | None = None,
    _locals: dict | None = None,
):
    """Execute `code` and return the value produced by its last statement.

    If the last statement is a bare expression, it is popped from the AST,
    the remainder is exec'd, and the expression is then eval'd. If it is an
    (annotated/augmented) assignment, the whole code is exec'd and the
    assignment target is eval'd afterwards.
    """
    a = ast.parse(code)
    last_expression = None
    if a.body:
        if isinstance(a_last := a.body[-1], ast.Expr):
            # Pop the trailing expression so it is not executed twice.
            last_expression = ast.unparse(a.body.pop())
        elif isinstance(a_last, ast.Assign):
            last_expression = ast.unparse(a_last.targets[0])
        elif isinstance(a_last, ast.AnnAssign | ast.AugAssign):
            last_expression = ast.unparse(a_last.target)
    _globals = _globals or {}
    _locals = _locals or {}
    if last_expression:
        safe_exec(ast.unparse(a), _globals, _locals)
        return safe_eval(last_expression, _globals, _locals)
    else:
        # NOTE(review): with no trailing expression/assignment the entire
        # source goes through safe_eval, which presumably only accepts a
        # single expression — multi-statement input would fail here; confirm.
        return safe_eval(code, _globals, _locals)
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/ibis_utils.py
|
Python
|
agpl-3.0
| 18,755
|
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// Placeholder form controller: no custom client-side behavior on refresh yet.
frappe.ui.form.on("Insights Data Source v3", {
	refresh: function (frm) {},
});
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/insights_data_source_v3.js
|
JavaScript
|
agpl-3.0
| 202
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import re
import frappe
import ibis
from frappe.model.document import Document
from ibis import BaseBackend
from insights.insights.doctype.insights_data_source_v3.data_warehouse import (
WAREHOUSE_DB_NAME,
)
from insights.insights.doctype.insights_table_link_v3.insights_table_link_v3 import (
InsightsTableLinkv3,
)
from insights.insights.doctype.insights_table_v3.insights_table_v3 import (
InsightsTablev3,
)
from .connectors.duckdb import get_duckdb_connection_string
from .connectors.frappe_db import (
get_frappedb_connection_string,
get_frappedb_table_links,
get_sitedb_connection_string,
is_frappe_db,
)
from .connectors.mariadb import get_mariadb_connection_string
from .connectors.postgresql import get_postgres_connection_string
from .connectors.sqlite import get_sqlite_connection_string
class InsightsDataSourceDocument:
    """Mixin holding the Frappe document lifecycle hooks (naming, validation,
    update, trash) for Insights Data Source v3."""
    def autoname(self):
        # Document name is the snake_cased title.
        self.name = frappe.scrub(self.title)
    def before_insert(self):
        # The warehouse DB name is reserved, and at most one data source may
        # point at the site's own database.
        if self.name == WAREHOUSE_DB_NAME:
            frappe.throw("Cannot create a Data Source with this name")
        if (
            not frappe.flags.in_migrate
            and self.is_site_db
            and frappe.db.exists("Insights Data Source v3", {"is_site_db": 1})
        ):
            frappe.throw("Only one site database can be configured")
    def on_update(self: "InsightsDataSourcev3"):
        """Re-detect frappe-db-ness when credentials change, refresh the
        Active/Inactive status, and resync the table list if needed."""
        if self.is_site_db and not self.is_frappe_db:
            self.db_set("is_frappe_db", 1)
        credentials_changed = self.has_credentials_changed()
        if (
            not self.is_site_db
            and credentials_changed
            and self.database_type in ["MariaDB", "PostgreSQL"]
        ):
            self.db_set("is_frappe_db", is_frappe_db(self))
        self.status = "Active" if self.test_connection() else "Inactive"
        self.db_set("status", self.status)
        if self.status == "Active" and credentials_changed:
            self.update_table_list()
    def has_credentials_changed(self):
        # Treat a brand-new document (no saved version) as changed.
        doc_before = self.get_doc_before_save()
        if not doc_before:
            return True
        return (
            self.database_name != doc_before.database_name
            or self.password != doc_before.password
            or self.username != doc_before.username
            or self.host != doc_before.host
            or self.port != doc_before.port
            or self.use_ssl != doc_before.use_ssl
        )
    def on_trash(self):
        # Cascade-delete dependent table metadata; the site DB source itself
        # is protected from deletion.
        if self.is_site_db:
            frappe.throw("Cannot delete the site database. It is needed for Insights.")
        linked_doctypes = ["Insights Table v3", "Insights Table Link v3"]
        for doctype in linked_doctypes:
            for name in frappe.db.get_all(
                doctype,
                {"data_source": self.name},
                pluck="name",
            ):
                frappe.delete_doc(doctype, name)
    def validate(self):
        # File-backed databases only need a name; remote ones need full
        # connection details (unless a connection string is supplied).
        if self.is_site_db:
            return
        if self.database_type == "SQLite" or self.database_type == "DuckDB":
            self.validate_database_name()
        else:
            self.validate_remote_db_fields()
    def validate_database_name(self):
        mandatory = ("database_name",)
        for field in mandatory:
            if not self.get(field):
                frappe.throw(f"{field} is mandatory for {self.database_type} Database")
    def validate_remote_db_fields(self):
        # A full connection string supersedes the individual fields.
        if self.connection_string:
            return
        mandatory = ("host", "port", "username", "password", "database_name")
        for field in mandatory:
            if not self.get(field):
                frappe.throw(f"{field} is mandatory for Database")
class InsightsDataSourcev3(InsightsDataSourceDocument, Document):
    """Insights Data Source v3 doctype: manages the ibis connection to an
    external (or the site's own) database and syncs its table metadata."""
    # begin: auto-generated types
    # This code is auto-generated. Do not modify anything in this block.
    from typing import TYPE_CHECKING
    if TYPE_CHECKING:
        from frappe.types import DF
        connection_string: DF.Text | None
        database_name: DF.Data | None
        database_type: DF.Literal["MariaDB", "PostgreSQL", "SQLite", "DuckDB"]
        host: DF.Data | None
        is_frappe_db: DF.Check
        is_site_db: DF.Check
        password: DF.Password | None
        port: DF.Int
        status: DF.Literal["Inactive", "Active"]
        title: DF.Data
        use_ssl: DF.Check
        username: DF.Data | None
    # end: auto-generated types
    def _get_ibis_backend(self) -> BaseBackend:
        """Return an ibis backend for this source, reusing a per-request
        cached connection when available."""
        if self.name in frappe.local.insights_db_connections:
            return frappe.local.insights_db_connections[self.name]
        connection_string = self._get_connection_string()
        db: BaseBackend = ibis.connect(connection_string)
        print(f"Connected to {self.name} ({self.title})")
        if self.database_type == "MariaDB":
            # Normalize the session so results are timezone/collation stable.
            db.raw_sql("SET SESSION time_zone='+00:00'")
            db.raw_sql("SET collation_connection = 'utf8mb4_unicode_ci'")
        frappe.local.insights_db_connections[self.name] = db
        return db
    def _get_connection_string(self):
        # Dispatch by source flavor; frappe-db detection takes precedence
        # over the raw MariaDB/PostgreSQL connectors.
        if self.is_site_db:
            return get_sitedb_connection_string()
        if self.database_type == "SQLite":
            return get_sqlite_connection_string(self)
        if self.database_type == "DuckDB":
            return get_duckdb_connection_string(self)
        if self.is_frappe_db:
            return get_frappedb_connection_string(self)
        if self.database_type == "MariaDB":
            return get_mariadb_connection_string(self)
        if self.database_type == "PostgreSQL":
            return get_postgres_connection_string(self)
        frappe.throw(f"Unsupported database type: {self.database_type}")
    def test_connection(self, raise_exception=False):
        """Run SELECT 1; returns True on success, falsy on failure
        (re-raising only when raise_exception is set)."""
        try:
            db = self._get_ibis_backend()
            res = db.raw_sql("SELECT 1").fetchall()
            return res[0][0] == 1
        except Exception as e:
            frappe.log_error("Testing Data Source connection failed", e)
            if raise_exception:
                raise e
    def update_table_list(self, force=False):
        """Sync the remote table list into Insights Table v3 records,
        skipping internal tables; `force` wipes and re-creates them."""
        blacklist_patterns = ["^_", "^sqlite_"]
        blacklisted = lambda table: any(re.match(p, table) for p in blacklist_patterns)
        remote_db = self._get_ibis_backend()
        tables = remote_db.list_tables()
        tables = [t for t in tables if not blacklisted(t)]
        if force:
            frappe.db.delete(
                "Insights Table v3",
                {"data_source": self.name},
            )
        # Skip the bulk insert when nothing changed (same table count).
        if not tables or len(tables) == frappe.db.count(
            "Insights Table v3",
            {"data_source": self.name},
        ):
            print("No new tables to sync")
            return
        InsightsTablev3.bulk_create(self.name, tables)
        self.update_table_links(force)
    def update_table_links(self, force=False):
        """Refresh inferred table-to-table links (frappe DBs only)."""
        links = []
        if self.is_site_db or self.is_frappe_db:
            links = get_frappedb_table_links(self)
        if force:
            frappe.db.delete(
                "Insights Table Link v3",
                {"data_source": self.name},
            )
        for link in links:
            InsightsTableLinkv3.create(
                self.name,
                link.left_table,
                link.right_table,
                link.left_column,
                link.right_column,
            )
def before_request():
    """Ensure frappe.local carries a per-request cache of open DB connections."""
    local = frappe.local
    if not hasattr(local, "insights_db_connections"):
        local.insights_db_connections = {}
def after_request():
    """Disconnect every DB connection opened during this request,
    swallowing (but logging) disconnect failures."""
    connections = frappe.local.insights_db_connections
    for db in connections.values():
        catch_error(db.disconnect)
def catch_error(fn):
    """Call `fn` and return (result, None); on any exception, print it and
    return (None, exception) instead of raising."""
    try:
        result = fn()
    except Exception as err:
        print(f"Error: {err}")
        return None, err
    return result, None
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/insights_data_source_v3.py
|
Python
|
agpl-3.0
| 7,911
|
import frappe
def execute():
    """copy_data_sources from Insights Data Source to Insights Data Source v3"""
    data_sources = frappe.get_all(
        "Insights Data Source",
        filters={"name": ["not in", ["Query Store", "Site DB"]]},
        pluck="name",
    )
    # Start from a clean slate so the patch is idempotent on re-run.
    frappe.db.delete("Insights Data Source v3")
    for data_source in data_sources:
        try:
            data_source_doc = frappe.get_doc("Insights Data Source", data_source)
            data_source_v3 = frappe.get_doc(
                {
                    "doctype": "Insights Data Source v3",
                    "creation": data_source_doc.creation,
                    "title": data_source_doc.title,
                    "database_type": data_source_doc.database_type,
                    "database_name": data_source_doc.database_name,
                    "username": data_source_doc.username,
                    # Password field is encrypted; copy the decrypted value.
                    "password": data_source_doc.get_password(raise_exception=False),
                    "host": data_source_doc.host,
                    "port": data_source_doc.port,
                    "use_ssl": data_source_doc.use_ssl,
                    "connection_string": data_source_doc.connection_string,
                }
            )
            data_source_v3.insert()
        except Exception as e:
            # Best-effort migration: a single bad source must not abort the patch.
            print(f"Error copying {data_source}: {e}")
|
2302_79757062/insights
|
insights/insights/doctype/insights_data_source_v3/patches/copy_data_sources.py
|
Python
|
agpl-3.0
| 1,350
|
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("Insights Notebook", {
// refresh(frm) {
// },
// });
|
2302_79757062/insights
|
insights/insights/doctype/insights_notebook/insights_notebook.js
|
JavaScript
|
agpl-3.0
| 200
|
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.model.document import Document
class InsightsNotebook(Document):
    """Insights Notebook doctype controller."""
    def on_trash(self):
        # "Uncategorized" is the built-in default notebook and must always exist.
        if self.name == "Uncategorized":
            frappe.throw("Cannot delete the default notebook")
|
2302_79757062/insights
|
insights/insights/doctype/insights_notebook/insights_notebook.py
|
Python
|
agpl-3.0
| 341
|
// Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// frappe.ui.form.on("Insights Notebook Page", {
// refresh(frm) {
// },
// });
|
2302_79757062/insights
|
insights/insights/doctype/insights_notebook_page/insights_notebook_page.js
|
JavaScript
|
agpl-3.0
| 205
|
# Copyright (c) 2023, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class InsightsNotebookPage(Document):
    """Insights Notebook Page doctype controller (no custom behavior)."""
    pass
|
2302_79757062/insights
|
insights/insights/doctype/insights_notebook_page/insights_notebook_page.py
|
Python
|
agpl-3.0
| 228
|
import frappe
from pypika.terms import CustomFunction
def execute():
    """Patch: rename the notebook-page block type "query-builder" to
    "query-editor" in all stored page content."""
    # Early exit avoids the per-page loop when nothing matches.
    # NOTE(review): this count() duplicates the get_all() filter below and is
    # purely an optimization.
    if not frappe.db.count("Insights Notebook Page", {"content": ["like", '%"query-builder"%']}):
        return
    pages = frappe.get_all(
        "Insights Notebook Page",
        filters={"content": ["like", '%"query-builder"%']},
        pluck="name",
    )
    for page in pages:
        content = frappe.get_value("Insights Notebook Page", page, "content")
        content = content.replace('"query-builder"', '"query-editor"')
        # Keep `modified` untouched so the patch doesn't show as a user edit.
        frappe.db.set_value(
            "Insights Notebook Page", page, "content", content, update_modified=False
        )
|
2302_79757062/insights
|
insights/insights/doctype/insights_notebook_page/patches/replace_query_builder_with_editor.py
|
Python
|
agpl-3.0
| 632
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from insights.insights.doctype.insights_data_source.sources.query_store import (
sync_query_store,
)
from insights.utils import InsightsDataSource, InsightsQuery, InsightsTable
from .utils import (
BaseNestedQueryImporter,
Column,
Query,
apply_cumulative_sum,
get_columns_with_inferred_types,
update_sql,
)
DEFAULT_JSON = {
"table": {},
"joins": [],
"columns": [],
"calculations": [],
"filters": [],
"measures": [],
"dimensions": [],
"orders": [],
"limit": None,
}
class InsightsAssistedQueryController:
    """Controller for the "assisted" (visual builder) variant of an Insights
    Query document. Wraps the query-definition JSON stored on `doc.json`."""
    def __init__(self, doc):
        self.doc = doc
    def validate(self):
        # Seed an empty query definition when the JSON is missing/blank.
        if not frappe.parse_json(self.doc.json):
            self.doc.json = frappe.as_json(DEFAULT_JSON)
    def validate_if_all_column_tables_are_selected(self):
        """Ensure every referenced column's table is part of the query
        (expression columns and bare count are exempt)."""
        columns = self.query_json.get_columns()
        tables = self.query_json.get_tables()
        for column in columns:
            if (
                not column.is_valid()
                or column.is_expression()
                or column.get("column") == "count"
            ):
                continue
            if column.get("table") not in tables:
                frappe.throw(
                    msg=f"Table {column.get('table')} for column {column.get('label')} not selected. Add the table to the query and try again.",
                    title="Missing Column Table",
                )
    def before_save(self):
        # Regenerate SQL and re-serialize the (possibly normalized) JSON.
        update_sql(self.doc)
        self.doc.json = frappe.as_json(self.query_json)
    @property
    def query_json(self):
        # Parsed-on-access view of the stored query definition.
        query = frappe.parse_json(self.doc.json)
        return Query(**query)
    def get_columns_from_results(self, results):
        """Merge inferred result-column types with the user-declared column
        metadata (labels, granularity-based formats) from the query JSON."""
        if not results:
            return []
        query_columns = self.query_json.get_columns()
        inferred_column_types = get_columns_with_inferred_types(results)
        if not query_columns:
            return inferred_column_types
        def get_inferred_column_type(result_column):
            # Fall back to "String" when the column wasn't inferred.
            for ic in inferred_column_types:
                if ic.get("label") == result_column.get("label"):
                    return ic.get("type")
            return "String"
        def add_format_options(result_column):
            result_column["format_options"] = {}
            result_column["type"] = get_inferred_column_type(result_column)
            for qc in query_columns:
                label_matches = qc.get("label") == result_column.get("label")
                alias_matches = qc.get("alias") == result_column.get("label")
                if not label_matches and not alias_matches:
                    continue
                result_column["label"] = qc.get("alias") or qc.get("label")
                # temporary fix until we change format_options in result columns from dict to str
                result_column["format_options"] = {"date_format": qc.get("granularity")}
                result_column["type"] = qc.get("type")
                break
            return frappe._dict(result_column)
        # First row of results carries the column descriptors.
        result_columns = results[0]
        return [add_format_options(rc) for rc in result_columns]
    def get_tables_columns(self):
        """Collect the column metadata of every table selected in the query."""
        columns = []
        selected_tables = self.get_selected_tables()
        selected_tables_names = [t.get("table") for t in selected_tables]
        for table in set(selected_tables_names):
            table_doc = InsightsTable.get_doc(
                data_source=self.doc.data_source, table=table
            )
            table_columns = table_doc.get_columns()
            columns += [
                frappe._dict(
                    {
                        **Column(**c.as_dict()),
                        "data_source": self.doc.data_source,
                        "table_label": table_doc.label,
                        "table": table_doc.table,
                    }
                )
                for c in table_columns
            ]
        return columns
    def get_selected_tables(self):
        # Base table plus the right-hand table of every join.
        if not self.query_json.table:
            return []
        tables = [self.query_json.table]
        join_tables = [join.right_table for join in self.query_json.joins]
        return tables + join_tables
    def before_fetch(self):
        # Query Store sources are backed by other queries; sync those
        # sub-queries before executing.
        update_sql(self.doc)
        self.validate_if_all_column_tables_are_selected()
        if self.doc.data_source != "Query Store":
            return
        sub_queries = [
            t.get("table")
            for t in self.get_selected_tables()
            if t.get("table") != self.doc.name
        ]
        sync_query_store(sub_queries)
    def after_fetch(self, results):
        # Post-process cumulative aggregations client-side (SQL can't express them here).
        if not self.has_cumulative_columns():
            return results
        columns = [
            col
            for col in self.query_json.get_columns()
            if col.aggregation and "cumulative" in col.aggregation.lower()
        ]
        return apply_cumulative_sum(columns, results)
    def has_cumulative_columns(self):
        return any(
            col.aggregation and "cumulative" in col.aggregation.lower()
            for col in self.query_json.get_columns()
        )
    def fetch_results(self, additional_filters=None):
        query = self.doc
        if additional_filters:
            query = self.apply_additional_filters(additional_filters)
        return InsightsDataSource.get_doc(self.doc.data_source).run_query(query)
    def apply_additional_filters(self, additional_filters):
        # Inject runtime (e.g. dashboard) filters into the stored JSON copy.
        query_json = self.query_json
        for filter in additional_filters:
            column = filter.get("column")
            value = filter.get("value")
            operator = filter.get("operator")
            query_json.add_filter(column, operator, value)
        self.doc.json = frappe.as_json(query_json)
        return self.doc
    def export_query(self):
        """Export this query's JSON plus the JSON of every query-based
        sub-table it references."""
        subqueries = frappe.get_all(
            "Insights Table",
            filters={
                "table": ["in", self.query_json.get_tables()],
                "is_query_based": 1,
            },
            pluck="table",
        )
        dependencies = {}
        for subquery in subqueries:
            if subquery in dependencies:
                continue
            query = InsightsQuery.get_doc(subquery)
            dependencies[query.name] = frappe.parse_json(query.export())
        return {"query": self.query_json, "subqueries": dependencies}
    def import_query(self, exported_query):
        return AssistedQueryImporter(exported_query, self.doc).import_query()
class AssistedQueryImporter(BaseNestedQueryImporter):
    """Imports an exported assisted query, rewriting references to imported
    sub-queries (which get new names on import) throughout the query JSON."""
    def _update_doc(self):
        self.doc.json = frappe.as_json(self.data.query)
    def _update_subquery_references(self):
        # Every section of the query JSON that can name a table must be rewritten.
        for old_name, new_name in self.imported_queries.items():
            self._rename_subquery_in_table(old_name, new_name)
            self._rename_subquery_in_joins(old_name, new_name)
            self._rename_subquery_in_columns(old_name, new_name)
            self._rename_subquery_in_filters(old_name, new_name)
            self._rename_subquery_in_calculations(old_name, new_name)
            self._rename_subquery_in_measures(old_name, new_name)
            self._rename_subquery_in_dimensions(old_name, new_name)
            self._rename_subquery_in_orders(old_name, new_name)
    def _rename_subquery_in_table(self, old_name, new_name):
        if self.data.query["table"]["table"] == old_name:
            self.data.query["table"]["table"] = new_name
    def _rename_subquery_in_joins(self, old_name, new_name):
        # Joins reference tables in four places: both tables and both key columns.
        for join in self.data.query["joins"]:
            if join["left_table"]["table"] == old_name:
                join["left_table"]["table"] = new_name
            if join["right_table"]["table"] == old_name:
                join["right_table"]["table"] = new_name
            if join["left_column"]["table"] == old_name:
                join["left_column"]["table"] = new_name
            if join["right_column"]["table"] == old_name:
                join["right_column"]["table"] = new_name
    def _rename_subquery_in_columns(self, old_name, new_name):
        for column in self.data.query["columns"]:
            if column["table"] == old_name:
                column["table"] = new_name
    def _rename_subquery_in_filters(self, old_name, new_name):
        for filter in self.data.query["filters"]:
            if filter["column"]["table"] == old_name:
                filter["column"]["table"] = new_name
    def _rename_subquery_in_calculations(self, old_name, new_name):
        for calculation in self.data.query["calculations"]:
            if calculation["table"] == old_name:
                calculation["table"] = new_name
    def _rename_subquery_in_measures(self, old_name, new_name):
        for measure in self.data.query["measures"]:
            if measure["table"] == old_name:
                measure["table"] = new_name
    def _rename_subquery_in_dimensions(self, old_name, new_name):
        for dimension in self.data.query["dimensions"]:
            if dimension["table"] == old_name:
                dimension["table"] = new_name
    def _rename_subquery_in_orders(self, old_name, new_name):
        for order in self.data.query["orders"]:
            if order["table"] == old_name:
                order["table"] = new_name
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_assisted_query.py
|
Python
|
agpl-3.0
| 9,395
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from copy import deepcopy
from json import dumps
import frappe
from frappe.utils.data import cstr
from insights.api.data_sources import fetch_column_values, get_tables
from insights.utils import InsightsDataSource, InsightsQuery, InsightsTable
from ..insights_data_source.sources.query_store import sync_query_store
from .insights_legacy_query_utils import (
convert_into_simple_filter,
convert_to_expression,
)
from .utils import (
BaseNestedQueryImporter,
apply_cumulative_sum,
get_columns_with_inferred_types,
update_sql,
)
DEFAULT_FILTERS = dumps(
{
"type": "LogicalExpression",
"operator": "&&",
"level": 1,
"position": 1,
"conditions": [],
},
indent=2,
)
class InsightsLegacyQueryClient:
    """Whitelisted (UI-callable) methods for the legacy visual query builder.

    This class is mixed into the `Insights Query` document class, so `self`
    is the query document itself: `self.tables`, `self.columns`,
    `self.filters` and `self.data_source` are document fields.
    """

    @frappe.whitelist()
    def add_table(self, table):
        """Append a table row ({label, table}) to the query and save."""
        new_table = {
            "label": table.get("label"),
            "table": table.get("table"),
        }
        self.append("tables", new_table)
        self.save()

    @frappe.whitelist()
    def update_table(self, table):
        """Replace the join config of the table row matching `table.name`."""
        for row in self.tables:
            if row.get("name") != table.get("name"):
                continue
            if table.get("join"):
                # join config is stored as pretty-printed JSON text;
                # `default=cstr` stringifies values json can't serialize
                row.join = dumps(
                    table.get("join"),
                    default=cstr,
                    indent=2,
                )
            else:
                row.join = ""
            self.save()
            return

    @frappe.whitelist()
    def remove_table(self, table):
        """Remove the table row matching `table.name` and save."""
        for row in self.tables:
            if row.get("name") == table.get("name"):
                self.remove(row)
                break
        self.save()

    @frappe.whitelist()
    def add_column(self, column):
        """Append a column row; nested values are stored as JSON text."""
        new_column = {
            "type": column.get("type"),
            "label": column.get("label"),
            "table": column.get("table"),
            "column": column.get("column"),
            "table_label": column.get("table_label"),
            "aggregation": column.get("aggregation"),
            "is_expression": column.get("is_expression"),
            "expression": dumps(column.get("expression"), indent=2),
            "format_option": dumps(column.get("format_option"), indent=2),
        }
        self.append("columns", new_column)
        self.save()

    @frappe.whitelist()
    def move_column(self, from_index, to_index):
        """Reorder a column row and renumber `idx` to match the new order."""
        self.columns.insert(to_index, self.columns.pop(from_index))
        for row in self.columns:
            row.idx = self.columns.index(row) + 1
        self.save()

    @frappe.whitelist()
    def update_column(self, column):
        """Update all editable fields of the column row matching `column.name`."""
        for row in self.columns:
            if row.get("name") == column.get("name"):
                row.type = column.get("type")
                row.label = column.get("label")
                row.table = column.get("table")
                row.column = column.get("column")
                row.order_by = column.get("order_by")
                row.aggregation = column.get("aggregation")
                row.table_label = column.get("table_label")
                row.aggregation_condition = column.get("aggregation_condition")
                format_option = column.get("format_option")
                if format_option:
                    # check if format option is an object
                    row.format_option = (
                        dumps(format_option, indent=2)
                        if isinstance(format_option, dict)
                        else format_option
                    )
                expression = column.get("expression")
                if expression:
                    # check if expression is an object
                    row.expression = (
                        dumps(expression, indent=2)
                        if isinstance(expression, dict)
                        else expression
                    )
                break
        self.save()

    @frappe.whitelist()
    def remove_column(self, column):
        """Remove the column row matching `column.name` and save."""
        for row in self.columns:
            if row.get("name") == column.get("name"):
                self.remove(row)
                break
        self.save()

    @frappe.whitelist()
    def update_filters(self, filters):
        """Store the filter tree (empty condition groups pruned) as JSON text."""
        sanitized_conditions = self.sanitize_conditions(filters.get("conditions"))
        filters["conditions"] = sanitized_conditions or []
        self.filters = dumps(filters, indent=2, default=cstr)
        self.save()

    def sanitize_conditions(self, conditions):
        """Recursively drop nested condition groups with zero conditions.

        Mutates `conditions` in place and also returns it; returns None
        for an empty/missing input. Iterates over a deepcopy so removals
        don't skip elements.
        """
        if not conditions:
            return
        _conditions = deepcopy(conditions)
        for idx, condition in enumerate(_conditions):
            if "conditions" not in condition:
                # TODO: validate if condition is valid
                continue
            sanitized_conditions = self.sanitize_conditions(condition.get("conditions"))
            if sanitized_conditions:
                conditions[idx]["conditions"] = sanitized_conditions
            else:
                # remove the condition if it has zero conditions
                conditions.remove(condition)
        return conditions

    @frappe.whitelist()
    def fetch_tables(self):
        """List tables of this data source; include query-based tables when allowed."""
        with_query_tables = frappe.db.get_single_value(
            "Insights Settings", "allow_subquery"
        )
        return get_tables(self.data_source, with_query_tables)

    @frappe.whitelist()
    def fetch_columns(self):
        """Columns of all selected tables, via the variant controller."""
        return self.variant_controller.get_tables_columns()

    @frappe.whitelist()
    def fetch_column_values(self, column, search_text=None):
        """Distinct values of a column, optionally filtered by `search_text`."""
        return fetch_column_values(
            column.get("data_source") or self.data_source,
            column.get("table"),
            column.get("column"),
            search_text,
        )

    @frappe.whitelist()
    def fetch_join_options(self, left_table, right_table):
        """Columns of both tables plus saved links usable as join conditions."""
        left_doc = frappe.get_cached_doc(
            "Insights Table",
            {
                "table": left_table,
                "data_source": self.data_source,
            },
        )
        right_doc = frappe.get_cached_doc(
            "Insights Table",
            {
                "table": right_table,
                "data_source": self.data_source,
            },
        )
        links = []
        for link in left_doc.table_links:
            if link.foreign_table == right_table:
                links.append(
                    frappe._dict(
                        {
                            "left": link.primary_key,
                            "right": link.foreign_key,
                        }
                    )
                )
        return {
            "left_columns": left_doc.get_columns(),
            "right_columns": right_doc.get_columns(),
            "saved_links": links,
        }
class InsightsLegacyQueryValidation:
    """Validation mixin for the legacy query controller.

    `self.doc` is the `Insights Query` document being validated; every
    check raises via `frappe.throw` on failure.
    """

    def validate(self):
        """Run all validations in order."""
        self.validate_tables()
        self.validate_limit()
        self.validate_filters()
        self.validate_columns()

    def validate_tables(self):
        """Selected tables must be visible and belong to the query's data source."""
        tables = [row.table for row in self.doc.tables]
        tables = frappe.get_all(
            "Insights Table",
            filters={"name": ("in", tables)},
            fields=["table", "data_source", "hidden"],
        )
        for table in tables:
            if table.hidden:
                frappe.throw(f"Table {table.table} is hidden. You cannot query it")
            if table.data_source != self.doc.data_source:
                frappe.throw(f"Table {table.table} is not in the same data source")

    def validate_limit(self):
        # limit of 0/None is allowed (means "no explicit limit")
        if self.doc.limit and self.doc.limit < 1:
            frappe.throw("Limit must be greater than 0")

    def validate_filters(self):
        """Fill in the default (empty) filter tree when none is set.

        BUG FIX: the default must be set on the query document
        (`self.doc.filters`); previously it was assigned to
        `self.filters` on the controller, so the document's filters
        field stayed empty.
        """
        if not self.doc.filters:
            self.doc.filters = DEFAULT_FILTERS

    def validate_columns(self):
        if frappe.flags.in_test:
            return
        # check if no duplicate labelled columns
        labels = []
        for row in self.doc.columns:
            if row.label and row.label in labels:
                frappe.throw(f"Duplicate Column {row.label}")
            labels.append(row.label)
class InsightsLegacyQueryController(InsightsLegacyQueryValidation):
    """Controller for legacy (visual builder) queries; wraps the query doc."""

    def __init__(self, doc):
        # doc: the `Insights Query` document being controlled
        self.doc = doc

    def before_save(self):
        # regenerate the stored SQL from the current builder state
        update_sql(self.doc)

    def after_reset(self):
        self.doc.filters = DEFAULT_FILTERS

    def get_columns_from_results(self, results):
        """Merge inferred column types with builder column metadata.

        `results` is a list of rows whose first row holds the column
        descriptors; returns frappe._dicts with `type` and
        `format_options` filled in.
        """
        if not results:
            return []
        query_columns = self.doc.columns
        inferred_column_types = get_columns_with_inferred_types(results)
        if not query_columns:
            return inferred_column_types

        def get_inferred_column_type(result_column):
            # fall back to "String" when no inferred type matches the label
            for ic in inferred_column_types:
                if ic.get("label") == result_column.get("label"):
                    return ic.get("type")
            return "String"

        def add_format_options(result_column):
            # prefer type/format declared on the matching builder column
            result_column["format_options"] = {}
            result_column["type"] = get_inferred_column_type(result_column)
            for qc in query_columns:
                label_matches = qc.get("label") == result_column.get("label")
                column_matches = qc.get("column") == result_column.get("label")
                if not label_matches and not column_matches:
                    continue
                result_column["format_options"] = qc.get("format_option")
                result_column["type"] = qc.get("type")
                break
            return frappe._dict(result_column)

        result_columns = results[0]
        return [add_format_options(rc) for rc in result_columns]

    def get_tables_columns(self):
        """Columns of every selected (and joined) table, as flat dicts."""
        columns = []
        selected_tables = self.get_selected_tables()
        for table in selected_tables:
            table_doc = InsightsTable.get_doc(
                data_source=self.doc.data_source,
                table=table.table,
            )
            table_columns = table_doc.get_columns()
            columns += [
                frappe._dict(
                    {
                        "data_source": self.doc.data_source,
                        "table_label": table.get("label"),
                        "table": table.get("table"),
                        "column": c.get("column"),
                        "label": c.get("label"),
                        "type": c.get("type"),
                    }
                )
                for c in table_columns
            ]
        return columns

    def get_selected_tables(self):
        """Selected table rows plus any tables referenced by join configs."""
        join_tables = []
        for table in self.doc.tables:
            if table.join:
                join = frappe.parse_json(table.join)
                join_tables.append(
                    frappe._dict(
                        table=join.get("with").get("value"),
                        label=join.get("with").get("label"),
                    )
                )
        return self.doc.tables + join_tables

    def before_fetch(self):
        # Query Store tables are themselves queries: sync their stored
        # results before running (skip self to avoid recursion)
        if self.doc.data_source != "Query Store":
            return
        sub_stored_queries = [
            t.table for t in self.get_selected_tables() if t.table != self.doc.name
        ]
        sync_query_store(sub_stored_queries)

    def after_fetch(self, results):
        # cumulative aggregations are applied in python, not in SQL
        if not self.has_cumulative_columns():
            return results
        columns = [
            col
            for col in self.doc.columns
            if col.aggregation and "Cumulative" in col.aggregation
        ]
        return apply_cumulative_sum(columns, results)

    def has_cumulative_columns(self):
        return any(
            col.aggregation and "Cumulative" in col.aggregation
            for col in self.doc.columns
        )

    def fetch_results(self, additional_filters=None):
        """Run the query on its data source, optionally with extra filters."""
        query = self.doc
        if additional_filters:
            query = self.apply_additional_filters(additional_filters)
        return InsightsDataSource.get_doc(self.doc.data_source).run_query(query)

    def apply_additional_filters(self, additional_filters):
        """Merge chart/dashboard filters into the query's stored filter tree.

        A new filter replaces an existing simple filter on the same
        column; otherwise it is appended. Mutates `self.doc.filters`.
        """
        filter_conditions = []
        for chart_filter in additional_filters:
            chart_filter = frappe._dict(chart_filter)
            filter_conditions.append(
                convert_to_expression(
                    chart_filter.column.get("table"),
                    chart_filter.column.get("column"),
                    chart_filter.operator,
                    chart_filter.value,
                    chart_filter.column_type,
                )
            )
        filters = frappe.parse_json(self.doc.filters)
        new_filters = frappe.parse_json(self.doc.filters)
        for new_filter in filter_conditions:
            found = False
            # TODO: FIX: additional_filters was simple filter, got converted to expression, then again converted to simple filter
            if new_simple_filter := convert_into_simple_filter(new_filter):
                for index, exisiting_filter in enumerate(filters.conditions):
                    existing_simple_filter = convert_into_simple_filter(
                        exisiting_filter
                    )
                    if not existing_simple_filter:
                        continue
                    if existing_simple_filter["column"] == new_simple_filter["column"]:
                        new_filters.conditions[index] = new_filter
                        found = True
                        break
            if not found:
                new_filters.conditions.append(new_filter)
        self.doc.filters = dumps(new_filters, indent=2)
        return self.doc

    def export_query(self):
        """Serializable export of the query plus exports of its subqueries."""
        selected_tables = self.get_selected_tables()
        selected_table_names = [table.table for table in selected_tables]
        subqueries = frappe.get_all(
            "Insights Table",
            filters={
                "table": ["in", selected_table_names],
                "is_query_based": 1,
            },
            pluck="table",
        )
        dependencies = {}
        for subquery in subqueries:
            if subquery in dependencies:
                continue
            query = InsightsQuery.get_doc(subquery)
            dependencies[query.name] = frappe.parse_json(query.export())
        query_dict = self.doc.as_dict()
        return {
            "query": {
                "tables": query_dict["tables"],
                "columns": query_dict["columns"],
                "filters": query_dict["filters"],
                "limit": query_dict["limit"],
            },
            "subqueries": dependencies,
        }

    def import_query(self, exported_query):
        return LegacyQueryImporter(exported_query, self.doc).import_query()
class LegacyQueryImporter(BaseNestedQueryImporter):
    """Imports an exported legacy query, remapping subquery names.

    The base class drives the import; `self.imported_queries` maps old
    subquery names to the names they received on import.
    """

    def _update_doc(self):
        # copy the exported builder state onto the target document
        self.doc.set("tables", self.data.query["tables"])
        self.doc.set("columns", self.data.query["columns"])
        self.doc.set("filters", self.data.query["filters"])
        self.doc.set("limit", self.data.query["limit"])

    def _update_subquery_references(self):
        for old_name, new_name in self.imported_queries.items():
            self._rename_subquery_in_table(old_name, new_name)
            self._rename_subquery_in_joins(old_name, new_name)
            self._rename_subquery_in_columns(old_name, new_name)
            self._rename_subquery_in_filters(old_name, new_name)

    def _rename_subquery_in_table(self, old_name, new_name):
        for table in self.data.query["tables"]:
            if table["table"] == old_name:
                table["table"] = new_name

    def _rename_subquery_in_joins(self, old_name, new_name):
        # join configs are JSON text; rewrite the referenced table name
        for table in self.data.query["tables"]:
            if not table["join"]:
                continue
            join = frappe.parse_json(table["join"])
            if join["with"]["value"] == old_name:
                join["with"]["value"] = new_name
                join["with"]["table"] = new_name
                table["join"] = dumps(join, indent=2)

    def _rename_subquery_in_columns(self, old_name, new_name):
        for column in self.data.query["columns"]:
            if column["table"] == old_name:
                column["table"] = new_name

    def _rename_subquery_in_filters(self, old_name, new_name):
        # do a hacky string replace for now
        self.data.query["filters"] = self.data.query["filters"].replace(
            old_name, new_name
        )
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_legacy_query.py
|
Python
|
agpl-3.0
| 16,498
|
"""
Utilities to help convert filters to expressions.
"""
# Operators rendered as SQL-style BinaryExpression nodes.
BINARY_OPERATORS = [
    "=",
    "!=",
    "<",
    ">",
    "<=",
    ">=",
]
# Operators rendered as CallExpression (function call) nodes.
FUNCTION_OPERATORS = [
    "is",
    "in",
    "not_in",
    "between",
    "timespan",
    "starts_with",
    "ends_with",
    "contains",
    "not_contains",
]
def convert_to_expression(table, column, filter_operator, filter_value, value_type):
    """Build an expression tree for one filter; None for unknown operators."""
    if filter_operator in BINARY_OPERATORS:
        builder = make_binary_expression
    elif filter_operator in FUNCTION_OPERATORS:
        builder = make_call_expression
    else:
        return None
    return builder(table, column, filter_operator, filter_value, value_type)
def make_binary_expression(table, column, filter_operator, filter_value, value_type):
    """Return a BinaryExpression node comparing a column to a literal.

    The literal is typed "Number" for Integer/Decimal columns, otherwise
    "String".
    """
    right_type = "Number" if value_type in ("Integer", "Decimal") else "String"
    column_node = {
        "type": "Column",
        "value": {"column": column, "table": table},
    }
    return {
        "type": "BinaryExpression",
        "operator": filter_operator,
        "left": column_node,
        "right": {"type": right_type, "value": filter_value},
    }
def make_call_expression(table, column, filter_operator, filter_value, value_type):
    """Build a CallExpression node: function(column, *literal_args)."""
    operator_function = filter_operator
    if filter_operator == "is":
        # "is" becomes is_set / is_not_set depending on the chosen value
        operator_function = "is_set" if filter_value == "set" else "is_not_set"
    return {
        "type": "CallExpression",
        "function": operator_function,
        "arguments": [
            # first argument is always the column being filtered
            {
                "type": "Column",
                "value": {
                    "column": column,
                    "table": table,
                },
            },
            *make_args_for_call_expression(operator_function, filter_value, value_type),
        ],
    }
def make_args_for_call_expression(operator_function, filter_value, value_type):
    """Build the literal argument nodes that follow the column argument.

    Fixes vs. the original:
    - `is`/`is_set`/`is_not_set` take no value arguments. The caller
      (make_call_expression) rewrites "is" to is_set/is_not_set before
      calling, so the old `== "is"` check never matched and a stray
      string argument ("set"/"not set") was emitted.
    - Numeric detection now also accepts the column types actually
      passed in ("Integer"/"Decimal", matching make_binary_expression)
      in addition to the previous "Number".
    """
    if operator_function in ("is", "is_set", "is_not_set"):
        return []
    numeric = value_type in ("Integer", "Decimal", "Number")
    literal_type = "Number" if numeric else "String"
    if operator_function == "between":
        # "between" takes a comma separated pair, e.g. "10, 20"
        values = [v.strip() for v in filter_value.split(",")]
        return [{"type": literal_type, "value": v} for v in values]
    if operator_function in ["in", "not_in"]:
        # membership filters always pass their values as strings
        return [{"type": "String", "value": v} for v in filter_value]
    return [{"type": literal_type, "value": filter_value}]
def is_string_or_number(arg):
    """True when the node is a literal String or Number node."""
    return arg.get("type") in ("String", "Number")


def is_simple_filter(condition):
    """True when the expression is one column compared against literals.

    Simple filters are the only expressions that can round-trip back to
    the {"column", "operator", "value"} form.
    """
    kind = condition.get("type")
    if kind == "BinaryExpression":
        left_is_column = condition.get("left").get("type") == "Column"
        return left_is_column and is_string_or_number(condition.get("right"))
    if kind == "CallExpression":
        arguments = condition.get("arguments")
        if arguments[0].get("type") != "Column":
            return False
        return all(is_string_or_number(arg) for arg in arguments[1:])
    return False
def convert_into_simple_filter(expression):
    """Convert an expression tree back to {"column", "operator", "value"}.

    Returns None when the expression is missing or is not a "simple"
    filter (one column compared against literal values); callers treat
    None as "skip". Fix: removed a leftover debug `print` that wrote to
    stdout on every non-simple filter.
    """
    if not expression:
        return None
    if not is_simple_filter(expression):
        return None
    if is_binary_operator(expression.get("operator")):
        return {
            "column": expression.get("left").get("value"),
            "operator": expression.get("operator"),
            "value": expression.get("right").get("value"),
        }
    if is_call_function(expression.get("function")):
        column = expression.get("arguments")[0].get("value")
        operator = get_operator_from_call_function(expression.get("function"))
        # make_value_from_call_function returns [label, value]; the label
        # is not needed here
        _, value = make_value_from_call_function(expression)
        return {"column": column, "operator": operator, "value": value}
    return None
# Maps call-expression function names to their UI filter labels.
FILTER_FUNCTIONS = {
    "is": "is",
    "in": "one of",
    "not_in": "not one of",
    "between": "between",
    "timespan": "within",
    "starts_with": "starts with",
    "ends_with": "ends with",
    "contains": "contains",
    "not_contains": "not contains",
}


def get_operator_from_call_function(function_name):
    """Map a call function back to a simple-filter operator name.

    is_set/is_not_set both collapse to "is"; unknown names yield None.
    """
    if FILTER_FUNCTIONS.get(function_name):
        return function_name
    return "is" if "set" in function_name else None


def is_binary_operator(operator):
    """True for comparison operators like "=", "<", ">=", ..."""
    return bool(operator) and operator in BINARY_OPERATORS


def is_call_function(function_name):
    """True when the name maps to a known filter function."""
    if not function_name:
        return False
    operator = get_operator_from_call_function(function_name)
    return bool(FILTER_FUNCTIONS.get(operator))
def make_value_from_call_function(expression):
    """Return [label, value] for a call-expression filter.

    `label` is what the UI shows; `value` is what gets stored. For
    is_set/is_not_set both are fixed strings; arguments[0] is always the
    column, so literal values start at arguments[1].
    """
    function = expression.get("function")
    if function == "is_set":
        return ["Set", "Set"]
    if function == "is_not_set":
        return ["Not Set", "Not Set"]
    arguments = expression.get("arguments")
    if function == "between":
        joined = arguments[1].get("value") + ", " + arguments[2].get("value")
        return [joined, joined]
    if function in ("in", "not_in"):
        values = [arg.get("value") for arg in arguments[1:]]
        if len(values) > 1:
            label = str(len(values)) + " values"
        else:
            label = values[0]
        return [label, values]
    single = arguments[1].get("value")
    return [single, single]
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_legacy_query_utils.py
|
Python
|
agpl-3.0
| 5,376
|
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
// Desk form handlers for the "Insights Query" doctype.
frappe.ui.form.on("Insights Query", {
	refresh(frm) {
		// Offer a manual "Run" button only while the query is waiting to be
		// executed; `run` is a whitelisted method on the document.
		if (frm.doc.status == "Pending Execution") {
			frm.add_custom_button(__("Run"), () => {
				frm.call({
					method: "run",
					doc: frm.doc,
					callback: (r) => {
						// reload to pick up results/status written server-side
						frm.reload_doc();
					},
				});
			});
		}
	},
});
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_query.js
|
JavaScript
|
agpl-3.0
| 404
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import time
from contextlib import suppress
from functools import cached_property
import frappe
from frappe import _dict
from frappe.model.document import Document
from frappe.utils import flt
from insights.decorators import log_error
from insights.insights.doctype.insights_data_source.sources.utils import (
create_insights_table,
)
from insights.utils import (
InsightsChart,
InsightsQueryResult,
InsightsTable,
ResultColumn,
)
from ..insights_data_source.sources.query_store import store_query
from ..insights_table_column.insights_table_column import InsightsTableColumn
from .insights_assisted_query import InsightsAssistedQueryController
from .insights_legacy_query import (
InsightsLegacyQueryClient,
InsightsLegacyQueryController,
)
from .insights_query_client import InsightsQueryClient
from .insights_raw_query import InsightsRawQueryController
from .insights_script_query import InsightsScriptQueryController
from .utils import (
QueryStatus,
apply_cumulative_sum,
apply_pivot_transform,
apply_transpose_transform,
apply_unpivot_transform,
export_query,
)
class InsightsQuery(InsightsLegacyQueryClient, InsightsQueryClient, Document):
    """The `Insights Query` doctype controller.

    Query behaviour (SQL generation, fetching, column inference) is
    delegated to a variant controller chosen from the query-type flags
    (native / assisted / script / legacy).
    """

    def before_validate(self):
        if not self.title and self.name:
            # e.g. "QRY-0001" -> "Query 0001"
            self.title = self.name.replace("-", " ").replace("QRY", "Query")

    def after_insert(self):
        self.link_chart()

    def before_save(self):
        self.variant_controller.before_save()

    def on_update(self):
        self.link_chart()
        self.update_linked_docs()
        # push results into the query store when the query becomes stored
        if self.is_stored and self.has_value_changed("is_stored"):
            store_query(self, self.retrieve_results())

    def link_chart(self):
        # ensure every query has a default chart linked to it
        chart_name = InsightsChart.get_name(query=self.name)
        if not chart_name:
            self.create_default_chart()
        if not self.chart and chart_name:
            self.db_set("chart", chart_name)

    def on_trash(self):
        self.delete_default_chart()
        self.delete_query_results()
        self.delete_insights_table()

    @property
    def is_saved_as_table(self):
        # True when a query-based Insights Table exists for this query
        table_name = frappe.db.exists(
            "Insights Table", {"table": self.name, "is_query_based": 1}
        )
        return bool(table_name)

    @property
    def result_name(self):
        # name of the linked Insights Query Result doc; created lazily
        query_result = InsightsQueryResult.get_name(query=self.name)
        if not query_result:
            InsightsQueryResult.new_doc(query=self.name).db_insert()
            query_result = InsightsQueryResult.get_name(query=self.name)
        return query_result

    @property
    def results_row_count(self):
        return InsightsQueryResult.get_value(
            filters={"query": self.name}, fieldname="results_row_count"
        )

    @cached_property
    def variant_controller(self):
        # pick the controller implementation from the query-type flags
        if self.is_native_query:
            return InsightsRawQueryController(self)
        if self.is_assisted_query:
            return InsightsAssistedQueryController(self)
        if self.is_script_query:
            return InsightsScriptQueryController(self)
        return InsightsLegacyQueryController(self)

    def validate(self):
        self.variant_controller.validate()

    def reset(self):
        """Reset to a fresh state, keeping name, type flags and data source."""
        new_query = frappe.new_doc("Insights Query")
        new_query.name = self.name
        new_query.title = self.name.replace("-", " ").replace("QRY", "Query")
        new_query.data_source = self.data_source
        new_query.is_native_query = self.is_native_query
        new_query.is_assisted_query = self.is_assisted_query
        new_query.is_script_query = self.is_script_query
        new_query_dict = new_query.as_dict(no_default_fields=True)
        self.update(new_query_dict)
        self.status = QueryStatus.SUCCESS.value
        self.update_query_results([])
        self.after_reset()

    def after_reset(self):
        # variant-specific reset hook (optional on controllers)
        if hasattr(self.variant_controller, "after_reset"):
            self.variant_controller.after_reset()

    def create_default_chart(self):
        chart = frappe.new_doc("Insights Chart")
        chart.query = self.name
        chart.save(ignore_permissions=True)
        self.db_set("chart", chart.name, update_modified=False)
        return chart

    def update_query_based_table(self):
        # best effort: failures while syncing the table doc are ignored
        with suppress(Exception):
            create_insights_table(self.make_table())

    def make_table(self):
        """Build the Insights Table stub representing this query."""
        return _dict(
            table=self.name,
            label=self.title,
            is_query_based=1,
            data_source=self.data_source,
            columns=InsightsTableColumn.from_dicts(
                self.get_columns(),
            ),
            table_links=[],
        )

    def get_columns(self):
        return self.get_columns_from_results(self.retrieve_results())

    def update_linked_docs(self):
        # propagate a title change to the linked Insights Table label
        old_self = self.get("_doc_before_save")
        old_title = old_self.title if old_self else None
        if not old_title or old_title == self.title:
            return
        table = frappe.qb.DocType("Insights Table")
        _ = (
            frappe.qb.update(table)
            .set(table.label, self.title)
            .where(table.table == self.name)
            .run()
        )

    def delete_insights_table(self):
        table_name = InsightsTable.get_name(table=self.name)
        frappe.delete_doc_if_exists("Insights Table", table_name)

    def delete_default_chart(self):
        frappe.db.delete("Insights Chart", {"query": self.name})

    def delete_query_results(self):
        InsightsQueryResult.delete_doc(self.result_name)

    def retrieve_results(self, fetch_if_not_cached=False):
        """Return cached results; optionally re-fetch when nothing is cached."""
        if hasattr(self, "_results"):
            return self._results
        if not InsightsQueryResult.exists(query=self.name):
            if fetch_if_not_cached:
                return self.fetch_results()
            return []
        query_result = InsightsQueryResult.get_doc(query=self.name)
        return frappe.parse_json(query_result.results)

    def fetch_results(self, additional_filters=None):
        """Execute the query, post-process results and record status/timing.

        Re-raises on failure after marking the query FAILED; results are
        persisted in the `finally` block either way.
        """
        self.before_fetch()
        self._results = []
        start = time.monotonic()
        try:
            self._results = self.variant_controller.fetch_results(additional_filters)
            self._results = self.after_fetch(self._results)
            self._results = self.process_results_columns(self._results)
            self.db_set(
                {
                    "status": QueryStatus.SUCCESS.value,
                    "execution_time": flt(time.monotonic() - start, 3),
                    "last_execution": frappe.utils.now(),
                },
                update_modified=False,
                commit=True,
            )
        except Exception as e:
            self._results = []
            frappe.db.rollback()
            frappe.log_error(str(e)[:140])
            self.db_set("status", QueryStatus.FAILED.value, commit=True)
            raise
        finally:
            # custom results for dashboard is cached by dashboard
            if not additional_filters:
                self.update_query_results(self._results)
                self.update_query_based_table()
                self.is_stored and store_query(self, self._results)
        return self._results

    def update_query_results(self, results=None):
        """Persist `results` on the linked Insights Query Result doc."""
        results = results or []
        query_result: Document = InsightsQueryResult.get_or_create_doc(query=self.name)
        query_result.update(
            {
                "results": frappe.as_json(results),
                # first row holds the column headers, hence len - 1
                # NOTE(review): evaluates to -1 for an empty result set —
                # confirm whether readers rely on 0 here
                "results_row_count": len(results) - 1,
            }
        )
        with suppress(frappe.exceptions.UniqueValidationError):
            query_result.db_update()

    def before_fetch(self):
        # variant-specific pre-fetch hook (optional on controllers)
        if hasattr(self.variant_controller, "before_fetch"):
            self.variant_controller.before_fetch()

    @log_error(raise_exc=True)
    def process_results_columns(self, results):
        # replace the header row with normalized ResultColumn dicts
        if not results:
            return results
        results[0] = ResultColumn.from_dicts(self.get_columns_from_results(results))
        return results

    def get_columns_from_results(self, results):
        return self.variant_controller.get_columns_from_results(results)

    def after_fetch(self, results):
        if self.transforms:
            results = self.apply_transforms(results)
        results = self.variant_controller.after_fetch(results)
        return results

    def apply_transforms(self, results):
        """Apply the first Pivot/Unpivot/Transpose transform, else any
        CumulativeSum transforms (deduplicated by column)."""
        self.validate_transforms()
        for transform in self.transforms:
            if transform.type == "Pivot":
                return apply_pivot_transform(results, transform.options)
            if transform.type == "Unpivot":
                return apply_unpivot_transform(results, transform.options)
            if transform.type == "Transpose":
                return apply_transpose_transform(results, transform.options)
        cumulative_sum_transforms = [
            t for t in self.transforms if t.type == "CumulativeSum"
        ]
        if cumulative_sum_transforms:
            columns = []
            for transform in cumulative_sum_transforms:
                options = frappe.parse_json(transform.options)
                if not options.get("column") or options.get("column") in columns:
                    continue
                columns.append(options.get("column"))
            return apply_cumulative_sum([{"label": c} for c in columns], results)
        return results

    def validate_transforms(self):
        # Pivot/Unpivot/Transpose are mutually exclusive, at most one each
        pivot_transforms = [t for t in self.transforms if t.type == "Pivot"]
        unpivot_transforms = [t for t in self.transforms if t.type == "Unpivot"]
        transpose_transforms = [t for t in self.transforms if t.type == "Transpose"]
        if len(pivot_transforms) > 1:
            frappe.throw("Only one Pivot transform is allowed")
        if len(unpivot_transforms) > 1:
            frappe.throw("Only one Unpivot transform is allowed")
        if len(transpose_transforms) > 1:
            frappe.throw("Only one Transpose transform is allowed")
        if pivot_transforms and unpivot_transforms:
            frappe.throw("Pivot and Unpivot transforms cannot be used together")
        if pivot_transforms and transpose_transforms:
            frappe.throw("Pivot and Transpose transforms cannot be used together")
        if unpivot_transforms and transpose_transforms:
            frappe.throw("Unpivot and Transpose transforms cannot be used together")

    @frappe.whitelist()
    def get_tables_columns(self):
        return self.variant_controller.get_tables_columns()

    def get_selected_tables(self):
        return self.variant_controller.get_selected_tables()

    def export(self):
        return export_query(self)
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_query.py
|
Python
|
agpl-3.0
| 10,821
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe.utils import cint
from insights.insights.doctype.insights_data_source.sources.query_store import (
remove_stored_query,
)
from insights.insights.doctype.insights_query.insights_assisted_query import (
DEFAULT_JSON,
)
from insights.insights.doctype.insights_query.patches.migrate_old_query_to_new_query_structure import (
convert_classic_to_assisted,
)
class InsightsQueryClient:
    """Whitelisted (UI-callable) methods shared by all query variants.

    Mixed into the `Insights Query` document class, so `self` is the
    query document.
    """

    @frappe.whitelist()
    def set_status(self, status):
        # since status is auto set based on the sql, we need some way to override it
        self.db_set("status", status)

    @frappe.whitelist()
    def duplicate(self):
        """Copy this query to a new doc; returns the new name."""
        new_query = frappe.copy_doc(self)
        new_query.save()
        return new_query.name

    @frappe.whitelist()
    def add_transform(self, type, options):
        """Add or update (keyed by type) a result transform, then re-run."""
        existing = self.get("transforms", {"type": type})
        if existing:
            existing[0].options = frappe.as_json(options)
        else:
            self.append(
                "transforms",
                {
                    "type": type,
                    "options": frappe.as_json(options),
                },
            )
        self.run()

    @frappe.whitelist()
    def reset_transforms(self):
        self.transforms = []
        self.run()

    @frappe.whitelist()
    def set_limit(self, limit):
        validated_limit = cint(limit)
        if not validated_limit or validated_limit < 0:
            frappe.throw("Limit must be a positive integer")
        self.limit = validated_limit
        self.save()

    @frappe.whitelist()
    def run(self):
        """Execute the query and persist results."""
        self.fetch_results()
        self.save()

    @frappe.whitelist()
    def reset_and_save(self):
        self.reset()
        self.save()

    @frappe.whitelist()
    def store(self):
        # mark as stored; the actual sync happens in on_update
        self.is_stored = 1
        self.save()

    @frappe.whitelist()
    def unstore(self):
        remove_stored_query(self)
        self.is_stored = 0
        self.save()

    @frappe.whitelist()
    def convert(self):
        # toggle between native (raw SQL) and the classic builder
        self.is_native_query = not self.is_native_query
        self.save()

    @frappe.whitelist()
    def convert_to_native(self):
        if self.is_native_query:
            return
        self.is_native_query = 1
        self.save()

    @frappe.whitelist()
    def convert_to_assisted(self):
        if self.is_assisted_query:
            return
        self.is_assisted_query = 1
        self.save()

    @frappe.whitelist()
    def save_as_table(self):
        return self.update_insights_table(force=True)

    @frappe.whitelist()
    def delete_linked_table(self):
        return self.delete_insights_table()

    @frappe.whitelist()
    def switch_query_type(self):
        """Toggle assisted mode; migrates classic query JSON when enabling."""
        if self.is_assisted_query:
            self.is_assisted_query = 0
        else:
            self.is_native_query = 0
            self.is_script_query = 0
            self.is_assisted_query = 1
            # keep the existing JSON if the migration yields nothing
            self.json = convert_classic_to_assisted(self) or self.json
        self.save()

    @frappe.whitelist()
    def fetch_related_tables_columns(self, search_txt=None):
        """Columns of selected tables and tables linked to them (assisted only).

        Results are de-duplicated by (table, column).
        """
        if not self.is_assisted_query:
            return []
        if search_txt and not isinstance(search_txt, str):
            frappe.throw("Search query must be a string")
        tables = self.variant_controller.get_selected_tables()
        table_names = [table["table"] for table in tables if table["table"]]
        if not table_names:
            return []
        related_table_names = get_related_table_names(table_names, self.data_source)
        selected_table_cols = get_matching_columns_from(
            table_names, self.data_source, search_txt
        )
        related_table_cols = get_matching_columns_from(
            related_table_names, self.data_source, search_txt
        )
        columns = []
        for col in selected_table_cols + related_table_cols:
            col_added = any(
                col["column"] == column["column"] and col["table"] == column["table"]
                for column in columns
            )
            if col_added:
                continue
            columns.append(
                {
                    "column": col.column,
                    "label": col.label,
                    "type": col.type,
                    "table": col.table,
                    "table_label": col.table_label,
                    "data_source": col.data_source,
                }
            )
        return columns
def get_related_table_names(table_names, data_source):
    """Names of tables linked to/from `table_names` (excluding the inputs)."""
    insights_table = frappe.qb.DocType("Insights Table")
    insights_table_link = frappe.qb.DocType("Insights Table Link")
    # tables that the given tables link to (via their table_links rows)
    referenced_tables = (
        frappe.qb.from_(insights_table)
        .left_join(insights_table_link)
        .on(insights_table.name == insights_table_link.parent)
        .where(
            (insights_table.data_source == data_source)
            & (insights_table.table.isin(table_names))
        )
        .select(insights_table_link.foreign_table)
        .groupby(insights_table_link.foreign_table)
        .run(pluck=True)
    )
    # tables whose links point at the given tables
    referencing_tables = (
        frappe.qb.from_(insights_table)
        .left_join(insights_table_link)
        .on(insights_table.name == insights_table_link.parent)
        .where(
            (insights_table.data_source == data_source)
            & (insights_table_link.foreign_table.isin(table_names))
        )
        .select(insights_table.table)
        .groupby(insights_table.table)
        .run(pluck=True)
    )
    return list(set(referenced_tables + referencing_tables) - set(table_names))
def get_matching_columns_from(tables, data_source, search_txt=None, limit=200):
    """Columns of `tables` whose name or label matches `search_txt`.

    Matching is a substring LIKE on either column name or label; at most
    `limit` rows are returned as dicts.
    """
    if not tables:
        return []
    if not search_txt or not isinstance(search_txt, str):
        search_txt = ""
    insights_table = frappe.qb.DocType("Insights Table")
    insights_table_column = frappe.qb.DocType("Insights Table Column")
    fields_to_select = [
        insights_table_column.column,
        insights_table_column.label,
        insights_table_column.type,
        insights_table.table,
        insights_table.data_source,
        insights_table.label.as_("table_label"),
    ]
    # always-true condition when there is no search text
    search_cond = insights_table.name.isnotnull()
    if search_txt:
        # NOTE(review): search_txt is embedded in the LIKE pattern, so
        # user-supplied % / _ act as wildcards — confirm this is intended
        column_matches = insights_table_column.column.like(f"%{search_txt}%")
        label_matches = insights_table_column.label.like(f"%{search_txt}%")
        search_cond = column_matches | label_matches
    return (
        frappe.qb.from_(insights_table)
        .left_join(insights_table_column)
        .on(insights_table.name == insights_table_column.parent)
        .select(*fields_to_select)
        .where(
            (insights_table.data_source == data_source)
            & insights_table.table.isin(tables)
            & (search_cond)
        )
        .groupby(insights_table.table, insights_table_column.column)
        .limit(limit)
        .run(as_dict=True)
    )
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_query_client.py
|
Python
|
agpl-3.0
| 7,038
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from .utils import InsightsDataSource, get_columns_with_inferred_types, update_sql
class InsightsRawQueryController:
    """Controller for queries written as raw SQL (`is_native_query`)."""

    def __init__(self, doc):
        self.doc = doc

    def validate(self):
        # raw SQL is not validated client-side
        pass

    def before_save(self):
        # regenerate the stored SQL representation of the doc
        update_sql(self.doc)

    def get_columns_from_results(self, results):
        """Infer column metadata from the result set; [] when empty."""
        return get_columns_with_inferred_types(results) if results else []

    def before_fetch(self):
        pass

    def after_fetch(self, results):
        # raw queries apply no post-processing
        return results

    def get_tables_columns(self):
        # table/column introspection is not available for raw SQL
        return []

    def get_selected_tables(self):
        return []

    def fetch_results(self, additional_filters=None):
        data_source = InsightsDataSource.get_doc(self.doc.data_source)
        return data_source.run_query(self.doc)

    def export_query(self):
        return {"sql": self.doc.sql}

    def import_query(self, exported_query):
        self.doc.sql = exported_query.get("sql")
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_raw_query.py
|
Python
|
agpl-3.0
| 1,062
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
import pandas as pd
from frappe.utils.password import get_decrypted_password
from frappe.utils.safe_exec import safe_exec
from insights import notify
from insights.utils import ResultColumn
from .utils import get_columns_with_inferred_types
# Raised when a script-query helper is misused (e.g. bad query name).
class ScriptQueryExecutionError(frappe.ValidationError):
    pass
class InsightsScriptQueryController:
    """Controller for script queries: user Python run via frappe's safe_exec."""

    def __init__(self, doc):
        self.doc = doc

    def validate(self):
        pass

    def before_save(self):
        pass

    def get_columns_from_results(self, results):
        if not results:
            return []
        return get_columns_with_inferred_types(results)

    def fetch_results(self, additional_filters=None):
        """Run the stored script; the script must assign a `results` variable."""
        script = self.doc.script
        if not script:
            return []

        def get_value(variable):
            # script variables are stored encrypted; decrypt on use
            return get_decrypted_password(
                variable.doctype, variable.name, "variable_value"
            )

        results = []
        try:
            self.reset_script_log()
            variables = self.doc.get("variables") or []
            variables = {var.variable_name: get_value(var) for var in variables}
            _locals = {"results": results, **variables}
            safe_exec(
                script,
                _globals=get_globals(),
                _locals=_locals,
                restrict_commit_rollback=True,
            )
            self.update_script_log()
            results = _locals["results"]
        except Exception as e:
            # surface the script error to the user with context
            frappe.throw(
                f"Error while executing script: {e}",
                title="Insights Script Query Error",
            )
        results = self.validate_and_sanitize_results(results)
        return results

    def reset_script_log(self):
        self.doc.db_set(
            "script_log",
            "",
            commit=True,
            update_modified=False,
        )

    def update_script_log(self):
        # persist whatever the script logged via frappe's debug log
        self.doc.db_set(
            "script_log",
            "\n".join(frappe.debug_log),
            commit=True,
            update_modified=False,
        )

    def validate_and_sanitize_results(self, results: list | pd.DataFrame | None):
        """Normalize script output to [columns_row, *value_rows]; [] if unusable."""
        if (
            (results is None)
            or (isinstance(results, list) and not results)
            or (isinstance(results, pd.DataFrame) and results.empty)
        ):
            notify(
                "The script should declare a variable named 'results' that contains the data."
            )
            return []
        if isinstance(results, pd.DataFrame):
            results = results.fillna("")
            columns = [ResultColumn.from_args(col) for col in results.columns]
            values = results.values.tolist()
            return [columns, *values]
        if not all(isinstance(row, list) for row in results):
            notify("All rows should be lists.")
            return []
        if all(isinstance(col, str) for col in results[0]):
            # first row looks like plain header names; normalize them
            new_columns = [ResultColumn.from_args(col) for col in results[0]]
            return [new_columns] + results[1:]
        return results

    def before_fetch(self):
        return

    def after_fetch(self, results):
        return results

    def get_tables_columns(self):
        return []

    def get_selected_tables(self):
        return []
def get_globals():
    """Build the restricted globals exposed to user scripts via safe_exec.

    Returns a dict with a ``pandas`` namespace (read helpers plus a DataFrame
    whose disk-writing methods are disabled) and ``get_query_results``.

    BUG FIX: the previous implementation assigned
    ``pandas.DataFrame.to_csv = lambda ...`` — but ``pandas.DataFrame`` was the
    real :class:`pd.DataFrame`, so the assignment monkey-patched pandas
    globally, silently disabling ``to_csv``/``to_json`` for ALL server code,
    not just sandboxed scripts. A subclass keeps the restriction local.
    """

    class RestrictedDataFrame(pd.DataFrame):
        # disk-write methods are mocked out inside the sandbox
        def to_csv(self, *args, **kwargs):
            return None

        def to_json(self, *args, **kwargs):
            return None

        @property
        def _constructor(self):
            # keep derived frames (slices, copies) restricted as well
            return RestrictedDataFrame

    def _restricted(func):
        # wrap readers so the frames they return are also restricted
        def wrapper(*args, **kwargs):
            return RestrictedDataFrame(func(*args, **kwargs))

        return wrapper

    pandas = frappe._dict()
    pandas.DataFrame = RestrictedDataFrame
    pandas.read_csv = _restricted(pd.read_csv)
    pandas.json_normalize = _restricted(pd.json_normalize)
    return {
        "pandas": pandas,
        "get_query_results": get_query_results,
    }
def get_query_results(query_name):
    """Fetch the (cached or freshly computed) results of another Insights Query."""
    if not isinstance(query_name, str):
        raise ScriptQueryExecutionError("Query name should be a string.")
    query_doc = frappe.get_doc("Insights Query", query_name)
    return query_doc.retrieve_results(fetch_if_not_cached=True)
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/insights_script_query.py
|
Python
|
agpl-3.0
| 4,145
|
import frappe
def execute():
    """
    Flatten the columns in the JSON of all assisted queries in the Insights Query doctype.
    """
    assisted_queries = frappe.get_all(
        "Insights Query", filters={"is_assisted_query": 1}, fields=["name", "json"]
    )
    list_fields = ("columns", "calculations", "measures", "dimensions", "orders")
    for row in assisted_queries:
        parsed = frappe.parse_json(row.json)
        # each of these fields may hold dict-wrapped columns from the old format
        for field in list_fields:
            parsed[field] = [flatten_if_needed(c) for c in parsed.get(field) or []]
        frappe.db.set_value("Insights Query", row.name, "json", frappe.as_json(parsed))
    frappe.db.commit()
def flatten_if_needed(column):
    """The old format stored columns wrapped in a dict; unwrap those, pass others through."""
    return column.get("column") if isinstance(column, dict) else column
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/patches/flatten_columns_in_query_json.py
|
Python
|
agpl-3.0
| 1,022
|
import frappe
def execute():
    """make_query_variable_value_password_field"""
    if not frappe.db.exists("DocType", "Insights Query Variable"):
        return
    # snapshot plaintext values BEFORE reloading the schema (which makes the
    # field a password field)
    saved_variables = frappe.get_all(
        "Insights Query Variable", fields=["name", "parent", "variable_value"]
    )
    frappe.reload_doc("insights", "doctype", "insights_query")
    frappe.reload_doc("insights", "doctype", "insights_query_variable")
    for saved in saved_variables:
        parent_doc = frappe.get_doc("Insights Query", saved["parent"])
        for variable in parent_doc.variables:
            if variable.name != saved["name"]:
                continue
            # re-setting and saving stores the value through the password field
            variable.variable_value = saved["variable_value"]
            parent_doc.save()
            break
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/patches/make_query_variable_value_password_field.py
|
Python
|
agpl-3.0
| 744
|
import frappe
from insights.insights.doctype.insights_query.insights_legacy_query_utils import (
convert_into_simple_filter,
is_simple_filter,
)
def execute():
    """Convert classic queries (with SQL but no variant flag) to assisted queries."""
    legacy_filters = {
        "is_assisted_query": 0,
        "is_native_query": 0,
        "is_script_query": 0,
        "sql": ("is", "set"),
    }
    for row in frappe.get_all("Insights Query", filters=legacy_filters):
        doc = frappe.get_doc("Insights Query", row.name)
        doc.db_set("json", convert_classic_to_assisted(doc), update_modified=False)
        doc.db_set("is_assisted_query", 1, update_modified=False)
def convert_classic_to_assisted(old_query):
    """Translate a classic query doc into the assisted-query JSON structure."""
    # a convertible query needs SQL and at least one base table
    if not old_query.sql or not old_query.tables or not old_query.tables[0].table:
        return "{}"
    assisted = {
        "table": get_table(old_query),
        "joins": get_joins(old_query),
        "filters": get_filters(old_query),
        "columns": get_columns(old_query),
        "calculations": [],
        "measures": [],
        "dimensions": [],
        "orders": [],
        "limit": old_query.limit,
    }
    return frappe.as_json(assisted)
def get_table(old_query):
    """Return the query's base table as a {table, label} dict."""
    base = old_query.tables[0]
    return {"table": base.table, "label": base.label}
def get_joins(old_query):
    """Convert each table row carrying join JSON into the assisted join format."""
    joins = []
    for table in old_query.tables:
        raw_join = table.get("join")
        if not raw_join:
            continue
        join_data = frappe.parse_json(raw_join)
        right_table = join_data.get("with") or {}
        condition = join_data.get("condition") or {}
        left_column = condition.get("left")
        right_column = condition.get("right")
        # skip incomplete join definitions
        if not (right_table and condition and left_column and right_column):
            continue
        converted = {
            "join_type": join_data.get("type"),
            "left_table": {
                "table": table.get("table"),
                "label": table.get("label"),
            },
            "left_column": {
                "table": table.get("table"),
                "column": left_column.get("value"),
                "label": left_column.get("label"),
            },
            "right_table": {
                "table": right_table.get("value"),
                "label": right_table.get("label"),
            },
            "right_column": {
                "table": right_table.get("value"),
                "column": right_column.get("value"),
                "label": right_column.get("label"),
            },
        }
        joins.append(converted)
    return joins
def get_filters(old_query):
    """Convert legacy filter conditions into assisted-query filter dicts."""
    converted = []
    old_filters = frappe.parse_json(old_query.get("filters"))
    for condition in old_filters.get("conditions"):
        if condition.get("is_expression"):
            converted.append(
                {
                    "expression": {
                        "raw": condition.get("raw"),
                        "ast": condition,
                    }
                }
            )
            continue
        # non-expression conditions are only kept if they map to a simple filter
        if not is_simple_filter(condition):
            continue
        simple = convert_into_simple_filter(condition)
        op = simple.get("operator")
        val = simple.get("value")
        converted.append(
            {
                "column": {
                    "table": simple.get("column").get("table"),
                    "column": simple.get("column").get("column"),
                },
                "operator": {
                    "label": OPERATOR_MAP.get(op),
                    "value": op,
                },
                "value": {
                    "label": val,
                    "value": val,
                },
                "expression": {},
            }
        )
    return converted
def get_columns(old_query):
    """Convert legacy column rows into assisted-query column dicts."""
    converted = []
    for column in old_query.get("columns"):
        expression = (
            frappe.parse_json(column.get("expression"))
            if column.get("is_expression")
            else {}
        )
        format_option = frappe.parse_json(column.get("format_option"))
        granularity = format_option.get("date_format") if format_option else ""
        converted.append(
            {
                "table": column.get("table"),
                "table_label": column.get("table_label"),
                "column": column.get("column"),
                "label": column.get("label"),
                # the old label doubles as the new alias
                "alias": column.get("label"),
                "type": column.get("type"),
                "aggregation": (column.get("aggregation") or "").lower(),
                "order": column.get("order_by"),
                "expression": expression,
                "granularity": granularity,
            }
        )
    return converted
# Maps legacy filter operator symbols/keys to the human-readable labels
# used by the assisted-query filter format.
OPERATOR_MAP = {
    "=": "equals",
    "!=": "not equals",
    "is": "is",
    "contains": "contains",
    "not_contains": "not contains",
    "starts_with": "starts with",
    "ends_with": "ends with",
    "in": "one of",
    "not_in": "not one of",
    ">": "greater than",
    "<": "smaller than",
    ">=": "greater than equal to",
    "<=": "smaller than equal to",
    "between": "between",
    "timespan": "within",
}
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/patches/migrate_old_query_to_new_query_structure.py
|
Python
|
agpl-3.0
| 5,311
|
import frappe
def execute():
    """rename untitled query to query name"""
    untitled = frappe.get_all(
        "Insights Query",
        fields=["name", "title"],
        filters={"title": "Untitled Query"},
    )
    for query in untitled:
        # e.g. "QRY-0001" -> "Query 0001"
        new_title = query.name.replace("-", " ").replace("QRY", "Query")
        frappe.db.set_value("Insights Query", query.name, "title", new_title)
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/patches/rename_untitled_query_to_query_name.py
|
Python
|
agpl-3.0
| 434
|
import frappe
def execute():
    # set chart name for existing insights query
    # Backfill: link each query to one matching chart (limit 1), only where
    # no chart is linked yet.
    frappe.db.sql(
        """
        UPDATE `tabInsights Query` q
        SET chart = (select name from `tabInsights Chart` where query = q.name limit 1)
        WHERE chart IS NULL
        """
    )
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/patches/set_chart_name.py
|
Python
|
agpl-3.0
| 294
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from dataclasses import dataclass
from enum import Enum
import frappe
import pandas as pd
import sqlparse
from frappe import _dict
from insights.utils import InsightsDataSource, ResultColumn
class QueryStatus(Enum):
    # Lifecycle states stored on the Insights Query `status` field.
    # The string values are persisted; do not change them.
    PENDING = "Pending Execution"
    SUCCESS = "Execution Successful"
    FAILED = "Execution Failed"
def update_sql(query):
    # Rebuild the SQL for `query` from its data source and, if it changed,
    # clear cached results and mark the query pending re-execution.
    query.status = QueryStatus.SUCCESS.value
    if not query.data_source:
        return
    data_source = InsightsDataSource.get_doc(query.data_source)
    sql = data_source.build_query(query)
    sql = format_query(sql)
    # unchanged SQL keeps the SUCCESS status set above
    if query.sql == sql:
        return
    query.sql = sql
    query.update_query_results()
    # empty SQL means nothing to execute, so it stays "successful"
    query.status = QueryStatus.PENDING.value if sql else QueryStatus.SUCCESS.value
def format_query(query):
    """Pretty-print `query` as uppercase-keyword SQL; falsy input yields ''."""
    if not query:
        return ""
    return sqlparse.format(
        str(query),
        keyword_case="upper",
        reindent_aligned=True,
        strip_comments=True,  # see: process_cte in sources/utils.py
    )
def apply_pivot_transform(results, options):
    # Pivot `results` ([header_row, *data_rows]) so distinct values of the
    # chosen "column" become new columns, grouped by "index", summing "value".
    options = frappe.parse_json(options)
    # all three option labels are required; otherwise pass results through
    if not (options.get("column") and options.get("index") and options.get("value")):
        return results
    # resolve the three options (given as labels) against the header row
    pivot_column = [c for c in results[0] if c["label"] == options.get("column")]
    index_column = [c for c in results[0] if c["label"] == options.get("index")]
    value_column = [c for c in results[0] if c["label"] == options.get("value")]
    if not (pivot_column and index_column and value_column):
        frappe.throw("Invalid Pivot Options")
    pivot_column = pivot_column[0]
    index_column = index_column[0]
    value_column = value_column[0]
    if pivot_column["label"] == index_column["label"]:
        frappe.throw("Pivot and Index columns cannot be the same")
    column_names = [d.get("label") for d in results[0]]
    results_df = pd.DataFrame(results[1:], columns=column_names)
    pivot_column_values = results_df[pivot_column["label"]]
    index_column_values = results_df[index_column["label"]]
    value_column_values = results_df[value_column["label"]]
    # build a minimal 3-column frame before pivoting to drop unrelated columns
    pivot_df = pd.DataFrame(
        {
            index_column["label"]: index_column_values,
            pivot_column["label"]: pivot_column_values,
            value_column["label"]: value_column_values,
        }
    )
    pivoted = pivot_df.pivot_table(
        index=index_column["label"],
        columns=pivot_column["label"],
        values=value_column["label"],
        aggfunc="sum",
        fill_value=0,
        sort=False,  # keep first-seen order of index values
    )
    pivoted = pivoted.reset_index()
    pivoted = pivoted.fillna(0)
    # new header: index column keeps its metadata; each pivoted column takes
    # the value column's type
    new_columns = pivoted.columns.to_list()
    result_index_column = ResultColumn.from_dict(index_column)
    result_columns = [
        ResultColumn.from_args(c, value_column["type"]) for c in new_columns[1:]
    ]
    new_columns = [result_index_column] + result_columns
    return [new_columns] + pivoted.values.tolist()
def apply_unpivot_transform(results, options):
    """Melt all non-index columns into (column-name, value) rows."""
    options = frappe.parse_json(options)
    matches = [c for c in results[0] if c["label"] == options.get("index_column")]
    new_column_label = options.get("column_label")
    value_label = options.get("value_label")
    if not (matches and new_column_label and value_label):
        frappe.throw("Invalid Unpivot Options")
    index_column = matches[0]
    frame = pd.DataFrame(
        results[1:], columns=[d.get("label") for d in results[0]]
    )
    melted = frame.melt(
        id_vars=index_column["label"],
        var_name=new_column_label,
        value_name=value_label,
    )
    header = [
        ResultColumn.from_dict(index_column),
        ResultColumn.from_args(new_column_label, "String"),
        ResultColumn.from_args(value_label, "Decimal"),
    ]
    return [header] + melted.values.tolist()
def apply_transpose_transform(results, options):
    """Swap rows and columns, using the chosen index column as the new header."""
    options = frappe.parse_json(options)
    matches = [c for c in results[0] if c["label"] == options.get("index_column")]
    new_column_label = options.get("column_label")
    if not (matches and new_column_label):
        frappe.throw("Invalid Transpose Options")
    index_column = matches[0]
    frame = pd.DataFrame(
        results[1:], columns=[d.get("label") for d in results[0]]
    )
    transposed = frame.set_index(index_column["label"]).transpose().reset_index()
    # drop the residual axis name left over from set_index
    transposed.columns.name = None
    new_columns = transposed.columns.to_list()
    header = [
        ResultColumn.from_args(new_column_label, "String"),
        *[
            ResultColumn.from_args(c, infer_type_from_list(transposed[c]))
            for c in new_columns[1:]
        ],
    ]
    return [header] + transposed.values.tolist()
def apply_cumulative_sum(columns, results):
    """Replace each listed column's values with its running total."""
    if not columns:
        return results
    header = results[0]
    frame = pd.DataFrame(results[1:], columns=[d["label"] for d in header])
    for column in columns:
        label = column.get("label")
        frame[label] = frame[label].astype(float).cumsum().fillna(0)
    return [header] + frame.values.tolist()
def infer_type(value):
    """Classify a scalar: numeric -> Integer/Decimal, else Datetime, else String."""
    try:
        # whole numbers count as Integer, anything else numeric as Decimal
        number = pd.to_numeric(value)
        return "Integer" if number % 1 == 0 else "Decimal"
    except Exception:
        try:
            pd.to_datetime(value)
            return "Datetime"
        except Exception:
            return "String"
def infer_type_from_list(values):
    """Pick a column type for a list of values.

    Priority: any String makes the column String, then Decimal, then
    Integer, then Datetime; an empty/unmatched list defaults to String.
    """
    observed = {infer_type(v) for v in values}
    for candidate in ("String", "Decimal", "Integer", "Datetime"):
        if candidate in observed:
            return candidate
    return "String"
def get_columns_with_inferred_types(results):
    """Build ResultColumns from the header row, typing each from its data values."""
    columns = ResultColumn.from_dicts(results[0])
    labels = [column.label for column in columns]
    frame = pd.DataFrame(results[1:], columns=labels)
    for column in columns:
        column.type = infer_type_from_list(frame[column.label])
    return columns
# assisted query utils
@dataclass
class Column(frappe._dict):
    """A column reference in an assisted query.

    Behaves as a dict (via frappe._dict) while exposing helpers that
    classify the column (aggregate, expression, measure, dimension...).
    """

    def __init__(self, *args, **kwargs):
        self.table = kwargs.get("table")
        self.column = kwargs.get("column")
        self.type = kwargs.get("type") or "String"
        self.order = kwargs.get("order")
        self.aggregation = kwargs.get("aggregation")
        self.expression = frappe.parse_json(kwargs.get("expression", {}))
        # label and alias fall back to each other, then to the column name
        self.label = kwargs.get("label") or kwargs.get("alias") or kwargs.get("column")
        self.alias = kwargs.get("alias") or kwargs.get("label") or kwargs.get("column")
        self.format = kwargs.get("format")
        self.meta = kwargs.get("meta")
        self.granularity = kwargs.get("granularity")

    def __repr__(self) -> str:
        return f"""Column(table={self.table}, column={self.column}, type={self.type}, label={self.label}, alias={self.alias}, aggregation={self.aggregation}, expression={self.is_expression()})"""

    def is_valid(self):
        # valid if it names a real table column, or is a complete expression
        return bool(self.table and self.column) or bool(self.is_expression())

    @staticmethod
    def from_dicts(dicts):
        return [Column(**d) for d in dicts]

    def is_aggregate(self):
        # "custom" and "group by" are pseudo-aggregations, not real aggregates
        return (
            self.aggregation
            and self.aggregation.lower() != "custom"
            and self.aggregation.lower() != "group by"
        )

    def is_expression(self):
        # an expression column needs raw text, a parsed AST, and an alias
        return (
            self.expression
            and self.expression.get("raw")
            and self.expression.get("ast")
            and self.alias
        )

    def is_formatted(self):
        return self.format

    def has_granularity(self):
        return self.is_date_type() and self.granularity

    def is_date_type(self):
        return self.type in ["Date", "Datetime"]

    def is_numeric_type(self):
        return self.type in ["Integer", "Decimal"]

    def is_string_type(self):
        return self.type in ["String", "Text"]

    def is_measure(self):
        # TODO: if is_expression and is_aggregate then it is a measure (can't determine if aggregation is set)
        # NOTE(review): the third clause is redundant — it is already covered
        # by is_numeric_type() in the first clause.
        return (
            self.is_numeric_type()
            or self.is_aggregate()
            or (self.is_expression() and self.is_numeric_type())
        )

    def is_dimension(self):
        return not self.is_measure()
@dataclass
class LabelValue(frappe._dict):
    """A {value, label} pair; the label falls back to the value itself."""

    def __init__(self, *args, **kwargs):
        value = kwargs.get("value")
        self.value = value
        self.label = kwargs.get("label") or value

    def is_valid(self):
        return bool(self.value)
@dataclass
class Table(frappe._dict):
    """A table reference; the label defaults to the table name."""

    def __init__(self, *args, **kwargs):
        table = kwargs.get("table")
        self.table = table
        self.label = kwargs.get("label") or table

    def is_valid(self):
        return bool(self.table)
class JoinColumn(frappe._dict):
    """A column used in a join condition; duplicates the column name as `value`."""

    def __init__(self, *args, **kwargs):
        column = kwargs.get("column")
        self.table = kwargs.get("table")
        self.column = column
        self.value = column
        self.label = kwargs.get("label") or column

    def is_valid(self):
        return bool(self.table and self.column)
@dataclass
class Join(frappe._dict):
    """A join between two tables: type, both tables, and the two join columns."""

    def __init__(self, *args, **kwargs):
        self.left_table = Table(**kwargs.get("left_table"))
        self.right_table = Table(**kwargs.get("right_table"))
        self.join_type = LabelValue(**kwargs.get("join_type"))
        self.left_column = JoinColumn(**kwargs.get("left_column"))
        self.right_column = JoinColumn(**kwargs.get("right_column"))

    def is_valid(self):
        parts = (
            self.left_table,
            self.right_table,
            self.left_column,
            self.right_column,
        )
        return all(part.is_valid() for part in parts)

    @staticmethod
    def from_dicts(dicts):
        # note: filters on dict truthiness (non-empty), not on is_valid()
        return [join for join in (Join(**d) for d in dicts) if join]
@dataclass
class Filter(frappe._dict):
    """A filter: either a column/operator/value triple or a raw expression."""

    def __init__(self, *args, **kwargs):
        self.column = Column(**(kwargs.get("column") or {}))
        self.operator = LabelValue(**(kwargs.get("operator") or {}))
        self.value = LabelValue(**(kwargs.get("value") or {}))
        self.expression = frappe.parse_json(kwargs.get("expression", {}))

    def is_valid(self):
        # a complete expression filter is valid on its own
        if self.expression.get("raw") and self.expression.get("ast"):
            return True
        if not (self.column.is_valid() and self.operator.is_valid()):
            return False
        # "is set" / "is not set" operators need no comparison value
        if self.operator.value in ["is_set", "is_not_set"]:
            return True
        return self.value.is_valid()

    @classmethod
    def from_dicts(cls, dicts):
        # note: filters on dict truthiness (non-empty), not on is_valid()
        return [f for f in (cls(**d) for d in dicts) if f]
@dataclass
class Query(frappe._dict):
    """In-memory representation of an assisted query's JSON payload."""

    def __init__(self, *args, **kwargs):
        self.table = Table(**kwargs.get("table"))
        self.joins = Join.from_dicts(kwargs.get("joins"))
        self.filters = Filter.from_dicts(kwargs.get("filters"))
        self.columns = Column.from_dicts(kwargs.get("columns"))
        self.calculations = Column.from_dicts(kwargs.get("calculations"))
        self.measures = Column.from_dicts(kwargs.get("measures"))
        self.dimensions = Column.from_dicts(kwargs.get("dimensions"))
        self.orders = Column.from_dicts(kwargs.get("orders"))
        self.limit = kwargs.get("limit")

    # not using __bool__ here because of a weird behavior
    # where when __bool__ returns False, and column is empty,
    # json.dumps will return empty dict instead of a dict with empty values
    def is_valid(self):
        return self.table.is_valid()

    def add_filter(self, column, operator, value):
        # Add a filter on `column`, or update the existing filter if that
        # column is already filtered. Bare values/operators are wrapped in
        # {value: ...} dicts first.
        if not isinstance(value, dict):
            value = {"value": value}
        if not isinstance(operator, dict):
            operator = {"value": operator}
        if not column or not isinstance(column, dict):
            frappe.throw("Invalid Column")
        is_filter_applied_to_column = any(
            f.column.column == column.get("column")
            and f.column.table == column.get("table")
            for f in self.filters
            if f.column.is_valid()
        )
        if not is_filter_applied_to_column:
            self.filters.append(Filter(column=column, value=value, operator=operator))
        else:
            # update existing filter
            for f in self.filters:
                if f.column.column == column.get(
                    "column"
                ) and f.column.table == column.get("table"):
                    f.value = LabelValue(**value)
                    f.operator = LabelValue(**operator)
                    break

    def get_tables(self):
        # All table names referenced by the base table and the joins (deduped).
        tables = set()
        tables.add(self.table.table) if self.table else None
        for j in self.joins:
            tables.add(j.left_table.table) if j.left_table else None
            tables.add(j.right_table.table) if j.right_table else None
        return list(tables)

    def get_columns(self):
        return self._extract_columns()

    def _extract_columns(self):
        """
        Extract columns from columns, measures, dimensions
        A column has the following format: { table, column, type, label, alias, format }
        """
        columns = []
        for c in self.columns:
            columns.append(Column(**c))
        for c in self.measures:
            columns.append(Column(**c))
        for c in self.dimensions:
            columns.append(Column(**c))
        return [c for c in columns if c]
def export_query(doc):
    """Serialize a query doc (controller data plus metadata) for export."""
    if not hasattr(doc.variant_controller, "export_query"):
        frappe.throw("The selected query type does not support exporting")
    if doc.is_assisted_query:
        query_type = "assisted"
    elif doc.is_native_query:
        query_type = "native"
    else:
        query_type = "legacy"
    metadata = {
        "data_source": doc.data_source,
        "title": doc.title,
        "transforms": doc.transforms,
        "is_saved_as_table": doc.is_saved_as_table,
        "type": query_type,
    }
    return frappe._dict(
        data=doc.variant_controller.export_query(),
        metadata=metadata,
    )
def import_query(data_source, query):
    """Create a new Insights Query on `data_source` from an exported payload."""
    query = frappe.parse_json(query)
    query.metadata = _dict(query.metadata)
    query_doc = frappe.new_doc("Insights Query")
    query_doc.data_source = data_source
    query_doc.title = query.metadata.title
    query_type = query.metadata.type
    query_doc.is_assisted_query = query_type == "assisted"
    query_doc.is_native_query = query_type == "native"
    query_doc.is_legacy_query = query_type == "legacy"
    query_doc.set("transforms", query.metadata.transforms)
    query_doc.variant_controller.import_query(query.data)
    query_doc.save(ignore_permissions=True)
    if query.metadata.is_saved_as_table:
        query_doc.update_insights_table(force=True)
    # fetching results can be slow, so run it in the background
    frappe.enqueue_doc(
        "Insights Query",
        query_doc.name,
        "fetch_results",
        queue="long",
    )
    return query_doc.name
class BaseNestedQueryImporter:
    """Base class for importing a query together with the subqueries it
    references; subclasses rewrite references and update the target doc."""

    def __init__(self, data: dict, doc, imported_queries=None):
        self.doc = doc
        self.data = frappe._dict(data)
        # maps original subquery name -> newly imported query name
        self.imported_queries = imported_queries or {}

    def import_query(self):
        self._import_subqueries()
        self._update_subquery_references()
        self._update_doc()

    def _import_subqueries(self):
        if not self.data.subqueries:
            return
        for name, subquery in self.data.subqueries.items():
            # skip subqueries already imported in this session
            if name in self.imported_queries:
                continue
            # FIX: imported_queries is not updated with the subqueries of the subquery
            new_name = import_query(self.doc.data_source, subquery)
            self.imported_queries[name] = new_name

    def _update_subquery_references(self):
        # subclass hook: rewrite old subquery names to the imported names
        raise NotImplementedError

    def _update_doc(self):
        # subclass hook: apply the rewritten data onto self.doc
        raise NotImplementedError
|
2302_79757062/insights
|
insights/insights/doctype/insights_query/utils.py
|
Python
|
agpl-3.0
| 16,743
|
// Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
// For license information, please see license.txt
frappe.ui.form.on("Insights Query Chart", {
	// Desk form events for Insights Query Chart; no custom handlers yet.
	// refresh: function(frm) {
	// }
});
|
2302_79757062/insights
|
insights/insights/doctype/insights_query_chart/insights_query_chart.js
|
JavaScript
|
agpl-3.0
| 205
|
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from json import dumps
import frappe
from frappe import _dict
from frappe.model.document import Document
class InsightsQueryChart(Document):
    @frappe.whitelist()
    def update_doc(self, doc):
        """Overwrite title, type and config from a client-side doc dict."""
        doc = _dict(doc)
        self.title = doc.title
        self.type = doc.type
        self.config = dumps(doc.config, indent=2)
        self.save()

    @frappe.whitelist()
    def add_to_dashboard(self, dashboard):
        """Append this chart as an item on the given dashboard."""
        item = {
            "item_type": "Chart",
            "chart": self.name,
        }
        frappe.get_doc("Insights Dashboard", dashboard).add_item(item)
|
2302_79757062/insights
|
insights/insights/doctype/insights_query_chart/insights_query_chart.py
|
Python
|
agpl-3.0
| 758
|