Skip to content

Commit

Permalink
refactor: use parse requests in dataflow processor info
Browse files Browse the repository at this point in the history
  • Loading branch information
Ellpeck committed Feb 6, 2024
1 parent 53d69de commit c0eb3fc
Show file tree
Hide file tree
Showing 5 changed files with 11 additions and 11 deletions.
2 changes: 1 addition & 1 deletion src/core/slicer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ export class SteppingSlicer<InterestedIn extends StepName | undefined = typeof L
break
case 2:
step = guardStep('dataflow')
result = executeSingleSubStep(step, this.results.normalize as NormalizedAst)
result = executeSingleSubStep(step, this.request, this.results.normalize as NormalizedAst)
break
case 3:
step = guardStep('ai')
Expand Down
2 changes: 1 addition & 1 deletion src/core/steps.ts
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ export const STEPS_PER_FILE = {
} satisfies IStep<typeof normalize>,
'dataflow': {
description: 'Construct the dataflow graph',
processor: a => produceDataFlowGraph(a),
processor: (r, a) => produceDataFlowGraph(r, a),
required: 'once-per-file',
printer: {
[StepOutputFormat.Internal]: internalPrinter,
Expand Down
6 changes: 3 additions & 3 deletions src/dataflow/extractor.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import type { NormalizedAst, ParentInformation, RAssignmentOp, RBinaryOp} from '../r-bridge'
import type {NormalizedAst, ParentInformation, RAssignmentOp, RBinaryOp, RParseRequest} from '../r-bridge'
import { RType } from '../r-bridge'
import type { DataflowInformation } from './internal/info'
import type { DataflowProcessorInformation, DataflowProcessors} from './processor'
Expand Down Expand Up @@ -48,13 +48,13 @@ const processors: DataflowProcessors<any> = {
[RType.ExpressionList]: processExpressionList,
}

export function produceDataFlowGraph<OtherInfo>(ast: NormalizedAst<OtherInfo & ParentInformation>, initialScope: DataflowScopeName = LocalScope): DataflowInformation {
export function produceDataFlowGraph<OtherInfo>(request: RParseRequest, ast: NormalizedAst<OtherInfo & ParentInformation>, initialScope: DataflowScopeName = LocalScope): DataflowInformation {
return processDataflowFor<OtherInfo>(ast.ast, {
completeAst: ast,
activeScope: initialScope,
environments: initializeCleanEnvironments(),
processors: processors as DataflowProcessors<OtherInfo & ParentInformation>,
currentPath: 'initial',
currentRequest: request,
sourceReferences: new Map<string, string[]>()
})
}
Expand Down
7 changes: 4 additions & 3 deletions src/dataflow/internal/process/functions/source.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ export function processSourceCall<OtherInfo>(functionCall: RFunctionCall<OtherIn

// TODO we shouldn't skip a re-analysis *always*, just when it's a cycle - right?
// check if the sourced file has already been dataflow analyzed, and if so, skip it
if(data.sourceReferences.has(path)) {
if(data.sourceReferences.has(JSON.stringify(request))) {

This comment has been minimized.

Copy link
@EagleoutIce

EagleoutIce Feb 6, 2024

Member

maybe outsource this to a parseRequestFingerprint just so that we have set and get uniformly?
Theoretically, thanks to the slicing, we have objectHash from the object-hash library available for this. With this we could theoretically start by building a "real" hash map abstraction (internet help us)

dataflowLogger.info(`Sourced file ${path} was already dataflow analyzed, skipping`)
return information
}
Expand All @@ -45,10 +45,11 @@ export function processSourceCall<OtherInfo>(functionCall: RFunctionCall<OtherIn
}

// make the currently analyzed file remember that it already referenced the path
data.sourceReferences.set(data.currentPath, [...(data.sourceReferences.get(data.currentPath) ?? []), path])
const currRequest = JSON.stringify(data.currentRequest)
data.sourceReferences.set(currRequest, [...(data.sourceReferences.get(currRequest) ?? []), path])

const normalized = executeSingleSubStep('normalize', parsed, executor.getTokenMap(), undefined, fileNameDeterministicCountingIdGenerator(path)) as NormalizedAst<OtherInfo & ParentInformation>
const dataflow = processDataflowFor(normalized.ast, {...data, currentPath: path, environments: information.environments})
const dataflow = processDataflowFor(normalized.ast, {...data, currentRequest: request, environments: information.environments})

// update our graph with the sourced file's information
const newInformation = {...information}
Expand Down
5 changes: 2 additions & 3 deletions src/dataflow/processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import type {
NormalizedAst,
ParentInformation, RNode,
RNodeWithParent
RNodeWithParent, RParseRequest
} from '../r-bridge'
import type { DataflowInformation } from './internal/info'
import type { DataflowScopeName, REnvironmentInformation } from './environments'
Expand All @@ -27,8 +27,7 @@ export interface DataflowProcessorInformation<OtherInfo> {
* Other processors to be called by the given functions
*/
readonly processors: DataflowProcessors<OtherInfo>
// TODO using "initial" as the default path doesn't allow us to skip re-sourcing the initial file - how do we find out the initial file's name/path?
readonly currentPath: string | 'initial'
readonly currentRequest: RParseRequest
readonly sourceReferences: Map<string, string[]>
}

Expand Down

0 comments on commit c0eb3fc

Please sign in to comment.