# Configure.jl — pre-flight configuration checks and distributed-worker setup
# (343 lines, 11.2 KB; GitHub page chrome and gutter line-numbers removed from scrape)
# Numeric type used for cheap smoke tests on workers (e.g. remote random-tree generation).
const TEST_TYPE = Float32
"""
    test_operator(op::F, x::T, y=nothing) where {F,T}

Evaluate `op` on the sample input(s) and raise a descriptive error if the
operator either throws (i.e. is not defined over the whole domain) or returns
a value whose type differs from its input type `T`. Pass only `x` to test a
unary operator; pass both `x` and `y` for a binary operator.
"""
function test_operator(op::F, x::T, y=nothing) where {F,T}
    is_unary = y === nothing
    result = try
        is_unary ? op(x) : op(x, y)
    catch e
        # Surface a user-actionable message instead of the raw domain error:
        error(
            "The operator `$(op)` is not well-defined over the " *
            ((T <: Complex) ? "complex plane, " : "real line, ") *
            "as it threw the error `$(typeof(e))` when evaluating the " *
            (is_unary ? "input $(x). " : "inputs $(x) and $(y). ") *
            "You can work around this by returning " *
            "NaN for invalid inputs. For example, " *
            "`safe_log(x::T) where {T} = x > 0 ? log(x) : T(NaN)`.",
        )
    end
    # Operators must be type-stable: output type must match input type.
    if !(result isa T)
        error(
            "The operator `$(op)` returned an output of type `$(typeof(result))`, " *
            "when it was given " *
            (is_unary ? "an input $(x) " : "inputs $(x) and $(y) ") *
            "of type `$(T)`. " *
            "Please ensure that your operators return the same type as their inputs.",
        )
    end
    return nothing
end
# 99 evenly spaced probe points on [-100, 100] used to exercise operators
# across a range of magnitudes and signs (includes negatives and zero-crossing).
const TEST_INPUTS = collect(range(-100, 100; length=99))
"""
    assert_operators_well_defined(T, options::AbstractOptions)

Probe every configured unary and binary operator over a grid of sample values
of element type `T`, erroring (via `test_operator`) on any operator that
throws or is not type-stable.
"""
function assert_operators_well_defined(T, options::AbstractOptions)
    # Build the probe grid in the requested numeric type; complex types get a
    # matching imaginary part so both axes are exercised.
    inputs = if T <: Complex
        [convert(T, v + v * im) for v in TEST_INPUTS]
    else
        [convert(T, v) for v in TEST_INPUTS]
    end
    for x in inputs, y in inputs, op in options.operators.binops
        test_operator(op, x, y)
    end
    for x in inputs, op in options.operators.unaops
        test_operator(op, x)
    end
end
# Check for errors before they happen
"""
    test_option_configuration(parallelism, datasets, options, verbosity)

Validate a search configuration before launching: determinism vs parallelism,
thread availability, dimensional-constraint penalties, anonymous operators,
operator well-definedness, and binary/unary operator overlap.
"""
function test_option_configuration(
    parallelism, datasets::Vector{D}, options::AbstractOptions, verbosity
) where {T,D<:Dataset{T}}
    if options.deterministic && parallelism != :serial
        error("Determinism is only guaranteed for serial mode.")
    end
    if parallelism == :multithreading && Threads.nthreads() == 1
        verbosity > 0 &&
            @warn "You are using multithreading mode, but only one thread is available. Try starting julia with `--threads=auto`."
    end
    # Warn when units are present but no penalty was configured explicitly:
    uses_units = any(d -> d.X_units !== nothing || d.y_units !== nothing, datasets)
    if uses_units && options.dimensional_constraint_penalty === nothing
        verbosity > 0 &&
            @warn "You are using dimensional constraints, but `dimensional_constraint_penalty` was not set. The default penalty of `1000.0` will be used."
    end
    # Anonymous functions cannot be serialized/recreated on workers:
    for op in (options.operators.binops..., options.operators.unaops...)
        if is_anonymous_function(op)
            throw(
                AssertionError(
                    "Anonymous functions can't be used as operators for SymbolicRegression.jl",
                ),
            )
        end
    end
    assert_operators_well_defined(T, options)
    # An operator may be registered as unary OR binary, never both:
    shared_ops = intersect(options.operators.binops, options.operators.unaops)
    if !isempty(shared_ops)
        throw(
            AssertionError(
                "Your configuration is invalid - $(shared_ops) appear in both the binary operators and unary operators.",
            ),
        )
    end
    return nothing
end
# Check for errors before they happen
"""
    test_dataset_configuration(dataset::Dataset{T}, options, verbosity)

Validate a single dataset before a search: shape consistency of `X`/`y`,
a hint to enable batching on very large datasets, and — for weighted custom
losses — that the loss accepts three scalar arguments.
"""
function test_dataset_configuration(
    dataset::Dataset{T}, options::AbstractOptions, verbosity
) where {T<:DATA_TYPE}
    nrows = dataset.n
    x_mismatch = nrows != size(dataset.X, 2)
    y_mismatch = dataset.y !== nothing && nrows != size(dataset.y::AbstractArray, 1)
    if x_mismatch || y_mismatch
        throw(
            AssertionError(
                "Dataset dimensions are invalid. Make sure X is of shape [features, rows], y is of shape [rows] and if there are weights, they are of shape [rows].",
            ),
        )
    end
    if size(dataset.X, 2) > 10000 && !options.batching && verbosity > 0
        @info "Note: you are running with more than 10,000 datapoints. You should consider turning on batching (`options.batching`), and also if you need that many datapoints. Unless you have a large amount of noise (in which case you should smooth your dataset first), generally < 10,000 datapoints is enough to find a functional form."
    end
    # A custom (non-SupervisedLoss) loss used with weights needs a 3-argument
    # method f(prediction, target, weight); `nargs` counts the function itself.
    loss = options.elementwise_loss
    if !(typeof(loss) <: SupervisedLoss) &&
        is_weighted(dataset) &&
        !any(m -> m.nargs - 1 == 3, methods(loss))
        throw(
            AssertionError(
                "When you create a custom loss function, and are using weights, you need to define your loss function with three scalar arguments: f(prediction, target, weight).",
            ),
        )
    end
    return nothing
end
""" Move custom operators and loss functions to workers, if undefined """
function move_functions_to_workers(
procs, options::AbstractOptions, dataset::Dataset{T}, verbosity
) where {T}
# All the types of functions we need to move to workers:
function_sets = (
:unaops, :binops, :elementwise_loss, :early_stop_condition, :loss_function
)
for function_set in function_sets
if function_set == :unaops
ops = options.operators.unaops
example_inputs = (zero(T),)
elseif function_set == :binops
ops = options.operators.binops
example_inputs = (zero(T), zero(T))
elseif function_set == :elementwise_loss
if typeof(options.elementwise_loss) <: SupervisedLoss
continue
end
ops = (options.elementwise_loss,)
example_inputs = if is_weighted(dataset)
(zero(T), zero(T), zero(T))
else
(zero(T), zero(T))
end
elseif function_set == :early_stop_condition
if !(typeof(options.early_stop_condition) <: Function)
continue
end
ops = (options.early_stop_condition,)
example_inputs = (zero(T), 0)
elseif function_set == :loss_function
if options.loss_function === nothing
continue
end
ops = (options.loss_function,)
example_inputs = (Node(T; val=zero(T)), dataset, options)
else
error("Invalid function set: $function_set")
end
for op in ops
try
test_function_on_workers(example_inputs, op, procs)
catch e
undefined_on_workers = isa(e.captured.ex, UndefVarError)
if undefined_on_workers
copy_definition_to_workers(op, procs, options, verbosity)
else
throw(e)
end
end
test_function_on_workers(example_inputs, op, procs)
end
end
end
# Replay the head node's method definitions of `op` onto each worker process.
# Used when a user-defined function (operator, loss, etc.) is not in scope on
# the workers.
function copy_definition_to_workers(op, procs, options::AbstractOptions, verbosity)
    name = nameof(op)
    verbosity > 0 && @info "Copying definition of $op to workers..."
    # All methods currently defined for `op` on the head node:
    src_ms = methods(op).ms
    # Thanks https://discourse.julialang.org/t/easy-way-to-send-custom-function-to-distributed-workers/22118/2
    # First declare an empty generic function with the same name on every
    # worker, then replay each method into it one at a time:
    @everywhere procs @eval function $name end
    for m in src_ms
        @everywhere procs @eval $m
    end
    verbosity > 0 && @info "Finished!"
    return nothing
end
# Invoke `op(example_inputs...)` once on each process in `procs`; `fetch`
# propagates any remote failure back to the caller as an exception.
function test_function_on_workers(example_inputs, op, procs)
    spawned = [@spawnat(proc, op(example_inputs...)) for proc in procs]
    foreach(fetch, spawned)
    return nothing
end
# Activate the given project environment on every worker process, so workers
# resolve the same package versions as the head node.
function activate_env_on_workers(
    procs, project_path::String, options::AbstractOptions, verbosity
)
    verbosity > 0 && @info "Activating environment on workers."
    @everywhere procs begin
        # `$$project_path` is spliced twice: once by `@everywhere` when
        # shipping the expression, and once when the inner `quote` is
        # evaluated on the worker — so the literal path lands remotely.
        Base.MainInclude.eval(
            quote
                using Pkg
                Pkg.activate($$project_path)
            end,
        )
    end
end
# Load SymbolicRegression (and any already-loaded extension triggers) on each
# worker, mirroring however it was brought into scope on the head node.
function import_module_on_workers(
    procs, filename::String, options::AbstractOptions, verbosity
)
    # Names of every module currently loaded on the head process:
    loaded_modules_head_worker = [k.name for (k, _) in Base.loaded_modules]
    # If SymbolicRegression was never `using`-loaded, the head node must have
    # `include`d the source file directly — workers then do the same:
    included_as_local = "SymbolicRegression" ∉ loaded_modules_head_worker
    expr = if included_as_local
        quote
            include($filename)
            using .SymbolicRegression
        end
    else
        quote
            using SymbolicRegression
        end
    end
    # Need to import any extension code, if loaded on head node
    relevant_extensions = [
        :Bumper,
        :CUDA,
        :ClusterManagers,
        :Enzyme,
        :LoopVectorization,
        :SymbolicUtils,
        :Zygote,
    ]
    # Keep only the extension-trigger packages actually loaded on the head:
    filter!(m -> String(m) ∈ loaded_modules_head_worker, relevant_extensions)
    # HACK TODO – this workaround is very fragile. Likely need to submit a bug report
    # to JuliaLang.
    for ext in relevant_extensions
        # Append a `using <Ext>: <Ext>` statement so the corresponding package
        # extension is also activated on the workers:
        push!(
            expr.args,
            quote
                using $ext: $ext
            end,
        )
    end
    verbosity > 0 && if isempty(relevant_extensions)
        @info "Importing SymbolicRegression on workers."
    else
        @info "Importing SymbolicRegression on workers as well as extensions $(join(relevant_extensions, ',' * ' '))."
    end
    # Evaluate the assembled expression in Main on every worker:
    @everywhere procs Core.eval(Core.Main, $expr)
    verbosity > 0 && @info "Finished!"
    return nothing
end
# Smoke-test that SymbolicRegression is usable on each worker by generating a
# small random tree remotely; `fetch` surfaces any remote failure.
function test_module_on_workers(procs, options::AbstractOptions, verbosity)
    verbosity > 0 && @info "Testing module on workers..."
    spawned = map(procs) do proc
        @spawnat proc SymbolicRegression.gen_random_tree(3, options, 5, TEST_TYPE)
    end
    foreach(fetch, spawned)
    verbosity > 0 && @info "Finished!"
    return nothing
end
# Run a miniature end-to-end search on each worker — population creation, a
# short evolve cycle, then optimization/simplification — to catch pipeline
# misconfiguration before the real search starts.
function test_entire_pipeline(
    procs, dataset::Dataset{T}, options::AbstractOptions, verbosity
) where {T<:DATA_TYPE}
    verbosity > 0 && @info "Testing entire pipeline on workers..."
    spawned = map(procs) do proc
        @spawnat proc begin
            trial_pop = Population(
                dataset;
                population_size=20,
                nlength=3,
                options=options,
                nfeatures=dataset.nfeatures,
            )
            trial_pop = s_r_cycle(
                dataset,
                trial_pop,
                5,
                5,
                RunningSearchStatistics(; options=options);
                verbosity=verbosity,
                options=options,
                record=RecordType(),
            )[1]
            trial_pop = optimize_and_simplify_population(
                dataset, trial_pop, options, options.maxsize, RecordType()
            )
        end
    end
    foreach(fetch, spawned)
    verbosity > 0 && @info "Finished!"
    return nothing
end
# Prepare worker processes for a distributed search: spawn them if the caller
# did not supply any, ship the module and user functions over, and optionally
# run remote smoke tests. Returns `(procs, we_created_procs)` so the caller
# knows whether it owns (and must later remove) the processes.
function configure_workers(;
    procs::Union{Vector{Int},Nothing},
    numprocs::Int,
    addprocs_function::Function,
    options::AbstractOptions,
    project_path,
    file,
    exeflags::Cmd,
    verbosity,
    example_dataset::Dataset,
    runtests::Bool,
)
    we_created_procs = procs === nothing
    if we_created_procs
        procs = addprocs_function(numprocs; lazy=false, exeflags)
        # Freshly spawned workers need the package loaded; caller-supplied
        # procs are assumed to be set up already.
        import_module_on_workers(procs, file, options, verbosity)
    end
    move_functions_to_workers(procs, options, example_dataset, verbosity)
    if runtests
        test_module_on_workers(procs, options, verbosity)
        test_entire_pipeline(procs, example_dataset, options, verbosity)
    end
    return (procs, we_created_procs)
end