function [] = HEAT_step3(inputs,DataType,Variable)
% HEAT_step3(inputs,DataType,Variable)
%
% Run the third step of HEAT to output workflow data. This is data that is
% used by other OpenCLIM models in workflows on DAFNI. Currently, the only
% workflow that HEAT feeds into is the ARCADIA heat-mortality model. Other
% models can be added as required.
%
% It uses the same loading process as HEAT_step2, whereby each dataset
% (e.g. simulation) is loaded in turn either from raw or derived data for
% the correct spatial and temporal domain. There is an option to output
% regional means for the ARCADIA model - if a region is specified then the
% whole spatial domain is loaded before taking the regional mean.
%
% As far as I know, ARCADIA currently only requires daily mean temperature
% or sWBGT. Daily maximum and daily minimum temperatures can also be exported.
%
% TO DO: Check whether ARCADIA always requires a full annual cycle of data.
% Katie's document on Teams suggests so, in which case the AnnSummer
% subsetting should be removed and the calendar for the UKCP18 models needs
% to be converted from 360 to 365 days (I can use the ISIMIP2b method for this).
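%
% Example (a minimal sketch; the field values below are hypothetical and
% should be replaced with whatever datasets and domain are available locally):
%   inputs.WorkflowOutput = 'ARCADIA';
%   inputs.Dataset = {'RCM-01'};                  % hypothetical dataset ID
%   inputs.Domain = 'UK';
%   inputs.ExptName = 'MyExperiment';
%   inputs.TemporalRange = [19810101 20001230];   % yyyymmdd start and end
%   inputs.AnnSummer = 'Ann';                     % hypothetical: full annual cycle
%   HEAT_step3(inputs,'UKCP18','T')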
%% Generate ARCADIA-ready csv if requested
disp(' ')
disp('Running Step 3: producing workflow output for ARCADIA')
disp('-----')
if strcmp(inputs.WorkflowOutput,'ARCADIA')
% Set directory paths
init_HEAT
% Load xyz data
load_xyz
%% Go through each simulation
for s = 1:length(inputs.Dataset)
Dataset = char(inputs.Dataset(s));
%% Go through UKCP18 data
if strcmp(DataType,'UKCP18')
% Find resolution
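% Dataset IDs are assumed to encode the model type in the first two
% characters ('RC','CP','GC','CM') and the ensemble member number in
% characters 5-6 (7-8 for 'CM' datasets)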
if strcmp(Dataset(1:2),'RC')
res = '12km/';
runn = ['run',Dataset(5:6)];
lats = lat_UK_RCM;
lons = long_UK_RCM;
LSM = LSM12;
elseif strcmp(Dataset(1:2),'CP')
res = '2km/';
runn = ['run',Dataset(5:6)];
lats = lat_UK_CPM;
lons = long_UK_CPM;
LSM = LSM2;
elseif strcmp(Dataset(1:2),'GC')
res = '60km/';
runn = ['run',Dataset(5:6)];
lats = lat_UK_GCM;
lons = long_UK_GCM;
LSM = LSM60;
elseif strcmp(Dataset(1:2),'CM')
res = '60km/';
runn = ['run',Dataset(7:8)];
lats = lat_UK_GCM;
lons = long_UK_GCM;
LSM = LSM60;
end
% Find location of netCDF data for the required variable
% Set the default domain to load as the whole dataset for T vars
ncstarts = [1 1 1 1];
ncends = [Inf Inf Inf Inf];
% Dimension of yyyymmdd var for T vars
datedim = 2;
if strcmp(Variable,'Tmax')
% Find what files are available
var = 'tasmax';
% Directory of raw data for each required variable
vardir = [UKCP18dir,res,var,'/',runn,'/'];
% Find how many files are to be loaded/produced
files = dir([vardir '*.nc']);
elseif strcmp(Variable,'T')
% Find what files are available
var = 'tas';
% Directory of raw data for each required variable
vardir = [UKCP18dir,res,var,'/',runn,'/'];
% Find how many files are to be loaded/produced
files = dir([vardir '*.nc']);
elseif strcmp(Variable,'Tmin')
% Find what files are available
var = 'tasmin';
% Directory of raw data for each required variable
vardir = [UKCP18dir,res,var,'/',runn,'/'];
% Find how many files are to be loaded/produced
files = dir([vardir '*.nc']);
else % All other heat stress variables are found in Deriveddir
var = Variable;
vardir = Deriveddir;
% Set the default domain to load as the whole dataset for all other vars
ncstarts = [1 1 1];
ncends = [Inf Inf Inf];
% Dimension of yyyymmdd for all other vars
datedim = 1;
% Find how many files are to be loaded/produced
files = dir([vardir,var,'-',inputs.Domain,'-',Dataset,'*.nc']);
end
%% Go through HadUK-Grid data
elseif strcmp(DataType,'HadUKGrid')
% Find resolution
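% HadUK-Grid dataset IDs are assumed to start with the resolution
% identifier ('12', '2k', '60' or '1k')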
if strcmp(Dataset(1:2),'12')
res = '12km/';
% runn = ['run',Dataset(5:6)];
lats = lat_UK_RCM;
lons = long_UK_RCM;
LSM = LSM12;
elseif strcmp(Dataset(1:2),'2k')
res = '2km/';
% runn = ['run',Dataset(5:6)];
lats = lat_UK_CPM;
lons = long_UK_CPM;
LSM = LSM2;
elseif strcmp(Dataset(1:2),'60')
res = '60km/';
% runn = ['run',Dataset(5:6)];
lats = lat_UK_GCM;
lons = long_UK_GCM;
LSM = LSM60;
elseif strcmp(Dataset(1:2),'1k')
res = '1km/';
% runn = ['run',Dataset(7:8)];
lats = lat_UK_HadUK1;
lons = long_UK_HadUK1;
LSM = LSM1;
end
% Find location of netCDF data for the required variable
% Set the default domain to load as the whole dataset for T vars
ncstarts = [1 1 1];
ncends = [Inf Inf Inf];
% Dimension of yyyymmdd var for T vars
datedim = 2;
if strcmp(Variable,'Tmax')
% Find what files are available
var = 'tasmax';
% Directory of raw data for each required variable
vardir = [HadUKdir,var,'/',res];
% Find how many files are to be loaded/produced
files = dir([vardir '*.nc']);
elseif strcmp(Variable,'T')
% Find what files are available
var = 'T';
% Directory of raw data for each required variable
vardir = Deriveddir;
files = dir([vardir,'T-',inputs.Domain,'-HadUK-Grid-',Dataset,'*.nc']);
% Dimension of yyyymmdd for all other vars
datedim = 1;
elseif strcmp(Variable,'Tmin')
% Find what files are available
var = 'tasmin';
% Directory of raw data for each required variable
vardir = [HadUKdir,var,'/',res];
% Find how many files are to be loaded/produced
files = dir([vardir '*.nc']);
else % All other heat stress variables are found in Deriveddir
var = Variable;
vardir = Deriveddir;
% Set the default domain to load as the whole dataset for all other vars
ncstarts = [1 1 1];
ncends = [Inf Inf Inf];
% Dimension of yyyymmdd for all other vars
datedim = 1;
files = dir([vardir,var,'-',inputs.Domain,'-HadUK-Grid-',Dataset,'*.nc']);
end
end
if ~isempty(files)
%% Spatially subset data as required
% Find corners of requested domain
if isfield(inputs,'SpatialRange')
if length(inputs.SpatialRange(1,:)) == 2 % lat-long box specified to load
[lon_id1,lat_id1] = find_location(inputs.SpatialRange(2,1),inputs.SpatialRange(1,1),lons,lats);
[lon_id2,lat_id2] = find_location(inputs.SpatialRange(2,2),inputs.SpatialRange(1,2),lons,lats);
ncstarts(1) = lon_id1; ncstarts(2) = lat_id1;
ncends(1) = 1+lon_id2-lon_id1; ncends(2) = 1+lat_id2-lat_id1;
% Create an ID field for subsetting e.g. lat-long, areas etc.
grid_idx = lon_id1:lon_id2;
grid_idy = lat_id1:lat_id2;
elseif length(inputs.SpatialRange(1,:)) == 1 % specific grid cell specified
[lon_id1,lat_id1] = find_location(inputs.SpatialRange(2,1),inputs.SpatialRange(1,1),lons,lats);
ncstarts(1) = lon_id1; ncstarts(2) = lat_id1;
ncends(1) = 1; ncends(2) = 1;
% Create an ID field for subsetting e.g. lat-long, areas etc.
grid_idx = lon_id1;
grid_idy = lat_id1;
end
end
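% ncstarts and ncends are passed to ncread as its 'start' and 'count'
% arguments, so only the requested lat-long window is read from disk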
%% Load only the files required for the temporal subset
% If specific years are required
if isfield(inputs,'TemporalRange')
% Find which files cover the required start and end dates
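% (the date range is assumed to be encoded at the end of each file name
% as 'yyyymmdd-yyyymmdd.nc', as in the standard UKCP18/HadUK-Grid naming)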
for i = 1:length(files)
% Find when the netCDF files start and end
fstart = files(i).name(end-19:end-12);
fend = files(i).name(end-10:end-3);
% Find netCDF file that contains required start
if str2double(fstart) <= inputs.TemporalRange(1) && str2double(fend) >= inputs.TemporalRange(1)
startload = i;
end
% Find netCDF file that contains required end
if str2double(fstart) <= inputs.TemporalRange(2) && str2double(fend) >= inputs.TemporalRange(2)
endload = i;
end
end
else
startload = 1;
endload = length(files);
end
% Load all of the files between the start and end file
for i = startload:endload
% File name
file = [files(i).folder,'/',files(i).name];
% Load temperature for the correct region and concatenate through time if necessary
if i == startload
data = double(ncread(file,var,ncstarts,ncends));
dates = ncread(file,'yyyymmdd');
else
data = cat(3,data,double(ncread(file,var,ncstarts,ncends)));
dates = cat(datedim,dates,ncread(file,'yyyymmdd'));
end
end
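% For derived files the yyyymmdd strings are concatenated along the first
% dimension; transpose so the date array has the same orientation in both cases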
if datedim == 1
dates = dates';
end
%% Temporally subset to the specific required dates and summer type
% Pull out the required dates and times
[data,dates] = subset_temporal(data,dates,inputs.TemporalRange,inputs.AnnSummer);
%% Output ARCADIA data for grid cell or regional mean
% If interested in every single grid cell
if ~isfield(inputs,'OutputRegion')
% A csv needs to be saved for every grid box: go through each lat-long
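% Each row of the output csv holds the full daily time series for one
% dataset/simulation; rerunning with another dataset appends a new row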
for i = 1:length(data(:,1,1))
for j = 1:length(data(1,:,1))
% Set csv output file name
if strcmp(DataType,'HadUKGrid')
csv_name = [Outputdir,'/',inputs.ExptName,'/',num2str(i),'_',num2str(j),'_HadUK-Grid-',Dataset,'_',Variable,'.csv'];
elseif strcmp(DataType,'UKCP18')
csv_name = [Outputdir,'/',inputs.ExptName,'/',num2str(i),'_',num2str(j),'_UKCP18-',res(1:end-1),'_',Variable,'.csv'];
end
% If csv has been created already
if exist(csv_name,'file')
% Load existing csv
data_csv = csvread(csv_name);
% Extract individual grid cell for current dataset
data_csv_temp = squeeze(data(i,j,:))';
% Concatenate this with the existing csv and resave
data_csv = cat(1,data_csv,data_csv_temp);
csvwrite(csv_name,data_csv)
else
% Otherwise extract individual grid cell for current
% dataset and create new csv
data_csv = squeeze(data(i,j,:))';
csvwrite(csv_name,data_csv)
end
end
end
else % Otherwise take a regional mean
regmean = calc_reg_mean(data,inputs.Region);
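% As with the grid cell output, each row of the regional csv is the
% regional-mean time series for one dataset/simulation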
% Set csv output file name
csv_name = [Outputdir,'/',inputs.ExptName,'/',inputs.Region,'_',inputs.ExptName,'_',Variable,'.csv'];
% If csv has been created already
if exist(csv_name,'file')
% Load existing csv
data_csv = csvread(csv_name);
% Concatenate this with the existing csv and resave
data_csv = cat(1,data_csv,regmean');
csvwrite(csv_name,data_csv)
else
% Otherwise create new csv
csvwrite(csv_name,regmean')
end
end
end
end
end
end