Downscaling climate scenarios program using weather typing.
#include <dsclim.h>
Go to the source code of this file.
Functions

int wt_downscaling (data_struct *data)
    Downscaling climate scenarios program using weather typing.
Downscaling climate scenarios program using weather typing.
Definition in file wt_downscaling.c.
int wt_downscaling ( data_struct * data )
Downscaling climate scenarios program using weather typing.
Parameters
    [in]  data  MASTER data structure.
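wt_downscaling() returns 0 on success and a non-zero status as soon as one of its processing steps fails, as the source listing below shows. A minimal caller sketch, assuming only that behaviour; the real caller is main(), which fully initializes the MASTER data structure first, and run_weather_typing() is an illustrative name, not part of dsclim:

#include <stdio.h>
#include <dsclim.h>

/* Hypothetical driver fragment: 'data' must already hold the parsed
   configuration, learning data and large-scale field metadata. */
static int run_weather_typing(data_struct *data)
{
  int istat = wt_downscaling(data);  /* 0 on success, non-zero on failure */
  if (istat != 0)
    (void) fprintf(stderr, "wt_downscaling() failed with status %d\n", istat);
  return istat;
}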
Step 1: Read large-scale fields
Step 2: Compute climatologies and remove them from the selected large-scale fields
Step 3: Project the selected large-scale fields onto the EOFs
CONTROL RUN
Step 4: Compute distance to clusters for the control run
Step 5: Compute mean and variance of secondary large-scale fields for the control run
MODEL RUN
Step 6: Compute mean secondary large-scale fields for the model run
Step 7: Compute distance to clusters (model run and optionally control run)
Step 8: Normalize the secondary large-scale fields by control-run mean and variance
Step 9: Compute the precipitation index for each season using the pre-computed regressions (model run)
Step 10: Find the analog days (resampling)
Step 11: Compute the secondary large-scale field differences, if requested
Step 12: Reconstruct the data using the chosen resampled days and write the output (a structural sketch of the full step sequence follows below)
Determine the number of time elements given the seasons (they may not span the whole year)
Optionally save the analog_days information in an output file
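Hedged outline of the step sequence: only the three whole-structure helpers are called with the argument list actually used in the source listing further down; the remaining steps are summarized as comments, because the real calls are made per field and per season and take many more arguments. The name wt_downscaling_outline() is illustrative, not part of dsclim.

#include <dsclim.h>

int wt_downscaling_outline(data_struct *data)
{
  int istat;

  /* Step 1: read the large-scale fields */
  if ((istat = read_large_scale_fields(data)) != 0) return istat;

  /* Step 2: compute climatologies and remove them from the selected fields */
  if ((istat = remove_clim(data)) != 0) return istat;

  /* Step 3: read EOFs and singular values, then project the fields onto them */
  if ((istat = read_large_scale_eof(data)) != 0) return istat;
  /* ... project_field_eof() for each large-scale field that requires it ... */

  /* Steps 4-5 (control run): normalize_pc(), mean_variance_dist_clusters(),
     mean_field_spatial(), mean_variance_field_spatial() */

  /* Steps 6-8 (model run): mean_field_spatial(), dist_clusters_normctrl(),
     class_days_pc_clusters(), normalize_field(), normalize_field_2d() */

  /* Step 9: apply_regression() per season to build the precipitation index */

  /* Step 10: find_the_days() performs the analog-day resampling */

  /* Step 11: compute_secondary_large_scale_diff(), if requested */

  /* Step 12: merge_seasons()/merge_seasonal_data*(), then save_analog_data()
     and/or output_downscaled_analog() */

  return 0;
}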
Definition at line 67 of file wt_downscaling.c.
References alloc_error(), field_struct::analog_days, field_struct::analog_days_year, analog_day_struct::analog_dayschoice, conf_struct::analog_file_ctrl, conf_struct::analog_file_other, conf_struct::analog_save, apply_regression(), conf_struct::cal_type, learning_data_struct::class_clusters, class_days_pc_clusters(), conf_struct::classif_type, conf_struct::compression, conf_struct::compression_level, compute_secondary_large_scale_diff(), data_struct::conf, conf_struct::config, CTRL_FIELD_LS, CTRL_SEC_FIELD_LS, learning_struct::data, field_struct::data, analog_day_struct::day, time_vect_struct::day, analog_day_struct::day_s, downscale_struct::days_class_clusters, downscale_struct::days_class_clusters_all, conf_struct::debug, downscale_struct::delta, downscale_struct::delta_all, downscale_struct::delta_dayschoice, downscale_struct::delta_dayschoice_all, conf_struct::deltat, downscale_struct::dist, downscale_struct::dist_all, dist_clusters_normctrl(), field_data_struct::down, period_struct::downscale, field_data_struct::eof_data, field_data_struct::eof_info, eof_data_struct::eof_ls, eof_info_struct::eof_project, eof_info_struct::eof_scale, extract_subdomain(), extract_subperiod_months(), FALSE, data_struct::field, mask_struct::field, field_data_struct::field_eof_ls, field_data_struct::field_ls, FIELD_LS, reg_struct::filename_save_ctrl_reg, reg_struct::filename_save_other_reg, info_field_struct::fillvalue, find_the_days(), field_data_struct::first_variance, conf_struct::format, data_struct::info, eof_info_struct::info, mask_struct::lat, field_struct::lat_eof_ls, data_struct::learning, mask_struct::lon, field_struct::lon_eof_ls, downscale_struct::mean, downscale_struct::mean_dist, mean_field_spatial(), mean_variance_dist_clusters(), mean_variance_field_spatial(), merge_seasonal_data(), merge_seasonal_data_2d(), merge_seasonal_data_i(), merge_seasons(), analog_day_struct::metric_norm, analog_day_struct::month, season_struct::month, time_vect_struct::month, analog_day_struct::month_s, field_struct::n_ls, NCAT, season_struct::nclusters, season_struct::ndays, analog_day_struct::ndayschoice, season_struct::ndayschoices, eof_info_struct::neof_ls, mask_struct::nlat, field_struct::nlat_eof_ls, field_struct::nlat_ls, mask_struct::nlon, field_struct::nlon_eof_ls, field_struct::nlon_ls, season_struct::nmonths, normalize_field(), normalize_field_2d(), normalize_pc(), reg_struct::npts, season_struct::nreg, conf_struct::nseasons, analog_day_struct::ntime, learning_data_struct::ntime, learning_struct::ntime, field_struct::ntime_ls, conf_struct::obs_var, conf_struct::only_wt, conf_struct::output, output_downscaled_analog(), conf_struct::output_month_begin, conf_struct::output_only, conf_struct::output_path, learning_struct::pc_normalized_var, conf_struct::period, conf_struct::period_ctrl, learning_data_struct::precip_index, field_struct::precip_index, learning_data_struct::precip_reg, learning_data_struct::precip_reg_cst, project_field_eof(), read_analog_data(), read_large_scale_eof(), read_large_scale_fields(), data_struct::reg, reg_struct::reg_save, remove_clim(), save_analog_data(), conf_struct::season, SEC_FIELD_LS, season_struct::secondary_choice, season_struct::secondary_cov, conf_struct::secondary_latitude_max, conf_struct::secondary_latitude_min, conf_struct::secondary_longitude_max, conf_struct::secondary_longitude_min, season_struct::secondary_main_choice, data_struct::secondary_mask, season_struct::shuffle, eof_data_struct::sing_ls, downscale_struct::smean, downscale_struct::smean_2d, 
downscale_struct::smean_norm, sub_period_common(), learning_data_struct::sup_index, learning_data_struct::sup_index_var, learning_struct::sup_nlat, learning_struct::sup_nlon, learning_data_struct::sup_val, downscale_struct::sup_val_norm, downscale_struct::svar_2d, analog_day_struct::time, field_struct::time_ls, time_mean_variance_field_2d(), learning_data_struct::time_s, learning_struct::time_s, field_struct::time_s, conf_struct::time_units, var_struct::timename, analog_day_struct::tindex, analog_day_struct::tindex_all, analog_day_struct::tindex_dayschoice, analog_day_struct::tindex_s_all, TRUE, conf_struct::use_downscaled_year, mask_struct::use_mask, downscale_struct::var, downscale_struct::var_dist, downscale_struct::var_pc_norm, learning_data_struct::weight, write_regression_fields(), analog_day_struct::year, time_vect_struct::year, and analog_day_struct::year_s.
Referenced by main().
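Throughout the listing below, every malloc()/calloc() is immediately checked and a NULL result is handed to alloc_error(__FILE__, __LINE__). A minimal sketch of that idiom, assuming alloc_error() reports the location and aborts; alloc_work_buffer() is an illustrative helper, not part of the file:

#include <stdlib.h>
#include <dsclim.h>   /* declares alloc_error() */

/* Illustrative helper showing the allocation-check idiom used below:
   allocate a time-by-EOF work buffer and route failures to alloc_error(). */
static double *alloc_work_buffer(int ntime, int neof)
{
  double *buf = (double *) malloc((size_t) ntime * (size_t) neof * sizeof(double));
  if (buf == NULL) alloc_error(__FILE__, __LINE__);
  return buf;
}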
00067 { 00074 double *buf_sub = NULL; /* Temporary buffer for sub-domain or sub-period */ 00075 double *buftmp = NULL; /* Temporary buffer */ 00076 double *buftmpf = NULL; /* Temporary buffer */ 00077 double *mask_subd = NULL; /* Mask covering subdomain in double format when reading */ 00078 short int *mask_sub = NULL; /* Mask covering subdomain in short int */ 00079 double *lon_mask = NULL; /* Longitudes of mask */ 00080 double *lat_mask = NULL; /* Latitudes of mask */ 00081 double *var_pc_norm_all = NULL; /* Temporary values of the norm of the principal components */ 00082 int **ntime_sub = NULL; /* Number of times for sub-periods. Dimensions number of field categories (NCAT) and number of seasons */ 00083 double **time_ls_sub = NULL; /* Time values used for regression diagnostics output */ 00084 int *merged_itimes = NULL; /* Time values in common merged time vector */ 00085 int ntimes_merged; /* Number of times in one particular season */ 00086 int curindex_merged; /* Current index in merged season vector */ 00087 short int *merged_times_flag = NULL; /* Flag variable for days in the year that are processed */ 00088 double *merged_times = NULL; /* Merge times in udunit */ 00089 int ntime_sub_tmp; /* Number of times for regression diagnostics output */ 00090 int ntime_sub_learn; /* Number of times for learning common sub-period with control run for a specific season */ 00091 int ntime_sub_learn_all; /* Number of times for learning common sub-period with control run for whole period */ 00092 int nlon_mask; /* Longitude dimension for mask subdomain */ 00093 int nlat_mask; /* Latitude dimension for mask subdomain */ 00094 00095 int istat; /* Function return diagnostic value */ 00096 int i; /* Loop counter */ 00097 int ii; /* Loop counter */ 00098 int s; /* Loop counter for seasons */ 00099 int cat; /* Loop counter for field categories */ 00100 int beg_cat; /* Beginning category to process in loop */ 00101 int maxndays; /* Maximum number of analog days choices within all seasons */ 00102 00103 char *analog_file = NULL; /* Analog data filename */ 00104 period_struct *period = NULL; /* Period structure for output */ 00105 00106 char *filename = NULL; /* Temporary filename for regression optional output */ 00107 00108 if (data->conf->output_only != TRUE) { 00109 00110 /* Allocate memory */ 00111 ntime_sub = (int **) malloc(NCAT * sizeof(int *)); 00112 if (ntime_sub == NULL) alloc_error(__FILE__, __LINE__); 00113 00114 if (data->reg->reg_save == TRUE) { 00115 time_ls_sub = (double **) malloc(data->conf->nseasons * sizeof(double *)); 00116 if (time_ls_sub == NULL) alloc_error(__FILE__, __LINE__); 00117 } 00118 00119 for (cat=0; cat<NCAT; cat++) { 00120 ntime_sub[cat] = (int *) malloc(data->conf->nseasons * sizeof(int)); 00121 if (ntime_sub[cat] == NULL) alloc_error(__FILE__, __LINE__); 00122 } 00123 00125 istat = read_large_scale_fields(data); 00126 if (istat != 0) return istat; 00127 00128 /* Prepare optional mask for secondary large-scale fields */ 00129 if (data->secondary_mask->use_mask == TRUE) { 00130 (void) extract_subdomain(&mask_subd, &lon_mask, &lat_mask, &nlon_mask, &nlat_mask, data->secondary_mask->field, 00131 data->secondary_mask->lon, data->secondary_mask->lat, 00132 data->conf->secondary_longitude_min, data->conf->secondary_longitude_max, 00133 data->conf->secondary_latitude_min, data->conf->secondary_latitude_max, 00134 data->secondary_mask->nlon, data->secondary_mask->nlat, 1); 00135 00136 if (data->conf->period_ctrl->downscale == TRUE) 00137 beg_cat = CTRL_SEC_FIELD_LS; 00138 
else 00139 beg_cat = SEC_FIELD_LS; 00140 /* Loop over secondary field categories (model run and optionally control run) */ 00141 for (cat=beg_cat; cat>=SEC_FIELD_LS; cat--) { 00142 /* Loop over secondary large-scale fields */ 00143 for (i=0; i<data->field[cat].n_ls; i++) 00144 if (data->field[cat].nlon_ls != nlon_mask || data->field[cat].nlat_ls != nlat_mask) { 00145 (void) fprintf(stderr, "%s: The mask for secondary large-scale fields after selecting subdomain has invalid dimensions: nlon=%d nlat=%d. Expected: nlon=%d nlat=%d\nReverting to no-mask processing.", __FILE__, 00146 nlon_mask, nlat_mask, data->field[cat].nlon_ls, data->field[cat].nlat_ls); 00147 mask_sub = (short int *) NULL; 00148 data->secondary_mask->use_mask = FALSE; 00149 } 00150 } 00151 /* Dimensions are ok and we are using a mask. Get values into short int buffer. */ 00152 if (data->secondary_mask->use_mask == TRUE) { 00153 mask_sub = (short int *) malloc(data->field[SEC_FIELD_LS].nlon_ls*data->field[SEC_FIELD_LS].nlat_ls * sizeof(short int)); 00154 if (mask_sub == NULL) alloc_error(__FILE__, __LINE__); 00155 for (i=0; i<data->field[SEC_FIELD_LS].nlon_ls*data->field[SEC_FIELD_LS].nlat_ls; i++) 00156 mask_sub[i] = (short int) mask_subd[i]; 00157 } 00158 (void) free(mask_subd); 00159 (void) free(lon_mask); 00160 (void) free(lat_mask); 00161 } 00162 else 00163 mask_sub = (short int *) NULL; 00164 00165 if (mask_sub != NULL) 00166 printf("%s: Using a mask for secondary large-scale fields.\n", __FILE__); 00167 00169 istat = remove_clim(data); 00170 if (istat != 0) return istat; 00171 00174 /* Read EOFs and Singular Values */ 00175 istat = read_large_scale_eof(data); 00176 if (istat != 0) return istat; 00177 00178 /* Project selected large scale fields on EOF */ 00179 /* Loop over large-scale field categories (Control run and Model run) */ 00180 for (cat=CTRL_FIELD_LS; cat>=FIELD_LS; cat--) 00181 /* Loop over large-scale fields */ 00182 for (i=0; i<data->field[cat].n_ls; i++) { 00183 /* Check if we need to project field on EOFs */ 00184 if (data->field[cat].data[i].eof_info->eof_project == TRUE) { 00185 /* Allocate memory for projected large-scale field */ 00186 data->field[cat].data[i].field_eof_ls = (double *) malloc(data->field[cat].ntime_ls * data->field[cat].data[i].eof_info->neof_ls * 00187 sizeof(double)); 00188 if (data->field[cat].data[i].field_eof_ls == NULL) alloc_error(__FILE__, __LINE__); 00189 /* Project large-scale field on EOFs */ 00190 istat = project_field_eof(data->field[cat].data[i].field_eof_ls, data->field[cat].data[i].field_ls, 00191 data->field[cat].data[i].eof_data->eof_ls, data->field[cat].data[i].eof_data->sing_ls, 00192 data->field[cat].data[i].eof_info->info->fillvalue, 00193 data->field[cat].lon_eof_ls, data->field[cat].lat_eof_ls, 00194 data->field[cat].data[i].eof_info->eof_scale, 00195 data->field[cat].nlon_eof_ls, data->field[cat].nlat_eof_ls, data->field[cat].ntime_ls, 00196 data->field[cat].data[i].eof_info->neof_ls); 00197 if (istat != 0) return istat; 00198 } 00199 } 00200 00201 00205 /* Process control run only */ 00206 cat = CTRL_FIELD_LS; 00207 /* Loop over large-scale fields */ 00208 for (i=0; i<data->field[cat].n_ls; i++) { 00209 00210 /* Allocate memory for temporary buffer */ 00211 buftmp = (double *) malloc(data->field[cat].ntime_ls*data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00212 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00213 00214 /* Normalisation of the principal component by the square root of the variance of the first one */ 00215 /* Select 
common time period between the learning period and the model period (control run) */ 00216 /* for first variance calculation */ 00217 istat = sub_period_common(&buf_sub, &ntime_sub_learn_all, data->field[cat].data[i].field_eof_ls, 00218 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00219 data->learning->time_s->year, data->learning->time_s->month, 00220 data->learning->time_s->day, 1, 00221 data->field[cat].data[i].eof_info->neof_ls, 1, data->field[cat].ntime_ls, data->learning->ntime); 00222 if (istat != 0) return istat; 00223 00224 /* Allocate memory for temporary buffer */ 00225 buftmpf = (double *) malloc(ntime_sub_learn_all*data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00226 if (buftmpf == NULL) alloc_error(__FILE__, __LINE__); 00227 00228 // for (s=0; s<data->field[cat].ntime_ls; s++) 00229 // printf("%d %lf\n",s,data->field[cat].data[i].field_eof_ls[s]); 00230 00231 /* Compute the norm and the variance of the first EOF of the control run as references */ 00232 printf("Compute the norm and the variance of the first EOF of the control run as references\n"); 00233 /* Only when first_variance is -9999.9999, the variance of the first EOF will be computed */ 00234 data->field[cat].data[i].first_variance = -9999.9999; 00235 (void) normalize_pc(data->field[cat].data[i].down->var_pc_norm, &(data->field[cat].data[i].first_variance), 00236 buftmpf, buf_sub, data->field[cat].data[i].eof_info->neof_ls, 00237 ntime_sub_learn_all); 00238 // for (ii=0; ii<9; ii++) printf("%d %lf\n",ii,sqrt(data->field[cat].data[i].down->var_pc_norm[ii])); 00239 /* Free temporary buffers */ 00240 (void) free(buf_sub); 00241 (void) free(buftmpf); 00242 00243 /* Normalize the large-scale field given the reference norm and variance */ 00244 printf("Normalize the large-scale field given the reference norm and variance.\n"); 00245 /* Allocate memory for temporary buffer */ 00246 var_pc_norm_all = (double *) malloc(data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00247 if (var_pc_norm_all == NULL) alloc_error(__FILE__, __LINE__); 00248 /* Normalize EOF-projected large-scale fields */ 00249 (void) normalize_pc(var_pc_norm_all, &(data->field[cat].data[i].first_variance), 00250 buftmp, data->field[cat].data[i].field_eof_ls, data->field[cat].data[i].eof_info->neof_ls, 00251 data->field[cat].ntime_ls); 00252 /* Free temporary buffer */ 00253 (void) free(var_pc_norm_all); 00254 00255 /* Loop over each season */ 00256 for (s=0; s<data->conf->nseasons; s++) { 00257 /* Compute mean and variance of principal components of selected large-scale fields */ 00258 00259 /* Allocate memory for season-specific mean and variance of distances to clusters */ 00260 data->field[cat].data[i].down->mean_dist[s] = (double *) malloc(data->conf->season[s].nclusters * sizeof(double)); 00261 if (data->field[cat].data[i].down->mean_dist[s] == NULL) alloc_error(__FILE__, __LINE__); 00262 data->field[cat].data[i].down->var_dist[s] = (double *) malloc(data->conf->season[s].nclusters * sizeof(double)); 00263 if (data->field[cat].data[i].down->var_dist[s] == NULL) alloc_error(__FILE__, __LINE__); 00264 00265 /* Select common time period between the learning period and the model period (control run) */ 00266 istat = sub_period_common(&buf_sub, &ntime_sub_learn, buftmp, 00267 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00268 data->learning->data[s].time_s->year, data->learning->data[s].time_s->month, 00269 
data->learning->data[s].time_s->day, 1, 00270 data->field[cat].data[i].eof_info->neof_ls, 1, data->field[cat].ntime_ls, data->learning->data[s].ntime); 00271 if (istat != 0) return istat; 00272 00273 /* Compute mean and variance of distances to clusters */ 00274 (void) mean_variance_dist_clusters(data->field[cat].data[i].down->mean_dist[s], data->field[cat].data[i].down->var_dist[s], 00275 buf_sub, data->learning->data[s].weight, 00276 data->learning->pc_normalized_var, data->field[cat].data[i].down->var_pc_norm, 00277 data->field[cat].data[i].eof_info->neof_ls, data->conf->season[s].nclusters, ntime_sub_learn); 00278 /* Diagnostic output */ 00279 printf("Season: %d\n", s); 00280 for (ii=0; ii<data->conf->season[s].nclusters; ii++) 00281 (void) printf("%s: Cluster #%d. Mean and variance of distances to clusters for control run: %lf %lf\n", __FILE__, ii, 00282 data->field[cat].data[i].down->mean_dist[s][ii], sqrt(data->field[cat].data[i].down->var_dist[s][ii])); 00283 00284 /* Free temporary buffer */ 00285 (void) free(buf_sub); 00286 } 00287 /* Free temporary buffer */ 00288 (void) free(buftmp); 00289 } 00290 00293 /* Process only secondary field of control run. */ 00294 cat = CTRL_SEC_FIELD_LS; 00295 /* Loop over secondary large-scale fields */ 00296 for (i=0; i<data->field[cat].n_ls; i++) { 00297 00298 /* Compute spatial mean of secondary large-scale fields */ 00299 data->field[cat].data[i].down->smean = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00300 if (data->field[cat].data[i].down->smean == NULL) alloc_error(__FILE__, __LINE__); 00301 00302 (void) mean_field_spatial(data->field[cat].data[i].down->smean, data->field[cat].data[i].field_ls, mask_sub, 00303 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls); 00304 00305 for (s=0; s<data->conf->nseasons; s++) { 00306 00307 /* Compute seasonal mean and variance of principal components of selected large-scale fields */ 00308 00309 /* Select common time period between the learning period and the model period (control run) */ 00310 istat = sub_period_common(&buf_sub, &ntime_sub_learn, data->field[cat].data[i].field_ls, 00311 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00312 data->learning->data[s].time_s->year, data->learning->data[s].time_s->month, 00313 data->learning->data[s].time_s->day, 3, 00314 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls, 00315 data->learning->data[s].ntime); 00316 if (istat != 0) return istat; 00317 00318 /* Compute seasonal mean and variance of spatially-averaged secondary field */ 00319 (void) mean_variance_field_spatial(&(data->field[cat].data[i].down->mean[s]), &(data->field[cat].data[i].down->var[s]), buf_sub, 00320 mask_sub, data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_sub_learn); 00321 00322 /* Compute mean and variance over time for each point of secondary field */ 00323 data->field[cat].data[i].down->smean_2d[s] = (double *) 00324 malloc(data->field[cat].nlon_ls*data->field[cat].nlat_ls*ntime_sub_learn * sizeof(double)); 00325 if (data->field[cat].data[i].down->smean_2d[s] == NULL) alloc_error(__FILE__, __LINE__); 00326 data->field[cat].data[i].down->svar_2d[s] = (double *) 00327 malloc(data->field[cat].nlon_ls*data->field[cat].nlat_ls*ntime_sub_learn * sizeof(double)); 00328 if (data->field[cat].data[i].down->svar_2d[s] == NULL) alloc_error(__FILE__, __LINE__); 00329 (void) time_mean_variance_field_2d(data->field[cat].data[i].down->smean_2d[s], 
data->field[cat].data[i].down->svar_2d[s], 00330 buf_sub, data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_sub_learn); 00331 00332 /* Diagnostic output */ 00333 (void) printf("Control run:: Season: %d TAS mean=%lf variance=%lf cat=%d field=%d\n", s, data->field[cat].data[i].down->mean[s], 00334 sqrt(data->field[cat].data[i].down->var[s]), cat, i); 00335 00336 /* Free temporary buffer */ 00337 (void) free(buf_sub); 00338 } 00339 } 00340 00341 00346 cat = SEC_FIELD_LS; 00347 /* Loop over secondary large-scale fields */ 00348 for (i=0; i<data->field[cat].n_ls; i++) { 00349 /* Compute spatial mean of secondary large-scale fields */ 00350 data->field[cat].data[i].down->smean = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00351 if (data->field[cat].data[i].down->smean == NULL) alloc_error(__FILE__, __LINE__); 00352 (void) mean_field_spatial(data->field[cat].data[i].down->smean, data->field[cat].data[i].field_ls, mask_sub, 00353 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls); 00354 } 00355 00358 /* Downscale also control run if needed */ 00359 if (data->conf->period_ctrl->downscale == TRUE) 00360 beg_cat = CTRL_FIELD_LS; 00361 else 00362 beg_cat = FIELD_LS; 00363 /* Loop over larg-scale field categories (model run and optionally control run) */ 00364 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00365 /* Loop over large-scale fields */ 00366 for (i=0; i<data->field[cat].n_ls; i++) { 00367 00368 /* Allocate memory for temporary buffer */ 00369 buftmp = (double *) malloc(data->field[cat].ntime_ls*data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00370 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00371 00372 /* Normalisation of the principal component by the square root of the variance of the control run */ 00373 var_pc_norm_all = (double *) malloc(data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00374 if (var_pc_norm_all == NULL) alloc_error(__FILE__, __LINE__); 00375 (void) normalize_pc(var_pc_norm_all, &(data->field[CTRL_FIELD_LS].data[i].first_variance), buftmp, 00376 data->field[cat].data[i].field_eof_ls, data->field[cat].data[i].eof_info->neof_ls, 00377 data->field[cat].ntime_ls); 00378 (void) free(var_pc_norm_all); 00379 00380 /* Loop over seasons */ 00381 for (s=0; s<data->conf->nseasons; s++) { 00382 00383 /* Select season months in the whole time period and create sub-period large-scale field buffer */ 00384 (void) extract_subperiod_months(&buf_sub, &(ntime_sub[cat][s]), buftmp, 00385 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00386 data->conf->season[s].month, 00387 1, 1, data->field[cat].data[i].eof_info->neof_ls, data->field[cat].ntime_ls, 00388 data->conf->season[s].nmonths); 00389 00390 /* Compute distances to clusters using normalization and against the control reference run */ 00391 data->field[cat].data[i].down->dist[s] = (double *) 00392 malloc(data->conf->season[s].nclusters*ntime_sub[cat][s] * sizeof(double)); 00393 if (data->field[cat].data[i].down->dist[s] == NULL) alloc_error(__FILE__, __LINE__); 00394 (void) dist_clusters_normctrl(data->field[cat].data[i].down->dist[s], buf_sub, data->learning->data[s].weight, 00395 data->learning->pc_normalized_var, data->field[CTRL_FIELD_LS].data[i].down->var_pc_norm, 00396 data->field[CTRL_FIELD_LS].data[i].down->mean_dist[s], 00397 data->field[CTRL_FIELD_LS].data[i].down->var_dist[s], 00398 data->field[cat].data[i].eof_info->neof_ls, data->conf->season[s].nclusters, 00399 ntime_sub[cat][s]); 00400 /* 
Classify each day in the current clusters */ 00401 data->field[cat].data[i].down->days_class_clusters[s] = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00402 if (data->field[cat].data[i].down->days_class_clusters[s] == NULL) alloc_error(__FILE__, __LINE__); 00403 (void) class_days_pc_clusters(data->field[cat].data[i].down->days_class_clusters[s], buf_sub, 00404 data->learning->data[s].weight, data->conf->classif_type, 00405 data->field[cat].data[i].eof_info->neof_ls, data->conf->season[s].nclusters, 00406 ntime_sub[cat][s]); 00407 /* Free temporary buffer */ 00408 (void) free(buf_sub); 00409 } 00410 /* Free temporary buffer */ 00411 (void) free(buftmp); 00412 } 00413 } 00414 00417 /* Downscale also control run if needed */ 00418 if (data->conf->period_ctrl->downscale == TRUE) 00419 beg_cat = CTRL_SEC_FIELD_LS; 00420 else 00421 beg_cat = SEC_FIELD_LS; 00422 00423 /* Loop over secondary field categories (model run and optionally control run) */ 00424 for (cat=beg_cat; cat>=SEC_FIELD_LS; cat--) { 00425 /* Loop over secondary large-scale fields */ 00426 for (i=0; i<data->field[cat].n_ls; i++) 00427 /* Loop over each season */ 00428 for (s=0; s<data->conf->nseasons; s++) { 00429 /* Select season months in the whole time period to create a sub-period buffer */ 00430 (void) extract_subperiod_months(&buf_sub, &(ntime_sub[cat][s]), data->field[cat].data[i].down->smean, 00431 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00432 data->conf->season[s].month, 3, 1, 1, data->field[cat].ntime_ls, data->conf->season[s].nmonths); 00433 /* Normalize the spatial mean of secondary large-scale fields */ 00434 data->field[cat].data[i].down->smean_norm[s] = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00435 if (data->field[cat].data[i].down->smean_norm[s] == NULL) alloc_error(__FILE__, __LINE__); 00436 (void) normalize_field(data->field[cat].data[i].down->smean_norm[s], buf_sub, 00437 data->field[CTRL_SEC_FIELD_LS].data[i].down->mean[s], data->field[CTRL_SEC_FIELD_LS].data[i].down->var[s], 00438 1, 1, ntime_sub[cat][s]); 00439 /* Free temporary buffer */ 00440 (void) free(buf_sub); 00441 00442 /* Select season months in the whole time period to create a 2D sub-period buffer */ 00443 (void) extract_subperiod_months(&buf_sub, &(ntime_sub[cat][s]), data->field[cat].data[i].field_ls, 00444 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00445 data->conf->season[s].month, 3, data->field[cat].nlon_ls, data->field[cat].nlat_ls, 00446 data->field[cat].ntime_ls, data->conf->season[s].nmonths); 00447 /* Normalize the secondary large-scale fields */ 00448 data->field[cat].data[i].down->sup_val_norm[s] = 00449 (double *) malloc(data->field[cat].nlon_ls*data->field[cat].nlat_ls*data->field[cat].ntime_ls * sizeof(double)); 00450 if (data->field[cat].data[i].down->sup_val_norm[s] == NULL) alloc_error(__FILE__, __LINE__); 00451 (void) normalize_field_2d(data->field[cat].data[i].down->sup_val_norm[s], buf_sub, 00452 data->field[CTRL_SEC_FIELD_LS].data[i].down->smean_2d[s], 00453 data->field[CTRL_SEC_FIELD_LS].data[i].down->svar_2d[s], 00454 data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_sub[cat][s]); 00455 /* Free temporary buffer */ 00456 (void) free(buf_sub); 00457 } 00458 } 00459 00462 /* Select the first large-scale field which must contain the cluster distances */ 00463 /* and the first secondary large-scale fields which must contains its spatial mean */ 00464 i = 0; 00465 00466 /* Downscale also 
control run if needed */ 00467 if (data->conf->period_ctrl->downscale == TRUE) 00468 beg_cat = CTRL_FIELD_LS; 00469 else 00470 beg_cat = FIELD_LS; 00471 00472 /* Loop over large-scale field categories (model run and optionally control run) */ 00473 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00474 /* Process only if, for this category, at least one large-scale field is available */ 00475 for (i=0; i<data->field[cat].n_ls; i++) { 00476 /* Loop over each season */ 00477 for (s=0; s<data->conf->nseasons; s++) { 00478 /* Apply the regression coefficients to calculate precipitation using the cluster distances */ 00479 /* and the normalized spatial mean of the corresponding secondary large-scale field */ 00480 data->field[cat].precip_index[s] = (double *) malloc(data->reg->npts*ntime_sub[cat+2][s] * sizeof(double)); 00481 if (data->field[cat].precip_index[s] == NULL) alloc_error(__FILE__, __LINE__); 00482 (void) apply_regression(data->field[cat].precip_index[s], data->learning->data[s].precip_reg, 00483 data->learning->data[s].precip_reg_cst, 00484 data->field[cat].data[i].down->dist[s], data->field[cat+2].data[i].down->smean_norm[s], 00485 data->reg->npts, ntime_sub[cat+2][s], data->conf->season[s].nclusters, data->conf->season[s].nreg); 00486 if (data->reg->reg_save == TRUE) 00487 /* Select season months in the whole time period and create sub-period time vector */ 00488 (void) extract_subperiod_months(&(time_ls_sub[s]), &ntime_sub_tmp, data->field[cat].time_ls, 00489 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00490 data->conf->season[s].month, 00491 1, 1, 1, data->field[cat].ntime_ls, 00492 data->conf->season[s].nmonths); 00493 } 00494 if (data->reg->reg_save == TRUE) { 00495 (void) printf("Writing downscaling regression diagnostic fields.\n"); 00496 if (cat == CTRL_FIELD_LS) 00497 filename = data->reg->filename_save_ctrl_reg; 00498 else 00499 filename = data->reg->filename_save_other_reg; 00500 (void) write_regression_fields(data, filename, time_ls_sub, ntime_sub[cat+2], 00501 data->field[cat].precip_index, 00502 data->field[cat].data[i].down->dist, 00503 data->field[cat+2].data[i].down->smean_norm); 00504 } 00505 } 00506 } 00507 if (data->reg->reg_save == TRUE) { 00508 for (s=0; s<data->conf->nseasons; s++) 00509 (void) free(time_ls_sub[s]); 00510 (void) free(time_ls_sub); 00511 } 00512 00515 /* Select the first large-scale field which must contain the cluster distances */ 00516 /* and the first secondary large-scale fields which must contains its spatial mean */ 00517 i = 0; 00518 00519 /* Downscale also control run if needed */ 00520 if (data->conf->period_ctrl->downscale == TRUE) 00521 beg_cat = CTRL_FIELD_LS; 00522 else 00523 beg_cat = FIELD_LS; 00524 00525 /* Loop over large-scale field categories (model run and optionally control run) */ 00526 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00527 /* Process only if, for this category, at least one large-scale field is available */ 00528 if (data->field[cat].n_ls > 0) 00529 /* Loop over each season */ 00530 for (s=0; s<data->conf->nseasons; s++) { 00531 /* Find the analog days in the learning period given the precipitation index, */ 00532 /* the spatial mean of the secondary large-scale fields and its index, and the cluster classification of the days */ 00533 data->field[cat].analog_days[s].ntime = ntime_sub[cat][s]; 00534 data->field[cat].analog_days[s].time = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00535 if (data->field[cat].analog_days[s].time == NULL) alloc_error(__FILE__, 
__LINE__); 00536 data->field[cat].analog_days[s].tindex = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00537 if (data->field[cat].analog_days[s].tindex == NULL) alloc_error(__FILE__, __LINE__); 00538 data->field[cat].analog_days[s].tindex_all = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00539 if (data->field[cat].analog_days[s].tindex_all == NULL) alloc_error(__FILE__, __LINE__); 00540 data->field[cat].analog_days[s].year = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00541 if (data->field[cat].analog_days[s].year == NULL) alloc_error(__FILE__, __LINE__); 00542 data->field[cat].analog_days[s].month = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00543 if (data->field[cat].analog_days[s].month == NULL) alloc_error(__FILE__, __LINE__); 00544 data->field[cat].analog_days[s].day = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00545 if (data->field[cat].analog_days[s].day == NULL) alloc_error(__FILE__, __LINE__); 00546 data->field[cat].analog_days[s].tindex_s_all = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00547 if (data->field[cat].analog_days[s].tindex_s_all == NULL) alloc_error(__FILE__, __LINE__); 00548 data->field[cat].analog_days[s].year_s = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00549 if (data->field[cat].analog_days[s].year_s == NULL) alloc_error(__FILE__, __LINE__); 00550 data->field[cat].analog_days[s].month_s = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00551 if (data->field[cat].analog_days[s].month_s == NULL) alloc_error(__FILE__, __LINE__); 00552 data->field[cat].analog_days[s].day_s = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00553 if (data->field[cat].analog_days[s].day_s == NULL) alloc_error(__FILE__, __LINE__); 00554 data->field[cat].analog_days[s].ndayschoice = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00555 if (data->field[cat].analog_days[s].ndayschoice == NULL) alloc_error(__FILE__, __LINE__); 00556 data->field[cat].analog_days[s].analog_dayschoice = (tstruct **) malloc(ntime_sub[cat][s] * sizeof(tstruct *)); 00557 if (data->field[cat].analog_days[s].analog_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00558 data->field[cat].analog_days[s].metric_norm = (float **) malloc(ntime_sub[cat][s] * sizeof(float *)); 00559 if (data->field[cat].analog_days[s].metric_norm == NULL) alloc_error(__FILE__, __LINE__); 00560 data->field[cat].analog_days[s].tindex_dayschoice = (int **) malloc(ntime_sub[cat][s] * sizeof(int *)); 00561 if (data->field[cat].analog_days[s].tindex_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00562 for (ii=0; ii<ntime_sub[cat][s]; ii++) { 00563 data->field[cat].analog_days[s].ndayschoice[ii] = data->conf->season[s].ndayschoices; 00564 data->field[cat].analog_days[s].analog_dayschoice[ii] = (tstruct *) NULL; 00565 data->field[cat].analog_days[s].metric_norm[ii] = (float *) NULL; 00566 data->field[cat].analog_days[s].tindex_dayschoice[ii] = (int *) NULL; 00567 } 00568 (void) printf("%s: Searching analog days for season #%d\n", __FILE__, s); 00569 istat = find_the_days(data->field[cat].analog_days[s], data->field[cat].precip_index[s], data->learning->data[s].precip_index, 00570 data->field[cat+2].data[i].down->smean_norm[s], data->learning->data[s].sup_index, 00571 data->field[cat+2].data[i].down->sup_val_norm[s], data->learning->data[s].sup_val, mask_sub, 00572 data->field[cat].data[i].down->days_class_clusters[s], data->learning->data[s].class_clusters, 00573 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00574 data->learning->data[s].time_s->year, 
data->learning->data[s].time_s->month, 00575 data->learning->data[s].time_s->day, data->conf->time_units, 00576 data->field[cat].ntime_ls, data->learning->data[s].ntime, 00577 data->conf->season[s].month, data->conf->season[s].nmonths, 00578 data->conf->season[s].ndays, data->conf->season[s].ndayschoices, data->reg->npts, 00579 data->conf->season[s].shuffle, data->conf->season[s].secondary_choice, 00580 data->conf->season[s].secondary_main_choice, data->conf->season[s].secondary_cov, 00581 data->conf->use_downscaled_year, data->conf->only_wt, 00582 data->field[cat+2].nlon_ls, data->field[cat+2].nlat_ls, 00583 data->learning->sup_nlon, data->learning->sup_nlat); 00584 if (istat != 0) return istat; 00585 } 00586 } 00587 00590 /* Downscale also control run if needed */ 00591 if (data->conf->period_ctrl->downscale == TRUE) 00592 beg_cat = CTRL_SEC_FIELD_LS; 00593 else 00594 beg_cat = SEC_FIELD_LS; 00595 00596 /* Loop over secondary field categories (model run and optionally control run) */ 00597 for (cat=beg_cat; cat>=SEC_FIELD_LS; cat--) { 00598 /* Process only if, for this category, at least one secondary large-scale field is available */ 00599 for (i=0; i<data->field[cat].n_ls; i++) 00600 /* Loop over each season */ 00601 for (s=0; s<data->conf->nseasons; s++) { 00602 data->field[cat].data[i].down->delta[s] = (double *) malloc(ntime_sub[cat][s] * sizeof(double)); 00603 if (data->field[cat].data[i].down->delta[s] == NULL) alloc_error(__FILE__, __LINE__); 00604 data->field[cat].data[i].down->delta_dayschoice[s] = (double **) malloc(ntime_sub[cat][s] * sizeof(double *)); 00605 if (data->field[cat].data[i].down->delta_dayschoice[s] == NULL) alloc_error(__FILE__, __LINE__); 00606 for (ii=0; ii<ntime_sub[cat][s]; ii++) { 00607 data->field[cat].data[i].down->delta_dayschoice[s][ii] = (double *) calloc(data->conf->season[s].ndayschoices, sizeof(double)); 00608 if (data->field[cat].data[i].down->delta_dayschoice[s][ii] == NULL) alloc_error(__FILE__, __LINE__); 00609 } 00610 (void) compute_secondary_large_scale_diff(data->field[cat].data[i].down->delta[s], 00611 data->field[cat].data[i].down->delta_dayschoice[s], 00612 data->field[cat-2].analog_days[s], 00613 data->field[cat].data[i].down->smean_norm[s], data->learning->data[s].sup_index, 00614 data->field[CTRL_SEC_FIELD_LS].data[i].down->var[s], 00615 data->learning->data[s].sup_index_var, ntime_sub[cat][s]); 00616 } 00617 } 00618 00619 } 00620 00623 /* Downscale also control run if needed */ 00624 if (data->conf->period_ctrl->downscale == TRUE) 00625 beg_cat = CTRL_FIELD_LS; 00626 else 00627 beg_cat = FIELD_LS; 00628 00629 /* Loop over large-scale field categories (model run and optionally control run) */ 00630 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00631 /* Process only if, for this category, at least one large-scale field is available */ 00632 if (data->field[cat].n_ls > 0) { 00633 /* Process only first large-scale field. Limitation of the current implementation. 
*/ 00634 i = 0; 00635 00636 if (data->conf->output_only != TRUE) { 00637 00639 /* Take into account the fact that it may be possible that the seasons does not span the whole year */ 00640 ntimes_merged = 0; 00641 merged_times_flag = (short int *) malloc(data->field[cat].ntime_ls * sizeof(short int)); 00642 if (merged_times_flag == NULL) alloc_error(__FILE__, __LINE__); 00643 for (ii=0; ii<data->field[cat].ntime_ls; ii++) merged_times_flag[ii] = 0; 00644 /* Flag all times within the processed seasons, and count number of timestep */ 00645 for (s=0; s<data->conf->nseasons; s++) 00646 for (ii=0; ii<ntime_sub[cat][s]; ii++) { 00647 /* Retrieve current index */ 00648 curindex_merged = data->field[cat].analog_days[s].tindex_s_all[ii]; 00649 /* Check for bounds */ 00650 if (curindex_merged < 0 || curindex_merged >= data->field[cat].ntime_ls) { 00651 (void) fprintf(stderr, "%s: Fatal error: index in merged season vector outside bounds! curindex_merged=%d max=%d\n", 00652 __FILE__, curindex_merged, data->field[cat].ntime_ls-1); 00653 return -1; 00654 } 00655 merged_times_flag[curindex_merged] = 1; 00656 ntimes_merged++; 00657 } 00658 /* Save time index given flag */ 00659 merged_itimes = (int *) malloc(data->field[cat].ntime_ls * sizeof(int)); 00660 if (merged_itimes == NULL) alloc_error(__FILE__, __LINE__); 00661 merged_times = (double *) malloc(ntimes_merged * sizeof(double)); 00662 if (merged_times == NULL) alloc_error(__FILE__, __LINE__); 00663 curindex_merged = 0; 00664 for (ii=0; ii<data->field[cat].ntime_ls; ii++) { 00665 if (merged_times_flag[ii] == 1) { 00666 merged_times[curindex_merged] = data->field[cat].time_ls[ii]; 00667 merged_itimes[ii] = curindex_merged++; 00668 } 00669 else 00670 merged_itimes[ii] = -1; 00671 } 00672 (void) free(merged_times_flag); 00673 00674 data->field[cat].analog_days_year.time = (int *) malloc(ntimes_merged * sizeof(int)); 00675 if (data->field[cat].analog_days_year.time == NULL) alloc_error(__FILE__, __LINE__); 00676 data->field[cat].analog_days_year.tindex = (int *) malloc(ntimes_merged * sizeof(int)); 00677 if (data->field[cat].analog_days_year.tindex == NULL) alloc_error(__FILE__, __LINE__); 00678 data->field[cat].analog_days_year.tindex_all = (int *) malloc(ntimes_merged * sizeof(int)); 00679 if (data->field[cat].analog_days_year.tindex_all == NULL) alloc_error(__FILE__, __LINE__); 00680 data->field[cat].analog_days_year.year = (int *) malloc(ntimes_merged * sizeof(int)); 00681 if (data->field[cat].analog_days_year.year == NULL) alloc_error(__FILE__, __LINE__); 00682 data->field[cat].analog_days_year.month = (int *) malloc(ntimes_merged * sizeof(int)); 00683 if (data->field[cat].analog_days_year.month == NULL) alloc_error(__FILE__, __LINE__); 00684 data->field[cat].analog_days_year.day = (int *) malloc(ntimes_merged * sizeof(int)); 00685 if (data->field[cat].analog_days_year.day == NULL) alloc_error(__FILE__, __LINE__); 00686 data->field[cat].analog_days_year.tindex_s_all = (int *) malloc(ntimes_merged * sizeof(int)); 00687 if (data->field[cat].analog_days_year.tindex_s_all == NULL) alloc_error(__FILE__, __LINE__); 00688 data->field[cat].analog_days_year.year_s = (int *) malloc(ntimes_merged * sizeof(int)); 00689 if (data->field[cat].analog_days_year.year_s == NULL) alloc_error(__FILE__, __LINE__); 00690 data->field[cat].analog_days_year.month_s = (int *) malloc(ntimes_merged * sizeof(int)); 00691 if (data->field[cat].analog_days_year.month_s == NULL) alloc_error(__FILE__, __LINE__); 00692 data->field[cat].analog_days_year.day_s = (int *) 
malloc(ntimes_merged * sizeof(int)); 00693 if (data->field[cat].analog_days_year.day_s == NULL) alloc_error(__FILE__, __LINE__); 00694 data->field[cat].analog_days_year.analog_dayschoice = (tstruct **) malloc(ntimes_merged * sizeof(tstruct *)); 00695 if (data->field[cat].analog_days_year.analog_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00696 data->field[cat].analog_days_year.metric_norm = (float **) malloc(ntimes_merged * sizeof(float *)); 00697 if (data->field[cat].analog_days_year.metric_norm == NULL) alloc_error(__FILE__, __LINE__); 00698 data->field[cat].analog_days_year.tindex_dayschoice = (int **) malloc(ntimes_merged * sizeof(int *)); 00699 if (data->field[cat].analog_days_year.tindex_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00700 for (ii=0; ii<ntimes_merged; ii++) { 00701 data->field[cat].analog_days_year.analog_dayschoice[ii] = (tstruct *) NULL; 00702 data->field[cat].analog_days_year.metric_norm[ii] = (float *) NULL; 00703 data->field[cat].analog_days_year.tindex_dayschoice[ii] = (int *) NULL; 00704 } 00705 data->field[cat].analog_days_year.ndayschoice = (int *) malloc(ntimes_merged * sizeof(int)); 00706 if (data->field[cat].analog_days_year.ndayschoice == NULL) alloc_error(__FILE__, __LINE__); 00707 data->field[cat+2].data[i].down->delta_all = (double *) malloc(ntimes_merged * sizeof(double)); 00708 if (data->field[cat+2].data[i].down->delta_all == NULL) alloc_error(__FILE__, __LINE__); 00709 data->field[cat+2].data[i].down->delta_dayschoice_all = (double **) malloc(ntimes_merged * sizeof(double *)); 00710 if (data->field[cat+2].data[i].down->delta_dayschoice_all == NULL) alloc_error(__FILE__, __LINE__); 00711 data->field[cat].data[i].down->dist_all = (double *) malloc(ntimes_merged * sizeof(double)); 00712 if (data->field[cat].data[i].down->dist_all == NULL) alloc_error(__FILE__, __LINE__); 00713 data->field[cat].data[i].down->days_class_clusters_all = (int *) malloc(ntimes_merged * sizeof(int)); 00714 if (data->field[cat].data[i].down->days_class_clusters_all == NULL) alloc_error(__FILE__, __LINE__); 00715 00716 /* Find maximum number of days choices within all seasons */ 00717 maxndays = data->conf->season[0].ndayschoices; 00718 for (s=0; s<data->conf->nseasons; s++) 00719 if (maxndays < data->conf->season[s].ndayschoices) 00720 maxndays = data->conf->season[s].ndayschoices; 00721 /* Allocate memory for special 2D delta t vector. Initialize to zero because dimensions can vary for each season. 
*/ 00722 for (ii=0; ii<ntimes_merged; ii++) { 00723 data->field[cat+2].data[i].down->delta_dayschoice_all[ii] = (double *) calloc(maxndays, sizeof(double)); 00724 if (data->field[cat+2].data[i].down->delta_dayschoice_all[ii] == NULL) alloc_error(__FILE__, __LINE__); 00725 } 00726 00727 /* Loop over each season */ 00728 data->field[cat].analog_days_year.ntime = 0; 00729 for (s=0; s<data->conf->nseasons; s++) { 00730 /* Merge all seasons of analog_day data, supplemental field index, and cluster info */ 00731 printf("Season: %d\n",s); 00732 istat = merge_seasons(data->field[cat].analog_days_year, data->field[cat].analog_days[s], 00733 merged_itimes, ntimes_merged, ntime_sub[cat][s]); 00734 istat = merge_seasonal_data(data->field[cat+2].data[i].down->delta_all, 00735 data->field[cat+2].data[i].down->delta[s], 00736 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00737 ntimes_merged, ntime_sub[cat][s]); 00738 istat = merge_seasonal_data_2d(data->field[cat+2].data[i].down->delta_dayschoice_all, 00739 data->field[cat+2].data[i].down->delta_dayschoice[s], 00740 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00741 data->conf->season[s].ndayschoices,ntimes_merged, ntime_sub[cat][s]); 00742 istat = merge_seasonal_data(data->field[cat].data[i].down->dist_all, 00743 data->field[cat].data[i].down->dist[s], 00744 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00745 ntimes_merged, ntime_sub[cat][s]); 00746 istat = merge_seasonal_data_i(data->field[cat].data[i].down->days_class_clusters_all, 00747 data->field[cat].data[i].down->days_class_clusters[s], 00748 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00749 ntimes_merged, ntime_sub[cat][s]); 00750 if (istat != 0) { 00751 (void) free(merged_times); 00752 (void) free(merged_itimes); 00753 return istat; 00754 } 00755 data->field[cat].analog_days_year.ntime += ntime_sub[cat][s]; 00756 } 00757 00759 if (data->conf->analog_save == TRUE) { 00760 if (cat == FIELD_LS) 00761 analog_file = data->conf->analog_file_other; 00762 else 00763 analog_file = data->conf->analog_file_ctrl; 00764 (void) save_analog_data(data->field[cat].analog_days_year, data->field[cat+2].data[i].down->delta_all, 00765 data->field[cat+2].data[i].down->delta_dayschoice_all, 00766 data->field[cat].data[i].down->dist_all, data->field[cat].data[i].down->days_class_clusters_all, 00767 merged_times, analog_file, data); 00768 } 00769 } 00770 else { 00771 if (cat == FIELD_LS) 00772 analog_file = data->conf->analog_file_other; 00773 else 00774 analog_file = data->conf->analog_file_ctrl; 00775 (void) printf("%s: Reading analog data from file %s\n", __FILE__, analog_file); 00776 (void) read_analog_data(&(data->field[cat].analog_days_year), &(data->field[cat+2].data[i].down->delta_all), 00777 &merged_times, analog_file, data->conf->obs_var->timename); 00778 ntimes_merged = data->field[cat].analog_days_year.ntime; 00779 } 00780 00781 /* Process all data */ 00782 if (data->conf->output == TRUE) { 00783 if (cat == FIELD_LS) { 00784 period = data->conf->period; 00785 } 00786 else { 00787 period = data->conf->period_ctrl; 00788 } 00789 istat = output_downscaled_analog(data->field[cat].analog_days_year, data->field[cat+2].data[i].down->delta_all, 00790 data->conf->output_month_begin, data->conf->output_path, data->conf->config, 00791 data->conf->time_units, data->conf->cal_type, data->conf->deltat, 00792 data->conf->format, data->conf->compression, data->conf->compression_level, 00793 data->conf->debug, 00794 data->info, data->conf->obs_var, period, merged_times, ntimes_merged); 
00795 if (istat != 0) { 00796 (void) free(merged_times); 00797 (void) free(merged_itimes); 00798 return istat; 00799 } 00800 } 00801 (void) free(merged_times); 00802 (void) free(merged_itimes); 00803 } 00804 } 00805 00806 /* Free memory for specific downscaling buffers */ 00807 if (data->conf->output_only != TRUE) { 00808 for (cat=0; cat<NCAT; cat++) 00809 (void) free(ntime_sub[cat]); 00810 (void) free(ntime_sub); 00811 00812 /* Free mask memory if needed */ 00813 if (mask_sub != NULL) 00814 (void) free(mask_sub); 00815 } 00816 00817 /* Success return */ 00818 return 0; 00819 }