Include file for main program of downscaling algorithm.
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <libgen.h>
#include <zlib.h>
#include <hdf5.h>
#include <netcdf.h>
#include <udunits.h>
#include <gsl/gsl_sort.h>
#include <utils.h>
#include <clim.h>
#include <classif.h>
#include <pceof.h>
#include <io.h>
#include <regress.h>
Data Structures

struct var_struct
    Data structure var_struct for observation database variables.
struct analog_day_struct
    Analog day structure analog_day_struct, season-dependent.
struct eof_data_struct
    EOF data field structure eof_data_struct.
struct eof_info_struct
    EOF information field structure eof_info_struct.
struct clim_info_struct
    Climatology information structure clim_info_struct.
struct downscale_struct
    Downscaling data structure.
struct field_data_struct
    Field data structure field_data_struct.
struct field_struct
    Data structure to hold field information and data field_struct.
struct learning_data_struct
    Season-dependent data structure learning_data_struct.
struct learning_eof_struct
    Data structure for EOF-related learning data learning_eof_struct.
struct learning_struct
    Data structure for learning data learning_struct.
struct reg_struct
    Data structure for regression.
struct mask_struct
    Data structure for mask.
struct season_struct
    Seasons definition season_struct.
struct period_struct
    Period definition period_struct.
struct conf_struct
    General configuration data structure conf_struct.
struct data_struct
    MASTER data structure data_struct.
Defines

#define _GNU_SOURCE
    GNU extensions.
#define PAGESIZE sysconf(_SC_PAGESIZE)
    Pagesize value for the system for use with mmap.
#define NCAT 4
    Number of field type categories.
#define FIELD_LS 0
    Large-scale fields category.
#define CTRL_FIELD_LS 1
    Large-scale fields category for control-run.
#define SEC_FIELD_LS 2
    Large-scale secondary fields category.
#define CTRL_SEC_FIELD_LS 3
    Large-scale secondary fields category for control-run.
#define MAXPATH 5000
    Maximum length of paths/filenames strings.
#define DEFLATE_LEVEL 6
    Compression level.
Functions

int load_conf (data_struct *data, char *fileconf)
    Read and set variables from XML configuration file.
int wt_downscaling (data_struct *data)
    Downscaling climate scenarios program using weather typing.
int wt_learning (data_struct *data)
    Compute or read learning data needed for downscaling climate scenarios using weather typing.
int read_large_scale_fields (data_struct *data)
    Read large-scale fields data from input files.
int read_large_scale_eof (data_struct *data)
    Read Large-Scale EOFs from input files.
int read_learning_obs_eof (data_struct *data)
    Read observation data EOFs for learning period.
int read_learning_rea_eof (data_struct *data)
    Read reanalysis data EOFs for learning period.
int read_learning_fields (data_struct *data)
    Read Learning data from input files.
int read_obs_period (double **buffer, double **lon, double **lat, double *missing_value, data_struct *data, char *varname, int *year, int *month, int *day, int *nlon, int *nlat, int ntime)
    Read observation data for a given period.
int read_field_subdomain_period (double **buffer, double **lon, double **lat, double *missing_value, char *varname, int *year, int *month, int *day, double lonmin, double lonmax, double latmin, double latmax, char *coords, char *gridname, char *lonname, char *latname, char *dimxname, char *dimyname, char *timename, char *filename, int *nlon, int *nlat, int ntime)
    Read NetCDF field and extract subdomain and subperiod.
int remove_clim (data_struct *data)
    Remove climatologies.
int read_regression_points (reg_struct *reg)
    Read regression point positions.
int read_mask (mask_struct *mask)
    Read a mask file.
int find_the_days (analog_day_struct analog_days, double *precip_index, double *precip_index_learn, double *sup_field_index, double *sup_field_index_learn, double *sup_field, double *sup_field_learn, short int *mask, int *class_clusters, int *class_clusters_learn, int *year, int *month, int *day, int *year_learn, int *month_learn, int *day_learn, char *time_units, int ntime, int ntime_learn, int *months, int nmonths, int ndays, int ndayschoices, int npts, int shuffle, int sup, int sup_choice, int sup_cov, int use_downscaled_year, int only_wt, int nlon, int nlat, int sup_nlon, int sup_nlat)
    Find analog days given cluster, supplemental large-scale field, and precipitation distances.
void compute_secondary_large_scale_diff (double *delta, double **delta_dayschoice, analog_day_struct analog_days, double *sup_field_index, double *sup_field_index_learn, double sup_field_var, double sup_field_var_learn, int ntimes)
    Compute secondary large-scale field difference between value of learning field at analog date vs model field at downscaled date.
int merge_seasons (analog_day_struct analog_days_merged, analog_day_struct analog_days, int *merged_itimes, int ntimes_merged, int ntimes)
    Merge seasonal analog date data.
int merge_seasonal_data (double *buf_merged, double *buf, analog_day_struct analog_days, int *merged_itimes, int dimx, int dimy, int ntimes_merged, int ntimes)
    Merge seasonal 3D double field data using analog day structure.
int merge_seasonal_data_i (int *buf_merged, int *buf, analog_day_struct analog_days, int *merged_itimes, int dimx, int dimy, int ntimes_merged, int ntimes)
    Merge seasonal 3D integer field data using analog day structure.
int merge_seasonal_data_2d (double **buf_merged, double **buf, analog_day_struct analog_days, int *merged_itimes, int dimx, int dimy, int supdim, int ntimes_merged, int ntimes)
    Merge seasonal 3D double field data using analog day structure, with another supplemental dimension.
int output_downscaled_analog (analog_day_struct analog_days, double *delta, int output_month_begin, char *output_path, char *config, char *time_units, char *cal_type, double deltat, int file_format, int file_compression, int file_compression_level, int debug, info_struct *info, var_struct *obs_var, period_struct *period, double *time_ls, int ntime)
    Read analog day data and write it for downscaled period.
int write_learning_fields (data_struct *data)
    Write learning fields for later use.
int write_regression_fields (data_struct *data, char *filename, double **timeval, int *ntime, double **precip_index, double **distclust, double **sup_index)
    Write regression-related downscaling fields for diagnostics use.
void read_analog_data (analog_day_struct *analog_days, double **delta, double **time_ls, char *filename, char *timename)
    Read analog data from NetCDF input file.
void save_analog_data (analog_day_struct analog_days, double *delta, double **delta_dayschoice, double *dist, int *cluster, double *time_ls, char *filename, data_struct *data)
    Save analog data information for further use.
void free_main_data (data_struct *data)
    Free main data structure.
const char *get_filename_ext (const char *filename)
Include file for main program of downscaling algorithm.
Definition in file dsclim.h.
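These functions are intended to be driven from the program's main routine. A minimal, hypothetical sketch of such a driver follows; the include path, the stack allocation of the MASTER structure, the error handling and whether wt_learning() is called separately or internally by wt_downscaling() are assumptions, not taken from the actual dsclim sources.

    #include <stdio.h>
    #include <stdlib.h>
    #include <dsclim.h>   /* this header; adjust the include path as needed */

    int main(int argc, char **argv)
    {
      data_struct data;   /* MASTER data structure */
      int istat;

      if (argc < 2) {
        (void) fprintf(stderr, "Usage: %s configuration.xml\n", argv[0]);
        return EXIT_FAILURE;
      }

      /* Read and set variables from the XML configuration file */
      istat = load_conf(&data, argv[1]);

      /* Downscale climate scenarios using weather typing */
      if (istat == 0)
        istat = wt_downscaling(&data);

      /* Free the MASTER data structure */
      free_main_data(&data);

      return (istat == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
    }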
#define CTRL_FIELD_LS 1
Large-scale fields category for control-run.
Definition at line 97 of file dsclim.h.
Referenced by free_main_data(), and wt_downscaling().
#define CTRL_SEC_FIELD_LS 3
Large-scale secondary fields category for control-run.
Definition at line 101 of file dsclim.h.
Referenced by wt_downscaling().
#define FIELD_LS 0
Large-scale fields category.
Definition at line 95 of file dsclim.h.
Referenced by free_main_data(), read_large_scale_fields(), and wt_downscaling().
#define MAXPATH 5000
Maximum length of paths/filenames strings.
Definition at line 104 of file dsclim.h.
Referenced by create_netcdf(), load_conf(), output_downscaled_analog(), read_netcdf_var_1d(), read_netcdf_var_2d(), read_netcdf_var_3d(), read_netcdf_var_3d_2d(), read_netcdf_var_generic_val(), read_obs_period(), show_license(), write_netcdf_dims_3d(), write_netcdf_var_3d(), and write_netcdf_var_3d_2d().
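Since MAXPATH bounds every path or filename string, the usual pattern is a fixed-size buffer filled with snprintf. A minimal, hypothetical sketch (the helper name and the pieces being concatenated are illustrative only, not taken from the listed callers):

    #include <stdio.h>

    /* Build "<output_path>/<varname>.nc" into a caller-provided buffer of at least MAXPATH bytes */
    static void build_output_filename(char *filename, const char *output_path, const char *varname)
    {
      (void) snprintf(filename, (size_t) MAXPATH, "%s/%s.nc", output_path, varname);
    }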
#define NCAT 4
Number of field type categories.
Definition at line 93 of file dsclim.h.
Referenced by free_main_data(), load_conf(), read_large_scale_fields(), remove_clim(), and wt_downscaling().
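Code that must handle every field category simply loops from 0 to NCAT-1, as free_main_data() does. A minimal sketch (the helper name is hypothetical; the indexing into data->field follows the source listing of free_main_data() further below):

    #include <stdio.h>

    static void print_all_categories(data_struct *data)
    {
      int cat;  /* Field category loop counter */

      for (cat = 0; cat < NCAT; cat++) {
        /* cat takes, in order, FIELD_LS, CTRL_FIELD_LS, SEC_FIELD_LS and CTRL_SEC_FIELD_LS */
        printf("Category %d holds %d large-scale field(s)\n", cat, data->field[cat].n_ls);
      }
    }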
#define PAGESIZE sysconf(_SC_PAGESIZE)

Pagesize value for the system for use with mmap.
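A hypothetical sketch of why the page size matters when using mmap: mapping lengths are normally rounded up to a whole number of pages. sys/mman.h is not among the includes above and is assumed here, as is the helper name.

    #include <sys/mman.h>

    /* Round nbytes up to a multiple of the page size and create an anonymous private mapping */
    static void *map_anonymous(size_t nbytes)
    {
      size_t pagesize = (size_t) PAGESIZE;
      size_t maplen = ((nbytes + pagesize - 1) / pagesize) * pagesize;
      void *buf = mmap(NULL, maplen, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

      return (buf == MAP_FAILED) ? NULL : buf;
    }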
#define SEC_FIELD_LS 2
Large-scale secondary fields category.
Definition at line 99 of file dsclim.h.
Referenced by read_large_scale_fields(), and wt_downscaling().
void compute_secondary_large_scale_diff (double *delta,
                                         double **delta_dayschoice,
                                         analog_day_struct analog_days,
                                         double *sup_field_index,
                                         double *sup_field_index_learn,
                                         double sup_field_var,
                                         double sup_field_var_learn,
                                         int ntimes)
Compute the secondary large-scale field difference between the value of the learning field at the analog date and the model field at the downscaled date.
Parameters:
    [out]  delta                  Difference between value of secondary large-scale learning field at analog date vs model field at downscaled date
    [out]  delta_dayschoice       Difference between value of secondary large-scale learning field at analog date vs model field at downscaled date, for ndayschoice analog days
    [in]   analog_days            Analog days time indexes and dates with corresponding dates being downscaled
    [in]   sup_field_index        Secondary large-scale field index of days to downscale
    [in]   sup_field_index_learn  Secondary large-scale field index of learning period
    [in]   sup_field_var          Secondary large-scale field index variance of days to downscale
    [in]   sup_field_var_learn    Secondary large-scale field index variance of learning period
    [in]   ntimes                 Number of times of days to downscale for this period
Compute normalized secondary large-scale field difference (delta)
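Written out in formula form (this merely restates the source listing below), the value stored for each downscaled day t is

    \[ \Delta_t \;=\; I_t\,\sqrt{V} \;-\; I^{\mathrm{learn}}_{a(t)}\,\sqrt{V^{\mathrm{learn}}} \]

where I = sup_field_index, V = sup_field_var, I^learn = sup_field_index_learn, V^learn = sup_field_var_learn and a(t) = analog_days.tindex[t]; evaluating the same expression at tindex_dayschoice[t][ii] gives delta_dayschoice[t][ii].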
Definition at line 60 of file compute_secondary_large_scale_diff.c.
References analog_day_struct::ndayschoice, analog_day_struct::tindex, and analog_day_struct::tindex_dayschoice.
Referenced by wt_downscaling().
    {
      int t;   /* Time loop counter */
      int ii;  /* ndayschoice loop counter */
      // double sup_diff; /**< Secondary large-scale field difference. */
      // int count = 0;

      /* Process each downscaled day */
      for (t=0; t<ntimes; t++) {
        delta[t] = (sup_field_index[t] * sqrt(sup_field_var)) -
          (sup_field_index_learn[analog_days.tindex[t]] * sqrt(sup_field_var_learn));
        for (ii=0; ii<analog_days.ndayschoice[t]; ii++)
          delta_dayschoice[t][ii] = (sup_field_index[t] * sqrt(sup_field_var)) -
            (sup_field_index_learn[analog_days.tindex_dayschoice[t][ii]] * sqrt(sup_field_var_learn));

        // if (fabs(sup_diff) > 2.0) {
        //   delta[t] = sup_diff;
        //   printf("%d %lf\n",t,delta[t]);
        //   count++;
        // }
        // else
        //   delta[t] = 0.0;
        // if (t == (ntimes-1))
        //   printf("%d %lf %lf %lf %d %d %d\n",count,delta[t],sup_field_index[t],sup_field_index_learn[analog_days.tindex[t]],analog_days.year[t],analog_days.month[t],analog_days.day[t]);
      }
    }
int find_the_days (analog_day_struct analog_days,
                   double *precip_index,
                   double *precip_index_learn,
                   double *sup_field_index,
                   double *sup_field_index_learn,
                   double *sup_field,
                   double *sup_field_learn,
                   short int *mask,
                   int *class_clusters,
                   int *class_clusters_learn,
                   int *year,
                   int *month,
                   int *day,
                   int *year_learn,
                   int *month_learn,
                   int *day_learn,
                   char *time_units,
                   int ntime,
                   int ntime_learn,
                   int *months,
                   int nmonths,
                   int ndays,
                   int ndayschoices,
                   int npts,
                   int shuffle,
                   int sup,
                   int sup_choice,
                   int sup_cov,
                   int use_downscaled_year,
                   int only_wt,
                   int nlon,
                   int nlat,
                   int sup_nlon,
                   int sup_nlat)
Find analog days given cluster, supplemental large-scale field, and precipitation distances.
Parameters:
    [out]  analog_days            Analog days time indexes and dates, as well as corresponding downscale dates
    [in]   precip_index           Precipitation index of days to downscale
    [in]   precip_index_learn     Precipitation index of learning period
    [in]   sup_field_index        Secondary large-scale field index of days to downscale
    [in]   sup_field_index_learn  Secondary large-scale field index of learning period
    [in]   sup_field              Secondary large-scale field of days to downscale
    [in]   sup_field_learn        Secondary large-scale field of learning period
    [in]   mask                   Mask for covariance of secondary large-scale field
    [in]   class_clusters         Days classification cluster index of days to downscale
    [in]   class_clusters_learn   Days classification cluster index of learning period
    [in]   year                   Years of days to downscale
    [in]   month                  Months of days to downscale
    [in]   day                    Days of month of days to downscale
    [in]   year_learn             Years of days of learning period
    [in]   month_learn            Months of days of learning period
    [in]   day_learn              Days of month of days of learning period
    [in]   time_units             Base time units for downscaling
    [in]   ntime                  Number of times of days to downscale
    [in]   ntime_learn            Number of times of learning period
    [in]   months                 Months of the year in each season
    [in]   nmonths                Number of months in each season
    [in]   ndays                  Number of +/- days to look around the day of the year being downscaled
    [in]   ndayschoices           Number of days to choose in the first selection
    [in]   npts                   Number of regression points of the precipitation index
    [in]   shuffle                Whether to shuffle the days of the first selection
    [in]   sup                    Whether to use the secondary large-scale field in the final selection of the analog day
    [in]   sup_choice             Whether to use the secondary large-scale field in the first selection of the analog day
    [in]   sup_cov                Whether to use the covariance of fields instead of averaged-field differences
    [in]   use_downscaled_year    Whether to also search for the analog day within the year currently being downscaled
    [in]   only_wt                Whether to restrict the search to days of the same weather type
    [in]   nlon                   Longitude dimension
    [in]   nlat                   Latitude dimension
    [in]   sup_nlon               Secondary large-scale field longitude dimension (for covariance)
    [in]   sup_nlat               Secondary large-scale field latitude dimension (for covariance)
Normalize the two metrics
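In formula form (as can be read from the source listing further below), each candidate learning day k within the +/- ndays window receives the combined metric

    \[ m^{\mathrm{norm}}_k \;=\; \frac{m_k - \bar{m}}{s_m} \;+\; \frac{m^{\mathrm{sup}}_k - \bar{m}^{\mathrm{sup}}}{s_{m^{\mathrm{sup}}}} \]

where m_k is the precipitation-index distance, m_k^sup the secondary large-scale field distance, and the means and standard deviations are computed over all candidate days; the second term is only added when the secondary field participates in the first selection (sup_choice).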
Definition at line 59 of file find_the_days.c.
References alloc_error(), analog_day_struct::analog_dayschoice, covariance_fields_spatial(), tstruct::day, analog_day_struct::day, analog_day_struct::day_s, dayofclimyear(), find_the_days(), tstruct::hour, is_leap_year(), analog_day_struct::metric_norm, tstruct::min, tstruct::month, analog_day_struct::month, analog_day_struct::month_s, tstruct::sec, analog_day_struct::time, analog_day_struct::tindex, analog_day_struct::tindex_all, analog_day_struct::tindex_dayschoice, analog_day_struct::tindex_s_all, TRUE, tstruct::year, analog_day_struct::year, and analog_day_struct::year_s.
Referenced by find_the_days(), and wt_downscaling().
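Before the full source listing, here is a minimal, self-contained sketch of the central step, z-scoring the metric and keeping the ndayschoices best candidates, using the same GSL calls as the function itself (the metric values are illustrative only):

    #include <stdio.h>
    #include <gsl/gsl_statistics.h>
    #include <gsl/gsl_sort.h>

    int main(void)
    {
      double metric[] = { 3.2, 1.1, 4.7, 0.9, 2.5, 5.0 };  /* precipitation-index distances */
      size_t ntime_days = sizeof(metric) / sizeof(metric[0]);
      size_t ndayschoices = 3;
      double metric_norm[6];
      size_t metric_index[3];
      size_t tl, ii;

      /* z-score the metric over the candidate days */
      double varmean = gsl_stats_mean(metric, 1, ntime_days);
      double varstd = gsl_stats_sd_m(metric, 1, ntime_days, varmean);
      for (tl = 0; tl < ntime_days; tl++)
        metric_norm[tl] = (metric[tl] - varmean) / varstd;

      /* indexes of the ndayschoices smallest normalized metrics */
      (void) gsl_sort_smallest_index(metric_index, ndayschoices, metric_norm, 1, ntime_days);

      for (ii = 0; ii < ndayschoices; ii++)
        printf("candidate %lu: learning-day index %lu, metric %.3f\n",
               (unsigned long) ii, (unsigned long) metric_index[ii], metric_norm[metric_index[ii]]);

      return 0;
    }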
00064 { 00102 int *buf_sub_i = NULL; /* Temporary buffer for time index of subperiod */ 00103 int *buf_learn_sub_i = NULL; /* Temporary buffer for time index of learning subperiod */ 00104 int ntime_sub; /* Number of times in subperiod */ 00105 int ntime_learn_sub; /* Number of times in learning subperiod */ 00106 00107 unsigned long int *random_num = NULL; /* Random number for shuffle */ 00108 00109 const gsl_rng_type *T = NULL; /* Random number generator type for shuffle */ 00110 gsl_rng *rng = NULL; /* Random number generator for shuffle */ 00111 00112 int cur_dayofy; /* Current day of year being downscaled */ 00113 int learn_dayofy; /* Learning day of year being processed */ 00114 int diff_day_learn; /* Difference in terms of day of year between downscaled day of year and learning one */ 00115 00116 double max_metric = 0.0; /* Maximum metric value. The metric is the value used to compare days 00117 (cluster distance, index distance, etc.) */ 00118 double max_metric_sup = 0.0; /* Maximum metric value for secondary large-scale field metric. */ 00119 double min_metric = 0.0; /* Minimum metric */ 00120 00121 double precip_diff; /* Squared sum of regressed precipitation difference (between downscaled and learning day) over all points. */ 00122 double diff_precip_pt; /* Regressed precipitation difference (between downscaled and learning day) for 1 point. */ 00123 double sup_diff; /* Secondary large-scale field difference (between downscaled and learning day) */ 00124 00125 double varstd; /* Standard deviation of precipitation index metric */ 00126 double varmean; /* Mean of precipitation index metric */ 00127 double varstd_sup; /* Standard deviation of secondary large-scale field metric */ 00128 double varmean_sup; /* Mean of secondary large-scale field metric */ 00129 00130 double *metric = NULL; /* Precipitation index metric buffer */ 00131 double *metric_norm = NULL; /* Normalized precipitation index metric buffer */ 00132 double *metric_sup = NULL; /* Secondary large-scale field metric */ 00133 double *metric_sup_norm = NULL; /* Normalized secondary large-scale field metric */ 00134 size_t *metric_index = NULL; /* Metric sorted index of ndayschoices days */ 00135 size_t *random_index = NULL; /* Shuffled metric index of ndayschoices days */ 00136 int min_metric_index; /* Index of minimum metric */ 00137 int max_metric_index; /* Index of maximum metric */ 00138 int *clust_diff = NULL; /* Cluster number differences between cluster of learning day and cluster of day being downscaled */ 00139 00140 int ntime_days; /* Number of days in learning period within the +-ndays of downscaled day of year */ 00141 int *ntime_days_learn = NULL; /* Time index of the learning subperiod corresponding to all valid days within the +-ndays of downscaled day of year */ 00142 00143 int ii; /* Loop counter */ 00144 int t; /* Time loop counter */ 00145 int tt; /* Time loop counter */ 00146 int tl; /* Time loop counter */ 00147 int pts; /* Regression points loop counter */ 00148 int ndays_year; /* Number of days in a year */ 00149 00150 ut_system *unitSystem = NULL; /* Unit System (udunits) */ 00151 ut_unit *dataunits = NULL; /* udunits variable */ 00152 int istat; /* Return status of functions */ 00153 double timei; /* udunits Time value */ 00154 00155 /* Initialize udunits */ 00156 ut_set_error_message_handler(ut_ignore); 00157 unitSystem = ut_read_xml(NULL); 00158 ut_set_error_message_handler(ut_write_to_stderr); 00159 dataunits = ut_parse(unitSystem, time_units, UT_ASCII); 00160 00161 /* Initialize random 
number generator if needed */ 00162 if (shuffle == TRUE) { 00163 T = gsl_rng_default; 00164 rng = gsl_rng_alloc(T); 00165 (void) gsl_rng_set(rng, time(NULL)); 00166 /* Allocate memory */ 00167 random_num = (unsigned long int *) malloc(ndayschoices * sizeof(unsigned long int)); 00168 if (random_num == NULL) alloc_error(__FILE__, __LINE__); 00169 random_index = (size_t *) malloc(ndayschoices * sizeof(size_t)); 00170 if (random_index == NULL) alloc_error(__FILE__, __LINE__); 00171 } 00172 00173 /* Allocate memory for metric index */ 00174 metric_index = (size_t *) malloc(ndayschoices * sizeof(size_t)); 00175 if (metric_index == NULL) alloc_error(__FILE__, __LINE__); 00176 00177 /* Select correct months for the current season in the time vectors of the downscaled and learning period */ 00178 ntime_sub = 0; 00179 for (t=0; t<ntime; t++) 00180 for (tt=0; tt<nmonths; tt++) 00181 if (month[t] == months[tt]) { 00182 buf_sub_i = (int *) realloc(buf_sub_i, (ntime_sub+1) * sizeof(int)); 00183 if (buf_sub_i == NULL) alloc_error(__FILE__, __LINE__); 00184 buf_sub_i[ntime_sub++] = t; 00185 } 00186 00187 ntime_learn_sub = 0; 00188 for (t=0; t<ntime_learn; t++) 00189 for (tt=0; tt<nmonths; tt++) 00190 if (month_learn[t] == months[tt]) { 00191 buf_learn_sub_i = (int *) realloc(buf_learn_sub_i, (ntime_learn_sub+1) * sizeof(int)); 00192 if (buf_learn_sub_i == NULL) alloc_error(__FILE__, __LINE__); 00193 buf_learn_sub_i[ntime_learn_sub++] = t; 00194 } 00195 00196 /* Process each downscaled day */ 00197 for (t=0; t<ntime_sub; t++) { 00198 00199 #if DEBUG > 7 00200 printf("%d %d %d %d\n",t,year[buf_sub_i[t]],month[buf_sub_i[t]],day[buf_sub_i[t]]); 00201 #endif 00202 00203 /* Compute the current downscaled day of year being processed */ 00204 cur_dayofy = dayofclimyear(day[buf_sub_i[t]], month[buf_sub_i[t]]); 00205 if (is_leap_year(year[buf_sub_i[t]])) 00206 ndays_year = 366; 00207 else 00208 ndays_year = 365; 00209 00210 /* Initializing */ 00211 ntime_days = 0; 00212 max_metric = -9999999.9; 00213 max_metric_sup = -9999999.9; 00214 00215 /* Search analog days in learning period */ 00216 for (tl=0; tl<ntime_learn_sub; tl++) { 00217 00218 /* If use_downscaled_year != 1, check that we don't search the analog day in the downscaled year. 
*/ 00219 if (use_downscaled_year != 0 || (use_downscaled_year == 0 && year_learn[buf_learn_sub_i[tl]] != year[buf_sub_i[t]])) { 00220 00221 /* Compute the learning period day of year and compute the difference with the current processed day of year */ 00222 learn_dayofy = dayofclimyear(day_learn[buf_learn_sub_i[tl]], month_learn[buf_learn_sub_i[tl]]); 00223 diff_day_learn = (int) fminl( (float) abs(cur_dayofy - learn_dayofy), (float) abs(cur_dayofy - learn_dayofy + ndays_year) ); 00224 diff_day_learn = (int) fminl( (float) diff_day_learn, (float) abs(cur_dayofy - learn_dayofy - ndays_year) ); 00225 00226 /* We are within the day of year range */ 00227 if (diff_day_learn <= ndays) { 00228 00229 /* Allocate memory */ 00230 metric = (double *) realloc(metric, (ntime_days+1) * sizeof(double)); 00231 if (metric == NULL) alloc_error(__FILE__, __LINE__); 00232 metric_norm = (double *) realloc(metric_norm, (ntime_days+1) * sizeof(double)); 00233 if (metric_norm == NULL) alloc_error(__FILE__, __LINE__); 00234 00235 /* Compute precipitation index difference and precipitation index metric */ 00236 precip_diff = 0.0; 00237 for (pts=0; pts<npts; pts++) { 00238 diff_precip_pt = precip_index[pts+t*npts] - precip_index_learn[pts+tl*npts]; 00239 precip_diff += (diff_precip_pt*diff_precip_pt); 00240 /* 00241 if (t == 4595 && year_learn[buf_learn_sub_i[tl]] == 2005 && month_learn[buf_learn_sub_i[tl]] == 5 && day_learn[buf_learn_sub_i[tl]] == 29) { 00242 printf("%d %d %lf %lf %lf %lf\n",t,pts,precip_diff,diff_precip_pt,precip_index[pts+t*npts],precip_index_learn[pts+tl*npts]); 00243 } 00244 */ 00245 /* if (t == 4595) 00246 printf("%d %d %lf %lf %lf %lf\n",t,pts,precip_diff,diff_precip_pt,precip_index[pts+t*npts],precip_index_learn[pts+tl*npts]);*/ 00247 00248 } 00249 metric[ntime_days] = sqrt(precip_diff); 00250 // if (t == (ntime_sub-1)) 00251 // printf("metric before max %d %lf\n",ntime_days,metric[ntime_days]); 00252 00253 /* Store the maximum metric value and its index */ 00254 if (metric[ntime_days] > max_metric) { 00255 // if (t == (ntime_sub-1)) 00256 // printf("%d %lf %lf\n",ntime_days,max_metric,metric[ntime_days]); 00257 max_metric = metric[ntime_days]; 00258 max_metric_index = ntime_days; 00259 } 00260 00261 /* If we want to also use the secondary large-scale fields in the first selection of days */ 00262 if (sup_choice == TRUE || sup == TRUE) { 00263 /* Allocate memory */ 00264 metric_sup = (double *) realloc(metric_sup, (ntime_days+1) * sizeof(double)); 00265 if (metric_sup == NULL) alloc_error(__FILE__, __LINE__); 00266 metric_sup_norm = (double *) realloc(metric_sup_norm, (ntime_days+1) * sizeof(double)); 00267 if (metric_sup_norm == NULL) alloc_error(__FILE__, __LINE__); 00268 00269 if (sup_cov != TRUE) { 00270 /* Compute supplemental field index difference */ 00271 sup_diff = sup_field_index[t] - sup_field_index_learn[buf_learn_sub_i[tl]]; 00272 metric_sup[ntime_days] = sqrt(sup_diff * sup_diff); 00273 } 00274 else { 00275 /* Compute covariance of supplemental field */ 00276 if (nlon != sup_nlon || nlat != sup_nlat) { 00277 (void) fprintf(stderr, "%s: Dimensions of downscaled large-scale secondary field (nlat=%d nlon=%d) are not the same as the learning field (nlat=%d nlon=%d. 
Cannot proceed...\n", __FILE__, nlat, nlon, sup_nlat, sup_nlon); 00278 return -1; 00279 } 00280 (void) covariance_fields_spatial(&sup_diff, sup_field, sup_field_learn, mask, t, tl, sup_nlon, sup_nlat); 00281 metric_sup[ntime_days] = sqrt(sup_diff * sup_diff); 00282 } 00283 /* Store the maximum value and its index */ 00284 if (metric_sup[ntime_days] > max_metric_sup) 00285 max_metric_sup = metric_sup[ntime_days]; 00286 00287 // if (t >= (ntime_sub-5)) 00288 // if (year_learn[buf_learn_sub_i[tl]] == 2005 && month_learn[buf_learn_sub_i[tl]] == 5 && day_learn[buf_learn_sub_i[tl]] == 29) { 00289 // printf("%d %lf %lf %lf %lf %lf %lf\n",ntime_days,max_metric_sup,metric_sup[ntime_days],sup_field_index[t],sup_field_index_learn[buf_learn_sub_i[tl]],metric[ntime_days],precip_diff); 00290 // } 00291 00292 } 00293 00294 /* Compute cluster difference */ 00295 clust_diff = (int *) realloc(clust_diff, (ntime_days+1) * sizeof(int)); 00296 if (clust_diff == NULL) alloc_error(__FILE__, __LINE__); 00297 00298 clust_diff[ntime_days] = class_clusters_learn[tl] - class_clusters[t]; 00299 00300 /* Store the index in the time vector of the selected day */ 00301 ntime_days_learn = (int *) realloc(ntime_days_learn, (ntime_days+1) * sizeof(int)); 00302 if (ntime_days_learn == NULL) alloc_error(__FILE__, __LINE__); 00303 00304 ntime_days_learn[ntime_days] = buf_learn_sub_i[tl]; 00305 00306 /* 00307 if (t == (ntime_sub-1)) { 00308 printf("!clust %d %d %d %d %d %d day=%d\n",year_learn[buf_learn_sub_i[tl]], month_learn[buf_learn_sub_i[tl]], day_learn[buf_learn_sub_i[tl]], clust_diff[ntime_days], class_clusters_learn[tl], class_clusters[t],t); 00309 if (year_learn[buf_learn_sub_i[tl]] == 1986 && month_learn[buf_learn_sub_i[tl]] == 5 && day_learn[buf_learn_sub_i[tl]] == 25) 00310 for (pts=0; pts<npts; pts++) 00311 printf("!!! 
%d %d %d %d %d %lf %lf %lf %lf %lf\n",pts,ntime_days,year_learn[buf_learn_sub_i[tl]], month_learn[buf_learn_sub_i[tl]], day_learn[buf_learn_sub_i[tl]], (precip_index[pts+t*npts] - precip_index_learn[pts+tl*npts]), (precip_index[pts+t*npts] - precip_index_learn[pts+tl*npts])*(precip_index[pts+t*npts] - precip_index_learn[pts+tl*npts]), metric[ntime_days], precip_index[pts+t*npts],precip_index_learn[pts+tl*npts]); 00312 } 00313 */ 00314 00315 /* Count days within day of year range */ 00316 ntime_days++; 00317 } 00318 } 00319 } 00320 00321 /* If at least one day was in range */ 00322 if (ntime_days > 0) { 00323 00324 if (only_wt != 0) 00325 /* Put the maximum value when cluster number is not the same */ 00326 /* Parse each days within range */ 00327 for (tl=0; tl<ntime_days; tl++) { 00328 if (clust_diff[tl] != 0) { 00329 metric[tl] = max_metric; 00330 if (sup_choice == TRUE || sup == TRUE) 00331 metric_sup[tl] = max_metric_sup; 00332 } 00333 } 00334 00335 /* 00336 for (tl=0; tl<ntime_days; tl++) 00337 if (year_learn[ntime_days_learn[tl]] == 2005 && month_learn[ntime_days_learn[tl]] == 5 && day_learn[ntime_days_learn[tl]] == 29) { 00338 if (clust_diff[tl] == 0) 00339 printf("after max metric %lf max_metric=%lf %d\n",metric[tl],max_metric,max_metric_index); 00340 } 00341 */ 00342 00344 /* Compute the standard deviation */ 00345 varmean = gsl_stats_mean(metric, 1, (size_t) ntime_days); 00346 varstd = gsl_stats_sd_m(metric, 1, (size_t) ntime_days, varmean); 00347 if (sup_choice == TRUE) { 00348 /* Do the same if needed for secondary large-scale field */ 00349 varmean_sup = gsl_stats_mean(metric_sup, 1, (size_t) ntime_days); 00350 varstd_sup = gsl_stats_sd_m(metric_sup, 1, (size_t) ntime_days, varmean_sup); 00351 /* Apply normalization and sum the two metrics if we use the secondary large-scale field in the first selection */ 00352 /* and also in the second and final selection */ 00353 if (sup_choice == TRUE) { 00354 /* Process each days within range */ 00355 for (tl=0; tl<ntime_days; tl++) { 00356 /* 00357 if (tl == 0) 00358 printf("std mean %lf %lf\n",varstd,varmean); 00359 if (year_learn[ntime_days_learn[tl]] == 2005 && month_learn[ntime_days_learn[tl]] == 5 && day_learn[ntime_days_learn[tl]] == 29) { 00360 if (clust_diff[tl] == 0) 00361 printf("after norm metric %lf ",metric[tl]); 00362 } 00363 */ 00364 metric_sup_norm[tl] = (metric_sup[tl] - varmean_sup) / varstd_sup; 00365 metric_norm[tl] = ((metric[tl] - varmean) / varstd) + metric_sup_norm[tl]; 00366 00367 /* 00368 if (t >= (ntime_sub-5)) 00369 printf("!! %d %lf %lf %lf\n",tl,metric_norm[tl],varmean,varstd); 00370 if (year_learn[ntime_days_learn[tl]] == 2005 && month_learn[ntime_days_learn[tl]] == 5 && day_learn[ntime_days_learn[tl]] == 29) 00371 if (clust_diff[tl] == 0) 00372 printf("%lf\n",metric[tl]); 00373 */ 00374 } 00375 } 00376 else 00377 for (tl=0; tl<ntime_days; tl++) 00378 metric_norm[tl] = (metric[tl] - varmean) / varstd; 00379 } 00380 else 00381 for (tl=0; tl<ntime_days; tl++) 00382 metric_norm[tl] = (metric[tl] - varmean) / varstd; 00383 00384 /* Sort the vector, retrieve the sorted indexes and select only the first ndayschoices ones */ 00385 // printf("%d %d\n",ntime_days,ndayschoices); 00386 (void) gsl_sort_smallest_index(metric_index, (size_t) ndayschoices, metric_norm, 1, (size_t) ntime_days); 00387 00388 if (shuffle == TRUE) { 00389 /* Shuffle the vector of indexes and choose the first one. 
This select a random day for the second and final selection */ 00390 for (ii=0; ii<ndayschoices; ii++) 00391 random_num[ii] = gsl_rng_uniform_int(rng, 100); 00392 (void) gsl_sort_ulong_index(random_index, random_num, 1, (size_t) ndayschoices); 00393 00394 min_metric = metric_norm[metric_index[random_index[0]]]; 00395 min_metric_index = metric_index[random_index[0]]; 00396 00397 /* Save analog day time index in the learning period */ 00398 analog_days.tindex[t] = ntime_days_learn[metric_index[random_index[0]]]; 00399 analog_days.year[t] = year_learn[analog_days.tindex[t]]; 00400 analog_days.month[t] = month_learn[analog_days.tindex[t]]; 00401 analog_days.day[t] = day_learn[analog_days.tindex[t]]; 00402 analog_days.tindex_all[t] = buf_learn_sub_i[analog_days.tindex[t]]; 00403 istat = utInvCalendar2(analog_days.year[t], analog_days.month[t], analog_days.day[t], 0, 0, 0.0, dataunits, &timei); 00404 analog_days.time[t] = (int) timei; 00405 00406 /* Save date of day being downscaled */ 00407 analog_days.year_s[t] = year[buf_sub_i[t]]; 00408 analog_days.month_s[t] = month[buf_sub_i[t]]; 00409 analog_days.day_s[t] = day[buf_sub_i[t]]; 00410 analog_days.tindex_s_all[t] = buf_sub_i[t]; 00411 00412 /* Save all analog days in special time structure */ 00413 analog_days.analog_dayschoice[t] = (tstruct *) malloc(ndayschoices * sizeof(tstruct)); 00414 if (analog_days.analog_dayschoice[t] == NULL) alloc_error(__FILE__, __LINE__); 00415 analog_days.metric_norm[t] = (float *) malloc(ndayschoices * sizeof(float)); 00416 if (analog_days.metric_norm[t] == NULL) alloc_error(__FILE__, __LINE__); 00417 analog_days.tindex_dayschoice[t] = (int *) malloc(ndayschoices * sizeof(int)); 00418 if (analog_days.tindex_dayschoice[t] == NULL) alloc_error(__FILE__, __LINE__); 00419 for (ii=0; ii<ndayschoices; ii++) { 00420 analog_days.metric_norm[t][ii] = metric_norm[metric_index[ii]]; 00421 analog_days.tindex_dayschoice[t][ii] = ntime_days_learn[metric_index[ii]]; 00422 analog_days.analog_dayschoice[t][ii].year = year_learn[ntime_days_learn[metric_index[ii]]]; 00423 analog_days.analog_dayschoice[t][ii].month = month_learn[ntime_days_learn[metric_index[ii]]]; 00424 analog_days.analog_dayschoice[t][ii].day = day_learn[ntime_days_learn[metric_index[ii]]]; 00425 analog_days.analog_dayschoice[t][ii].hour = 0; 00426 analog_days.analog_dayschoice[t][ii].min = 0; 00427 analog_days.analog_dayschoice[t][ii].sec = 0; 00428 } 00429 } 00430 else { 00431 /* Don't shuffle. Instead choose the one having the smallest metric for the best match */ 00432 00433 min_metric = 99999999.9; 00434 min_metric_index = -1; 00435 if (sup == TRUE) { 00436 /* If we use the secondary large-scale field for this final selection */ 00437 for (ii=0; ii<ndayschoices; ii++) { 00438 if (metric_sup[metric_index[ii]] < min_metric) { 00439 min_metric_index = metric_index[ii]; 00440 min_metric = metric_sup[metric_index[ii]]; 00441 } 00442 00443 /* if (t >= (ntime_sub-5)) { 00444 printf("!!! 
%d %d %d %d %lf\n",ii,t,tl,(int)metric_index[ii],metric_norm[metric_index[ii]]); 00445 printf("SUP %d %d %lf %lf %lf %d ",ii,ndayschoices,metric_sup[metric_index[ii]],metric_norm[metric_index[ii]]*2.0,min_metric,(int)metric_index[ii]); 00446 printf("%d %d %d\n",year_learn[ntime_days_learn[metric_index[ii]]],month_learn[ntime_days_learn[metric_index[ii]]],day_learn[ntime_days_learn[metric_index[ii]]]); 00447 printf("%d %d\n",sup,sup_choice); 00448 }*/ 00449 00450 } 00451 } 00452 else { 00453 /* We rather use the main large-scale field (precipitation) as the metric for the final selection */ 00454 for (ii=0; ii<ndayschoices; ii++) { 00455 // if (t == (ntime_sub-1)) { 00456 // printf("%d %d %lf %lf\n",ii,ndayschoices,metric[metric_index[ii]],min_metric); 00457 // printf("%d %d %d\n",year_learn[ntime_days_learn[metric_index[ii]]],month_learn[ntime_days_learn[metric_index[ii]]],day_learn[ntime_days_learn[metric_index[ii]]]); 00458 // } 00459 if (metric_norm[metric_index[ii]] < min_metric) { 00460 min_metric_index = metric_index[ii]; 00461 min_metric = metric_norm[metric_index[ii]]; 00462 } 00463 } 00464 } 00465 00466 /* Save analog day time index in the learning period */ 00467 analog_days.tindex[t] = ntime_days_learn[min_metric_index]; 00468 analog_days.year[t] = year_learn[analog_days.tindex[t]]; 00469 analog_days.month[t] = month_learn[analog_days.tindex[t]]; 00470 analog_days.day[t] = day_learn[analog_days.tindex[t]]; 00471 analog_days.tindex_all[t] = buf_learn_sub_i[analog_days.tindex[t]]; 00472 istat = utInvCalendar2(analog_days.year[t], analog_days.month[t], analog_days.day[t], 0, 0, 0.0, dataunits, &timei); 00473 analog_days.time[t] = (int) timei; 00474 00475 /* Save date of day being downscaled */ 00476 analog_days.year_s[t] = year[buf_sub_i[t]]; 00477 analog_days.month_s[t] = month[buf_sub_i[t]]; 00478 analog_days.day_s[t] = day[buf_sub_i[t]]; 00479 analog_days.tindex_s_all[t] = buf_sub_i[t]; 00480 00481 /* Save all analog days in special time structure */ 00482 analog_days.analog_dayschoice[t] = (tstruct *) malloc(ndayschoices * sizeof(tstruct)); 00483 if (analog_days.analog_dayschoice[t] == NULL) alloc_error(__FILE__, __LINE__); 00484 analog_days.metric_norm[t] = (float *) malloc(ndayschoices * sizeof(float)); 00485 if (analog_days.metric_norm[t] == NULL) alloc_error(__FILE__, __LINE__); 00486 analog_days.tindex_dayschoice[t] = (int *) malloc(ndayschoices * sizeof(int)); 00487 if (analog_days.tindex_dayschoice[t] == NULL) alloc_error(__FILE__, __LINE__); 00488 for (ii=0; ii<ndayschoices; ii++) { 00489 analog_days.metric_norm[t][ii] = metric_norm[metric_index[ii]]; 00490 analog_days.tindex_dayschoice[t][ii] = ntime_days_learn[metric_index[ii]]; 00491 analog_days.analog_dayschoice[t][ii].year = year_learn[ntime_days_learn[metric_index[ii]]]; 00492 analog_days.analog_dayschoice[t][ii].month = month_learn[ntime_days_learn[metric_index[ii]]]; 00493 analog_days.analog_dayschoice[t][ii].day = day_learn[ntime_days_learn[metric_index[ii]]]; 00494 analog_days.analog_dayschoice[t][ii].hour = 0; 00495 analog_days.analog_dayschoice[t][ii].min = 0; 00496 analog_days.analog_dayschoice[t][ii].sec = 0; 00497 } 00498 } 00499 00500 /* Free memory */ 00501 (void) free(metric); 00502 (void) free(metric_norm); 00503 metric = NULL; 00504 metric_norm = NULL; 00505 if (sup_choice == TRUE || sup == TRUE) { 00506 (void) free(metric_sup); 00507 metric_sup = NULL; 00508 (void) free(metric_sup_norm); 00509 metric_sup_norm = NULL; 00510 } 00511 (void) free(clust_diff); 00512 clust_diff = NULL; 00513 (void) 
free(ntime_days_learn); 00514 ntime_days_learn = NULL; 00515 } 00516 if (year[buf_sub_i[t]] == 1999 && month[buf_sub_i[t]] == 5) 00517 if (month[buf_sub_i[t]] == 3 || month[buf_sub_i[t]] == 4 || month[buf_sub_i[t]] == 5) 00518 printf("Time downscaled %d: %d %d %d. Analog day: %d %d %d %lf\n", t, year[buf_sub_i[t]], month[buf_sub_i[t]], day[buf_sub_i[t]], year_learn[analog_days.tindex[t]], month_learn[analog_days.tindex[t]], day_learn[analog_days.tindex[t]], min_metric); 00519 00520 } 00521 00522 /* Free memory */ 00523 if (shuffle == TRUE) { 00524 (void) gsl_rng_free(rng); 00525 (void) free(random_num); 00526 (void) free(random_index); 00527 } 00528 (void) free(metric_index); 00529 00530 (void) free(buf_sub_i); 00531 (void) free(buf_learn_sub_i); 00532 00533 (void) ut_free(dataunits); 00534 (void) ut_free_system(unitSystem); 00535 00536 return 0; 00537 }
void free_main_data (data_struct *data)

Free main data structure.

Parameters:
    [in]  data  MASTER data structure.
Definition at line 66 of file free_main_data.c.
References var_struct::acronym, var_struct::altitude, var_struct::altitudename, field_struct::analog_days, field_struct::analog_days_year, analog_day_struct::analog_dayschoice, conf_struct::analog_file_ctrl, conf_struct::analog_file_other, conf_struct::analog_save, conf_struct::cal_type, learning_data_struct::class_clusters, conf_struct::classif_type, var_struct::clim, clim_info_struct::clim_filein_ls, clim_info_struct::clim_fileout_ls, conf_struct::clim_filter_type, field_data_struct::clim_info, clim_info_struct::clim_nomvar_ls, clim_info_struct::clim_provided, clim_info_struct::clim_save, conf_struct::clustname, data_struct::conf, conf_struct::config, info_struct::contact_email, info_struct::contact_name, info_field_struct::coordinates, mask_struct::coords, proj_struct::coords, info_struct::country, info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, CTRL_FIELD_LS, learning_struct::data, field_struct::data, time_vect_struct::day, analog_day_struct::day, analog_day_struct::day_s, downscale_struct::days_class_clusters, downscale_struct::days_class_clusters_all, var_struct::delta, downscale_struct::delta, downscale_struct::delta_all, downscale_struct::delta_dayschoice, downscale_struct::delta_dayschoice_all, info_struct::description, var_struct::dimcoords, mask_struct::dimcoords, var_struct::dimxname, mask_struct::dimxname, reg_struct::dimxname, field_data_struct::dimxname, conf_struct::dimxname_eof, var_struct::dimyname, mask_struct::dimyname, reg_struct::dimyname, field_data_struct::dimyname, conf_struct::dimyname_eof, downscale_struct::dist, downscale_struct::dist_all, field_data_struct::down, period_struct::downscale, info_struct::downscaling_forcing, learning_eof_struct::eof, eof_info_struct::eof_coords, field_data_struct::eof_data, eof_info_struct::eof_filein_ls, field_data_struct::eof_info, eof_data_struct::eof_ls, eof_data_struct::eof_nomvar_ls, eof_info_struct::eof_project, conf_struct::eofname, var_struct::factor, FALSE, mask_struct::field, data_struct::field, field_data_struct::field_eof_ls, FIELD_LS, field_data_struct::field_ls, mask_struct::filename, reg_struct::filename, learning_eof_struct::filename_eof, field_data_struct::filename_ls, learning_struct::filename_open_clust_learn, learning_struct::filename_open_learn, learning_struct::filename_open_weight, learning_struct::filename_rea_sup, learning_struct::filename_save_clust_learn, reg_struct::filename_save_ctrl_reg, learning_struct::filename_save_learn, reg_struct::filename_save_other_reg, learning_struct::filename_save_weight, var_struct::frequency, info_field_struct::grid_mapping, proj_struct::grid_mapping_name, var_struct::height, info_field_struct::height, time_vect_struct::hour, data_struct::info, field_data_struct::info, eof_info_struct::info, info_struct::institution, info_struct::institution_model, info_struct::keywords, mask_struct::lat, reg_struct::lat, learning_struct::lat, field_struct::lat_eof_ls, field_struct::lat_ls, var_struct::latname, mask_struct::latname, reg_struct::latname, field_data_struct::latname, conf_struct::latname_eof, data_struct::learning, conf_struct::learning_maskfile, learning_struct::learning_provided, learning_struct::learning_save, mask_struct::lon, reg_struct::lon, learning_struct::lon, field_struct::lon_eof_ls, field_struct::lon_ls, info_field_struct::long_name, var_struct::lonname, mask_struct::lonname, reg_struct::lonname, field_data_struct::lonname, conf_struct::lonname_eof, mask_struct::maskname, downscale_struct::mean, downscale_struct::mean_dist, 
info_struct::member, analog_day_struct::metric_norm, time_vect_struct::minutes, info_struct::model, season_struct::month, time_vect_struct::month, analog_day_struct::month, analog_day_struct::month_s, field_struct::n_ls, var_struct::name, proj_struct::name, NCAT, analog_day_struct::ndayschoice, var_struct::netcdfname, season_struct::nmonths, var_struct::nobs_var, learning_struct::nomvar_class_clusters, learning_eof_struct::nomvar_eof, field_data_struct::nomvar_ls, learning_struct::nomvar_pc_normalized_var, learning_struct::nomvar_precip_index, learning_struct::nomvar_precip_index_obs, learning_struct::nomvar_precip_reg, learning_struct::nomvar_precip_reg_acor, learning_struct::nomvar_precip_reg_cst, learning_struct::nomvar_precip_reg_dist, learning_struct::nomvar_precip_reg_err, learning_struct::nomvar_precip_reg_rsq, learning_struct::nomvar_precip_reg_vif, learning_struct::nomvar_rea_sup, learning_eof_struct::nomvar_sing, learning_struct::nomvar_sup_index, learning_struct::nomvar_sup_index_mean, learning_struct::nomvar_sup_index_var, learning_struct::nomvar_sup_val, learning_struct::nomvar_time, learning_struct::nomvar_weight, conf_struct::nperiods, conf_struct::nseasons, analog_day_struct::ntime, learning_struct::obs, learning_struct::obs_dimxname, learning_struct::obs_dimyname, learning_struct::obs_eofname, learning_struct::obs_latname, learning_struct::obs_lonname, learning_struct::obs_timename, conf_struct::obs_var, info_struct::other_contact_email, info_struct::other_contact_name, var_struct::output, conf_struct::output_only, conf_struct::output_path, var_struct::path, learning_struct::pc_normalized_var, conf_struct::period, conf_struct::period_ctrl, var_struct::post, learning_data_struct::precip_index, field_struct::precip_index, learning_data_struct::precip_index_obs, learning_data_struct::precip_reg, learning_data_struct::precip_reg_autocor, learning_data_struct::precip_reg_cst, learning_data_struct::precip_reg_dist, learning_data_struct::precip_reg_err, learning_data_struct::precip_reg_rsq, learning_data_struct::precip_reg_vif, info_struct::processor, var_struct::proj, mask_struct::proj, field_struct::proj, conf_struct::ptsname, reg_struct::ptsname, learning_struct::rea, learning_struct::rea_coords, learning_struct::rea_dimxname, learning_struct::rea_dimyname, learning_struct::rea_gridname, learning_struct::rea_latname, learning_struct::rea_lonname, learning_struct::rea_timename, data_struct::reg, reg_struct::reg_save, info_struct::scenario, info_struct::scenario_co2, conf_struct::season, data_struct::secondary_mask, time_vect_struct::seconds, learning_eof_struct::sing, eof_data_struct::sing_ls, eof_data_struct::sing_nomvar_ls, downscale_struct::smean, downscale_struct::smean_2d, downscale_struct::smean_norm, info_struct::software, info_struct::summary, info_struct::summary_french, learning_data_struct::sup_index, learning_struct::sup_lat, learning_struct::sup_latname, learning_struct::sup_lon, learning_struct::sup_lonname, learning_data_struct::sup_val, downscale_struct::sup_val_norm, downscale_struct::svar_2d, var_struct::template, learning_data_struct::time, analog_day_struct::time, field_struct::time_ls, learning_struct::time_s, learning_eof_struct::time_s, learning_data_struct::time_s, field_struct::time_s, conf_struct::time_units, var_struct::timename, reg_struct::timename, field_data_struct::timename, info_struct::timestep, analog_day_struct::tindex, analog_day_struct::tindex_all, analog_day_struct::tindex_dayschoice, analog_day_struct::tindex_s_all, info_struct::title, 
info_struct::title_french, TRUE, var_struct::units, info_field_struct::units, mask_struct::use_mask, downscale_struct::var, downscale_struct::var_dist, downscale_struct::var_pc_norm, info_struct::version, learning_data_struct::weight, time_vect_struct::year, analog_day_struct::year, and analog_day_struct::year_s.
Referenced by main().
00066 { 00067 00072 int i; /* Loop counter */ 00073 int j; /* Loop counter */ 00074 int s; /* Loop counter */ 00075 int tt; /* Loop counter */ 00076 int end_cat; /* End category to process */ 00077 00078 if ( (data->conf->analog_save == TRUE || data->conf->output_only == TRUE) && data->conf->period_ctrl->downscale == TRUE ) 00079 (void) free(data->conf->analog_file_ctrl); 00080 if (data->conf->analog_save == TRUE || data->conf->output_only == TRUE) 00081 (void) free(data->conf->analog_file_other); 00082 00083 for (i=0; i<NCAT; i++) { 00084 00085 for (j=0; j<data->field[i].n_ls; j++) { 00086 00087 if (data->field[i].data[j].clim_info->clim_provided == TRUE) 00088 (void) free(data->field[i].data[j].clim_info->clim_filein_ls); 00089 if (data->field[i].data[j].clim_info->clim_save == TRUE) 00090 (void) free(data->field[i].data[j].clim_info->clim_fileout_ls); 00091 if (data->field[i].data[j].clim_info->clim_save == TRUE || data->field[i].data[j].clim_info->clim_provided == TRUE) 00092 (void) free(data->field[i].data[j].clim_info->clim_nomvar_ls); 00093 (void) free(data->field[i].data[j].clim_info); 00094 00095 if (data->field[i].data[j].eof_info->eof_project == TRUE) { 00096 (void) free(data->field[i].data[j].eof_info->eof_coords); 00097 (void) free(data->field[i].data[j].eof_info->eof_filein_ls); 00098 (void) free(data->field[i].data[j].eof_data->eof_nomvar_ls); 00099 (void) free(data->field[i].data[j].eof_data->sing_nomvar_ls); 00100 00101 if (i == 0 || i == 1) 00102 (void) free(data->field[i].data[j].field_eof_ls); 00103 (void) free(data->field[i].data[j].eof_data->eof_ls); 00104 (void) free(data->field[i].data[j].eof_data->sing_ls); 00105 00106 if ((i == 0 || i == 1) && data->conf->output_only != TRUE) { 00107 (void) free(data->field[i].data[j].eof_info->info->units); 00108 (void) free(data->field[i].data[j].eof_info->info->height); 00109 (void) free(data->field[i].data[j].eof_info->info->coordinates); 00110 (void) free(data->field[i].data[j].eof_info->info->grid_mapping); 00111 (void) free(data->field[i].data[j].eof_info->info->long_name); 00112 } 00113 } 00114 (void) free(data->field[i].data[j].eof_info->info); 00115 (void) free(data->field[i].data[j].eof_info); 00116 (void) free(data->field[i].data[j].eof_data); 00117 00118 if (data->conf->output_only != TRUE) { 00119 (void) free(data->field[i].data[j].info->coordinates); 00120 (void) free(data->field[i].data[j].info->grid_mapping); 00121 (void) free(data->field[i].data[j].info->units); 00122 (void) free(data->field[i].data[j].info->height); 00123 (void) free(data->field[i].data[j].info->long_name); 00124 } 00125 00126 (void) free(data->field[i].data[j].info); 00127 (void) free(data->field[i].data[j].nomvar_ls); 00128 (void) free(data->field[i].data[j].filename_ls); 00129 (void) free(data->field[i].data[j].dimyname); 00130 (void) free(data->field[i].data[j].dimxname); 00131 (void) free(data->field[i].data[j].latname); 00132 (void) free(data->field[i].data[j].lonname); 00133 (void) free(data->field[i].data[j].timename); 00134 00135 if (data->field[i].proj[j].name != NULL) 00136 (void) free(data->field[i].proj[j].name); 00137 if (data->field[i].proj[j].grid_mapping_name != NULL) 00138 (void) free(data->field[i].proj[j].grid_mapping_name); 00139 if (data->field[i].proj[j].coords != NULL) 00140 (void) free(data->field[i].proj[j].coords); 00141 00142 if (data->conf->output_only != TRUE) { 00143 for (s=0; s<data->conf->nseasons; s++) { 00144 if (i == 0 || (i == 1 && data->conf->period_ctrl->downscale == TRUE)) { 00145 (void) 
free(data->field[i].data[j].down->days_class_clusters[s]); 00146 (void) free(data->field[i].data[j].down->dist[s]); 00147 } 00148 if (i == 1) { 00149 (void) free(data->field[i].data[j].down->mean_dist[s]); 00150 (void) free(data->field[i].data[j].down->var_dist[s]); 00151 } 00152 } 00153 } 00154 if (i == 0 || i == 1) { 00155 (void) free(data->field[i].data[j].down->mean_dist); 00156 (void) free(data->field[i].data[j].down->var_dist); 00157 } 00158 if ( i == 0 || i == 1) { 00159 if (i == 0 || (i == 1 && data->conf->period_ctrl->downscale == TRUE)) { 00160 if (data->conf->output_only != TRUE) { 00161 (void) free(data->field[i].data[j].down->dist_all); 00162 (void) free(data->field[i].data[j].down->days_class_clusters_all); 00163 } 00164 } 00165 (void) free(data->field[i].data[j].down->dist); 00166 (void) free(data->field[i].data[j].down->days_class_clusters); 00167 (void) free(data->field[i].data[j].down->var_pc_norm); 00168 if (i == 0 || (i == 1 && data->conf->period_ctrl->downscale == TRUE)) { 00169 (void) free(data->field[i+2].data[j].down->delta_all); 00170 for (tt=0; tt<data->field[i].analog_days_year.ntime; tt++) 00171 (void) free(data->field[i+2].data[j].down->delta_dayschoice_all[tt]); 00172 (void) free(data->field[i+2].data[j].down->delta_dayschoice_all); 00173 } 00174 00175 /* 00176 if ((data->conf->period_ctrl->downscale == TRUE && i == 1) || i == 0) 00177 if (data->conf->output_only != TRUE) { 00178 for (s=0; s<data->conf->nseasons; s++) { 00179 for (tt=0; tt<data->field[i].analog_days[s].ntime; tt++) { 00180 (void) free(data->field[i].data[j].down->delta_dayschoice[s][tt]); 00181 (void) free(data->field[i].data[j].down->delta[s]); 00182 } 00183 } 00184 (void) free(data->field[i].data[j].down->delta_dayschoice[s]); 00185 } 00186 */ 00187 } 00188 else { 00189 if (data->conf->period_ctrl->downscale == TRUE || i == 2) 00190 if (data->conf->output_only != TRUE) 00191 for (s=0; s<data->conf->nseasons; s++) { 00192 (void) free(data->field[i].data[j].down->smean_norm[s]); 00193 (void) free(data->field[i].data[j].down->sup_val_norm[s]); 00194 } 00195 if (data->conf->output_only != TRUE) { 00196 if (i == 3) { 00197 /* Only for secondary large-scale control field */ 00198 for (s=0; s<data->conf->nseasons; s++) { 00199 (void) free(data->field[i].data[j].down->smean_2d[s]); 00200 (void) free(data->field[i].data[j].down->svar_2d[s]); 00201 } 00202 (void) free(data->field[i].data[j].down->smean_2d); 00203 (void) free(data->field[i].data[j].down->svar_2d); 00204 } 00205 (void) free(data->field[i].data[j].down->smean); 00206 } 00207 00208 (void) free(data->field[i].data[j].down->smean_norm); 00209 (void) free(data->field[i].data[j].down->sup_val_norm); 00210 (void) free(data->field[i].data[j].down->mean); 00211 (void) free(data->field[i].data[j].down->var); 00212 (void) free(data->field[i].data[j].down->delta); 00213 (void) free(data->field[i].data[j].down->delta_dayschoice); 00214 } 00215 00216 (void) free(data->field[i].data[j].down); 00217 00218 (void) free(data->field[i].data[j].field_ls); 00219 } 00220 00221 (void) free(data->field[i].lat_ls); 00222 (void) free(data->field[i].lon_ls); 00223 00224 if (data->field[i].lon_eof_ls != NULL) 00225 (void) free(data->field[i].lon_eof_ls); 00226 if (data->field[i].lat_eof_ls != NULL) 00227 (void) free(data->field[i].lat_eof_ls); 00228 00229 if (data->conf->period_ctrl->downscale == TRUE) 00230 end_cat = CTRL_FIELD_LS; 00231 else 00232 end_cat = FIELD_LS; 00233 if (i >= FIELD_LS && i <= end_cat) { 00234 if (data->field[i].n_ls > 0) { 00235 if 
(data->conf->output_only != TRUE) { 00236 for (s=0; s<data->conf->nseasons; s++) { 00237 (void) free(data->field[i].precip_index[s]); 00238 (void) free(data->field[i].analog_days[s].tindex); 00239 (void) free(data->field[i].analog_days[s].tindex_all); 00240 (void) free(data->field[i].analog_days[s].tindex_s_all); 00241 (void) free(data->field[i].analog_days[s].time); 00242 (void) free(data->field[i].analog_days[s].year); 00243 (void) free(data->field[i].analog_days[s].month); 00244 (void) free(data->field[i].analog_days[s].day); 00245 (void) free(data->field[i].analog_days[s].year_s); 00246 (void) free(data->field[i].analog_days[s].month_s); 00247 (void) free(data->field[i].analog_days[s].day_s); 00248 for (tt=0; tt<data->field[i].analog_days[s].ntime; tt++) { 00249 if (data->field[i].analog_days[s].analog_dayschoice[tt] != NULL) 00250 (void) free(data->field[i].analog_days[s].analog_dayschoice[tt]); 00251 if (data->field[i].analog_days[s].metric_norm[tt] != NULL) 00252 (void) free(data->field[i].analog_days[s].metric_norm[tt]); 00253 if (data->field[i].analog_days[s].tindex_dayschoice[tt] != NULL) 00254 (void) free(data->field[i].analog_days[s].tindex_dayschoice[tt]); 00255 } 00256 (void) free(data->field[i].analog_days[s].analog_dayschoice); 00257 (void) free(data->field[i].analog_days[s].tindex_dayschoice); 00258 (void) free(data->field[i].analog_days[s].metric_norm); 00259 (void) free(data->field[i].analog_days[s].ndayschoice); 00260 } 00261 (void) free(data->field[i].analog_days_year.tindex); 00262 (void) free(data->field[i].analog_days_year.tindex_all); 00263 (void) free(data->field[i].analog_days_year.tindex_s_all); 00264 (void) free(data->field[i].analog_days_year.time); 00265 for (tt=0; tt<data->field[i].analog_days_year.ntime; tt++) { 00266 if (data->field[i].analog_days_year.analog_dayschoice[tt] != NULL) 00267 (void) free(data->field[i].analog_days_year.analog_dayschoice[tt]); 00268 if (data->field[i].analog_days_year.metric_norm[tt] != NULL) 00269 (void) free(data->field[i].analog_days_year.metric_norm[tt]); 00270 } 00271 (void) free(data->field[i].analog_days_year.analog_dayschoice); 00272 (void) free(data->field[i].analog_days_year.tindex_dayschoice); 00273 (void) free(data->field[i].analog_days_year.metric_norm); 00274 (void) free(data->field[i].analog_days_year.ndayschoice); 00275 } 00276 (void) free(data->field[i].analog_days_year.year); 00277 (void) free(data->field[i].analog_days_year.month); 00278 (void) free(data->field[i].analog_days_year.day); 00279 (void) free(data->field[i].analog_days_year.year_s); 00280 (void) free(data->field[i].analog_days_year.month_s); 00281 (void) free(data->field[i].analog_days_year.day_s); 00282 } 00283 } 00284 00285 (void) free(data->field[i].precip_index); 00286 00287 if (data->field[i].n_ls > 0) { 00288 (void) free(data->field[i].data); 00289 if (data->conf->output_only != TRUE) { 00290 (void) free(data->field[i].time_s->year); 00291 (void) free(data->field[i].time_s->month); 00292 (void) free(data->field[i].time_s->day); 00293 (void) free(data->field[i].time_s->hour); 00294 (void) free(data->field[i].time_s->minutes); 00295 (void) free(data->field[i].time_s->seconds); 00296 } 00297 (void) free(data->field[i].proj); 00298 } 00299 (void) free(data->field[i].time_s); 00300 (void) free(data->field[i].time_ls); 00301 } 00302 00303 for (i=0; i<NCAT; i++) 00304 (void) free(data->field[i].analog_days); 00305 00306 for (s=0; s<data->conf->nseasons; s++) { 00307 if (data->conf->output_only != TRUE) { 00308 (void) 
free(data->learning->data[s].time_s->year); 00309 (void) free(data->learning->data[s].time_s->month); 00310 (void) free(data->learning->data[s].time_s->day); 00311 (void) free(data->learning->data[s].time_s->hour); 00312 (void) free(data->learning->data[s].time_s->minutes); 00313 (void) free(data->learning->data[s].time_s->seconds); 00314 (void) free(data->learning->data[s].time); 00315 (void) free(data->learning->data[s].class_clusters); 00316 } 00317 (void) free(data->learning->data[s].time_s); 00318 if (data->conf->season[s].nmonths > 0) 00319 (void) free(data->conf->season[s].month); 00320 00321 if (data->conf->output_only != TRUE) { 00322 (void) free(data->learning->data[s].weight); 00323 (void) free(data->learning->data[s].precip_reg); 00324 if (data->learning->data[s].precip_reg_dist != NULL) 00325 (void) free(data->learning->data[s].precip_reg_dist); 00326 (void) free(data->learning->data[s].precip_reg_cst); 00327 if (data->learning->data[s].precip_reg_err != NULL) 00328 (void) free(data->learning->data[s].precip_reg_err); 00329 if (data->learning->learning_provided == FALSE) { 00330 (void) free(data->learning->data[s].precip_reg_rsq); 00331 (void) free(data->learning->data[s].precip_reg_vif); 00332 (void) free(data->learning->data[s].precip_reg_autocor); 00333 } 00334 (void) free(data->learning->data[s].precip_index); 00335 (void) free(data->learning->data[s].sup_index); 00336 if (data->learning->data[s].sup_val != NULL) 00337 (void) free(data->learning->data[s].sup_val); 00338 } 00339 } 00340 00341 if (data->learning->learning_provided == FALSE) { 00342 (void) free(data->learning->obs->filename_eof); 00343 (void) free(data->learning->obs->nomvar_eof); 00344 (void) free(data->learning->obs->nomvar_sing); 00345 (void) free(data->learning->obs->eof); 00346 (void) free(data->learning->obs->sing); 00347 00348 for (s=0; s<data->conf->nseasons; s++) 00349 (void) free(data->learning->data[s].precip_index_obs); 00350 00351 (void) free(data->learning->obs->time_s->year); 00352 (void) free(data->learning->obs->time_s->month); 00353 (void) free(data->learning->obs->time_s->day); 00354 (void) free(data->learning->obs->time_s->hour); 00355 (void) free(data->learning->obs->time_s->minutes); 00356 (void) free(data->learning->obs->time_s->seconds); 00357 (void) free(data->learning->obs->time_s); 00358 00359 (void) free(data->learning->rea->time_s->year); 00360 (void) free(data->learning->rea->time_s->month); 00361 (void) free(data->learning->rea->time_s->day); 00362 (void) free(data->learning->rea->time_s->hour); 00363 (void) free(data->learning->rea->time_s->minutes); 00364 (void) free(data->learning->rea->time_s->seconds); 00365 (void) free(data->learning->rea->time_s); 00366 00367 (void) free(data->learning->rea->filename_eof); 00368 (void) free(data->learning->rea->nomvar_eof); 00369 (void) free(data->learning->rea->nomvar_sing); 00370 (void) free(data->learning->rea->eof); 00371 (void) free(data->learning->rea->sing); 00372 00373 (void) free(data->learning->obs); 00374 (void) free(data->learning->rea); 00375 00376 (void) free(data->learning->nomvar_rea_sup); 00377 (void) free(data->learning->filename_rea_sup); 00378 (void) free(data->learning->rea_coords); 00379 (void) free(data->learning->rea_gridname); 00380 (void) free(data->learning->rea_dimxname); 00381 (void) free(data->learning->rea_dimyname); 00382 (void) free(data->learning->rea_lonname); 00383 (void) free(data->learning->rea_latname); 00384 (void) free(data->learning->rea_timename); 00385 00386 if (data->learning->sup_lon != NULL) 
00387 (void) free(data->learning->sup_lon); 00388 if (data->learning->sup_lat != NULL) 00389 (void) free(data->learning->sup_lat); 00390 00391 (void) free(data->learning->obs_dimxname); 00392 (void) free(data->learning->obs_dimyname); 00393 (void) free(data->learning->obs_lonname); 00394 (void) free(data->learning->obs_latname); 00395 (void) free(data->learning->obs_timename); 00396 (void) free(data->learning->obs_eofname); 00397 00398 if (data->learning->lon != NULL) 00399 (void) free(data->learning->lon); 00400 if (data->learning->lat != NULL) 00401 (void) free(data->learning->lat); 00402 } 00403 00404 if (data->conf->output_only != TRUE) { 00405 (void) free(data->learning->time_s->year); 00406 (void) free(data->learning->time_s->month); 00407 (void) free(data->learning->time_s->day); 00408 (void) free(data->learning->time_s->hour); 00409 (void) free(data->learning->time_s->minutes); 00410 (void) free(data->learning->time_s->seconds); 00411 } 00412 00413 (void) free(data->learning->time_s); 00414 00415 if (data->learning->learning_provided == TRUE && data->conf->output_only != TRUE) { 00416 (void) free(data->learning->filename_open_weight); 00417 (void) free(data->learning->filename_open_learn); 00418 (void) free(data->learning->filename_open_clust_learn); 00419 } 00420 00421 if (data->conf->output_only != TRUE) 00422 (void) free(data->learning->pc_normalized_var); 00423 00424 (void) free(data->learning->nomvar_time); 00425 (void) free(data->learning->nomvar_weight); 00426 (void) free(data->learning->nomvar_class_clusters); 00427 (void) free(data->learning->nomvar_precip_reg); 00428 (void) free(data->learning->nomvar_precip_reg_dist); 00429 (void) free(data->learning->nomvar_precip_reg_cst); 00430 (void) free(data->learning->nomvar_precip_reg_rsq); 00431 (void) free(data->learning->nomvar_precip_reg_acor); 00432 (void) free(data->learning->nomvar_precip_reg_vif); 00433 if (data->learning->nomvar_precip_reg_err != NULL) 00434 (void) free(data->learning->nomvar_precip_reg_err); 00435 (void) free(data->learning->nomvar_precip_index); 00436 (void) free(data->learning->nomvar_precip_index_obs); 00437 (void) free(data->learning->nomvar_sup_index); 00438 (void) free(data->learning->nomvar_sup_val); 00439 (void) free(data->learning->nomvar_sup_index_mean); 00440 (void) free(data->learning->nomvar_sup_index_var); 00441 (void) free(data->learning->nomvar_pc_normalized_var); 00442 00443 (void) free(data->learning->sup_latname); 00444 (void) free(data->learning->sup_lonname); 00445 00446 if (data->learning->learning_save == TRUE && data->conf->output_only != TRUE) { 00447 (void) free(data->learning->filename_save_weight); 00448 (void) free(data->learning->filename_save_learn); 00449 (void) free(data->learning->filename_save_clust_learn); 00450 } 00451 00452 if (data->info->title != NULL) { 00453 (void) free(data->info->title); 00454 (void) free(data->info->title_french); 00455 (void) free(data->info->summary); 00456 (void) free(data->info->summary_french); 00457 (void) free(data->info->keywords); 00458 (void) free(data->info->processor); 00459 (void) free(data->info->software); 00460 (void) free(data->info->description); 00461 (void) free(data->info->institution); 00462 (void) free(data->info->creator_email); 00463 (void) free(data->info->creator_url); 00464 (void) free(data->info->creator_name); 00465 (void) free(data->info->version); 00466 (void) free(data->info->scenario); 00467 (void) free(data->info->scenario_co2); 00468 (void) free(data->info->model); 00469 (void) 
free(data->info->institution_model); 00470 (void) free(data->info->country); 00471 (void) free(data->info->member); 00472 (void) free(data->info->downscaling_forcing); 00473 (void) free(data->info->timestep); 00474 (void) free(data->info->contact_email); 00475 (void) free(data->info->contact_name); 00476 (void) free(data->info->other_contact_email); 00477 (void) free(data->info->other_contact_name); 00478 } 00479 00480 (void) free(data->reg->filename); 00481 (void) free(data->reg->dimxname); 00482 (void) free(data->reg->dimyname); 00483 (void) free(data->reg->lonname); 00484 (void) free(data->reg->latname); 00485 (void) free(data->reg->ptsname); 00486 if (data->reg->reg_save == TRUE) { 00487 (void) free(data->reg->filename_save_ctrl_reg); 00488 (void) free(data->reg->filename_save_other_reg); 00489 (void) free(data->reg->timename); 00490 } 00491 if (data->conf->output_only != TRUE) { 00492 (void) free(data->reg->lat); 00493 (void) free(data->reg->lon); 00494 } 00495 00496 if (data->secondary_mask->use_mask == TRUE) { 00497 if (data->conf->output_only != TRUE) 00498 (void) free(data->secondary_mask->field); 00499 (void) free(data->secondary_mask->filename); 00500 (void) free(data->secondary_mask->maskname); 00501 (void) free(data->secondary_mask->lonname); 00502 (void) free(data->secondary_mask->latname); 00503 (void) free(data->secondary_mask->coords); 00504 (void) free(data->secondary_mask->dimxname); 00505 (void) free(data->secondary_mask->dimyname); 00506 (void) free(data->secondary_mask->dimcoords); 00507 (void) free(data->secondary_mask->proj); 00508 (void) free(data->secondary_mask->lat); 00509 (void) free(data->secondary_mask->lon); 00510 } 00511 (void) free(data->secondary_mask); 00512 00513 if (data->conf->learning_maskfile->use_mask == TRUE) { 00514 if (data->conf->output_only != TRUE) 00515 (void) free(data->conf->learning_maskfile->field); 00516 (void) free(data->conf->learning_maskfile->filename); 00517 (void) free(data->conf->learning_maskfile->maskname); 00518 (void) free(data->conf->learning_maskfile->lonname); 00519 (void) free(data->conf->learning_maskfile->latname); 00520 (void) free(data->conf->learning_maskfile->coords); 00521 (void) free(data->conf->learning_maskfile->dimxname); 00522 (void) free(data->conf->learning_maskfile->dimyname); 00523 (void) free(data->conf->learning_maskfile->dimcoords); 00524 (void) free(data->conf->learning_maskfile->proj); 00525 (void) free(data->conf->learning_maskfile->lat); 00526 (void) free(data->conf->learning_maskfile->lon); 00527 } 00528 (void) free(data->conf->learning_maskfile); 00529 00530 if (data->conf->obs_var->nobs_var > 0) { 00531 for (i=0; i<data->conf->obs_var->nobs_var; i++) { 00532 (void) free(data->conf->obs_var->acronym[i]); 00533 (void) free(data->conf->obs_var->netcdfname[i]); 00534 (void) free(data->conf->obs_var->name[i]); 00535 (void) free(data->conf->obs_var->post[i]); 00536 (void) free(data->conf->obs_var->clim[i]); 00537 (void) free(data->conf->obs_var->output[i]); 00538 (void) free(data->conf->obs_var->height[i]); 00539 (void) free(data->conf->obs_var->units[i]); 00540 } 00541 (void) free(data->conf->obs_var->acronym); 00542 (void) free(data->conf->obs_var->netcdfname); 00543 (void) free(data->conf->obs_var->name); 00544 (void) free(data->conf->obs_var->factor); 00545 (void) free(data->conf->obs_var->delta); 00546 (void) free(data->conf->obs_var->post); 00547 (void) free(data->conf->obs_var->clim); 00548 (void) free(data->conf->obs_var->output); 00549 (void) free(data->conf->obs_var->height); 00550 (void) 
free(data->conf->obs_var->units); 00551 } 00552 (void) free(data->conf->obs_var->frequency); 00553 (void) free(data->conf->obs_var->template); 00554 (void) free(data->conf->obs_var->latname); 00555 (void) free(data->conf->obs_var->lonname); 00556 (void) free(data->conf->obs_var->timename); 00557 (void) free(data->conf->obs_var->altitude); 00558 (void) free(data->conf->obs_var->altitudename); 00559 (void) free(data->conf->obs_var->proj->name); 00560 (void) free(data->conf->obs_var->proj->coords); 00561 (void) free(data->conf->obs_var->proj->grid_mapping_name); 00562 (void) free(data->conf->obs_var->dimxname); 00563 (void) free(data->conf->obs_var->dimyname); 00564 (void) free(data->conf->obs_var->dimcoords); 00565 (void) free(data->conf->obs_var->proj); 00566 (void) free(data->conf->obs_var->path); 00567 (void) free(data->conf->obs_var); 00568 00569 (void) free(data->conf->clim_filter_type); 00570 (void) free(data->conf->classif_type); 00571 (void) free(data->conf->time_units); 00572 (void) free(data->conf->cal_type); 00573 (void) free(data->conf->dimxname_eof); 00574 (void) free(data->conf->dimyname_eof); 00575 (void) free(data->conf->lonname_eof); 00576 (void) free(data->conf->latname_eof); 00577 (void) free(data->conf->eofname); 00578 (void) free(data->conf->ptsname); 00579 (void) free(data->conf->clustname); 00580 (void) free(data->conf->output_path); 00581 00582 if (data->conf->nperiods > 0) 00583 (void) free(data->conf->period); 00584 (void) free(data->conf->period_ctrl); 00585 (void) free(data->conf->season); 00586 00587 (void) free(data->learning->data); 00588 00589 (void) free(data->conf->config); 00590 00591 (void) free(data->conf); 00592 (void) free(data->info); 00593 (void) free(data->learning); 00594 (void) free(data->reg); 00595 (void) free(data->field); 00596 }
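The cleanup code above repeats one pattern throughout: season-indexed (and sometimes time-indexed) buffers are released innermost first, then the per-season pointer arrays, then the top-level pointer. A minimal sketch of that pattern, using illustrative names (nseasons, ntime, buf) rather than the actual structure members:

  #include <stdlib.h>

  /* Free a [nseasons][ntime[s]] array of double buffers: innermost buffers
     first, then each per-season pointer array, then the top-level array. */
  static void free_season_buffers(double ***buf, int nseasons, const int *ntime) {
    int s, t;
    for (s = 0; s < nseasons; s++) {
      for (t = 0; t < ntime[s]; t++)
        free(buf[s][t]);          /* innermost data buffers */
      free(buf[s]);               /* per-season pointer array */
    }
    free(buf);                    /* top-level pointer array */
  }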
int load_conf ( data_struct * data, char * fileconf )
Read and set variables from XML configuration file.
Parameters:
    [in,out]  data      MASTER data structure, populated from the configuration.
    [in]      fileconf  XML input filename.
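A minimal calling sketch, assuming the caller (as in main()) owns a data_struct and checks the negative return value that load_conf uses to signal configuration errors; the configuration filename below is illustrative only:

  data_struct data;     /* MASTER data structure, populated by load_conf */
  int istat;

  istat = load_conf(&data, "configuration.xml");   /* hypothetical config filename */
  if (istat < 0) {
    (void) fprintf(stderr, "%s: Cannot load configuration. Aborting.\n", __FILE__);
    exit(1);
  }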
Read the entire configuration file into memory so that it can be archived in the output file later.
debug
format: NetCDF-4 or NetCDF-3 for output files
compression for NetCDF-4
compression level for NetCDF-4
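When the NetCDF-4 format is selected and compression is on, the configured level (clamped to 1-9 by load_conf) would typically be applied per variable with the standard NetCDF deflate call. A sketch under that assumption; ncid and varid are assumed to refer to an already-defined NetCDF-4 file and variable, and the output code itself is not reproduced here:

  /* Enable zlib deflate on a NetCDF-4 variable, using the compression
     level read from the configuration (1..9). */
  int istat = nc_def_var_deflate(ncid, varid, 1 /* shuffle */, 1 /* deflate */,
                                 data->conf->compression_level);
  if (istat != NC_NOERR)
    (void) fprintf(stderr, "nc_def_var_deflate: %s\n", nc_strerror(istat));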
Fix an incorrect time coordinate in the input climate model file: use 01/01/YEARBEGIN as the first day and assume daily data, since daily data is required.
year_begin_ctrl
year_begin_other
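Under the fixtime option the time coordinate is rebuilt rather than trusted. A very small sketch of what "use 01/01/YEARBEGIN as first day and assume daily data" can amount to; ntime, time_ls and the precomputed offset t0 (the start date expressed in the base time units) are illustrative names, not the actual variables:

  /* Rebuild a daily time axis starting 01/01/year_begin: one step per day. */
  int t;
  for (t = 0; t < ntime; t++)
    time_ls[t] = t0 + (double) t;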
clim_filter_width
clim_filter_type
deltat
classif_type
npartitions
nclassifications
use_downscaled_year
only_wt
base_time_units
base_calendar_type
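base_time_units and base_calendar_type define how time values are interpreted throughout the run (defaults: "days since 1900-01-01 12:00:00", gregorian). A sketch of decoding one such value with the udunits-1 API this program links against; time_value is an assumed input, error checking is omitted, and only the standard calendar is covered by utCalendar itself:

  utUnit timeunit;
  int year, month, day, hour, minute;
  float second;

  (void) utInit("");                                   /* default units database */
  (void) utScan(data->conf->time_units, &timeunit);    /* e.g. "days since 1900-01-01 12:00:00" */
  (void) utCalendar(time_value, &timeunit, &year, &month, &day,
                    &hour, &minute, &second);          /* decode one time value */
  (void) utTerm();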
longitude_name_eof
latitude_name_eof
dimx_name_eof
dimy_name_eof
eof_name
pts_name
clust_name
longitude min
longitude max
latitude min
latitude max
longitude min
longitude max
latitude min
latitude max
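These two sets of bounds define rectangular longitude/latitude sub-domains for the large-scale and secondary large-scale fields. A generic sketch of how such bounds translate into grid-index ranges on a regular 1-D coordinate axis; lon, nlon, istart and iend are illustrative names, and the actual extraction code is elsewhere:

  /* Find the index range [istart, iend] of longitudes falling inside
     [longitude_min, longitude_max]; the same idea applies to latitudes. */
  int i, istart = -1, iend = -1;
  for (i = 0; i < nlon; i++)
    if (lon[i] >= data->conf->longitude_min && lon[i] <= data->conf->longitude_max) {
      if (istart < 0) istart = i;
      iend = i;
    }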
use_mask
filename
maskname
lonname
latname
coords
dimxname
dimyname
dimcoords
projection
longitude min
longitude max
latitude min
latitude max
use_mask
filename
maskname
lonname
latname
coords
dimxname
dimyname
dimcoords
projection
path
month_begin
title
title_french
summary
summary_french
description
keywords
processor
institution
creator_email
creator_url
creator_name
version
scenario
scenario_co2
model
institution_model
country
member
downscaling_forcing
timestep
contact_email
contact_name
other_contact_email
other_contact_name
number_of_variables
Allocate memory for variable information
Data frequency
template
Number of digits for year in data filename
Data path
month_begin
coords
longitude_name
latitude_name
dimx_name
dimy_name
time_name
coords
Altitude NetCDF filename
altitude_name NetCDF variable name
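The template, year-digit and path settings suggest that observation filenames are generated per variable and per year. A purely illustrative sketch; the naming scheme shown here is hypothetical and the real template syntax is whatever the configuration defines:

  char obsfile[MAXPATH];
  /* Hypothetical naming scheme: <path>/<acronym>_<year>.nc with a 4-digit year. */
  (void) snprintf(obsfile, MAXPATH, "%s/%s_%04d.nc",
                  data->conf->obs_var->path, data->conf->obs_var->acronym[i], year);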
learning_provided
learning_save
number of EOFs one parameter
number of EOFs observation data
number of EOFs reanalysis data
filename_save_weight
filename_save_learn
filename_save_clust_learn
filename_open_weight
filename_open_learn
filename_open_clust_learn
filename_obs_eof
filename_rea_eof
filename_rea_sup
nomvar_obs_eof
nomvar_rea_eof
nomvar_obs_sing
nomvar_rea_sing
nomvar_rea_sup
rea_coords
rea_gridname
rea_dimxname
rea_dimyname
rea_lonname
rea_latname
rea_timename
obs_dimxname
obs_dimyname
obs_lonname
obs_latname
obs_timename
obs_eofname
sup_lonname
sup_latname
nomvar_time
nomvar_weight
nomvar_class_clusters
nomvar_precip_reg
nomvar_precip_reg_cst
nomvar_precip_reg_rsq
nomvar_precip_reg_acor
nomvar_precip_reg_vif
nomvar_precip_reg_dist
nomvar_precip_reg_err
nomvar_precip_index
nomvar_precip_index_obs
nomvar_sup_index
nomvar_sup_val
nomvar_sup_index_mean
nomvar_sup_index_var
nomvar_pc_normalized_var
filename
dimxname
dimyname
lonname
latname
ptsname
dist
regression_save
filename_save_ctrl_reg
filename_save_other_reg
timename
number_of_large_scale_fields
number_of_large_scale_control_fields
number_of_secondary_large_scale_fields
number_of_secondary_large_scale_control_fields
Climatology values
clim_remove
clim_provided
clim_openfilename
clim_save
EOF and Singular values
eof_project
number_of_eofs
eof_coordinates
eof_openfilename
eof_scale
eof_weight
eof_name
sing_name
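eof_project, number_of_eofs, eof_scale and the singular-value settings control how large-scale fields are projected onto a truncated EOF basis. One common convention for such a projection, shown only as an orienting sketch (the actual routine lives in the EOF/pceof code; field, eof, sing, proj and the dimensions are illustrative names):

  /* Project field (ntime x npts) onto neof EOF patterns (neof x npts),
     scaling each principal component by the corresponding singular value. */
  int t, e, p;
  for (t = 0; t < ntime; t++)
    for (e = 0; e < neof; e++) {
      double pc = 0.0;
      for (p = 0; p < npts; p++)
        pc += field[t*npts + p] * eof[e*npts + p];
      proj[t*neof + e] = pc / sing[e];
    }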
downscale
year_begin
month_begin
day_begin
year_end
month_end
day_end
downscale
year_begin
month_begin
day_begin
year_end
month_end
day_end
number_of_seasons
Allocate memory for season-dependent variables
number_of_clusters
number_of_regression_vars
number_of_days_search
number_of_days_choices
number_of_days_choices_min
days_shuffle
secondary_field_choice
secondary_field_main_choice
secondary_covariance
season
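number_of_days_search, number_of_days_choices and days_shuffle describe the analog-day selection: candidate days are ranked by their distance to the target day and one of the closest few is retained. A minimal ranking sketch using gsl_sort_index from GSL; ndays and dist are illustrative names for the candidate count and their distances:

  #include <gsl/gsl_sort.h>

  /* Rank candidate days by ascending distance; the first few indices in
     'order' are the analog candidates to choose among (optionally shuffled). */
  size_t *order = (size_t *) malloc(ndays * sizeof(size_t));
  if (order == NULL) alloc_error(__FILE__, __LINE__);
  (void) gsl_sort_index(order, dist, 1, (size_t) ndays);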
output_only
output
analog_save
analog_file_ctrl
analog_file_other
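Every setting listed above is fetched with the same XPath pattern that recurs throughout the source listing below: build the path, query it, fall back to a default when the element is absent, and release the libxml2 string. A condensed sketch of that pattern for one Boolean setting ("debug" is used as the example; path, val and conf are as declared in the listing):

  (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "debug");
  val = xml_get_setting(conf, path);
  if (val != NULL && !xmlStrcmp(val, (xmlChar *) "On"))
    data->conf->debug = TRUE;
  else
    data->conf->debug = FALSE;       /* default when missing or not "On" */
  if (val != NULL)
    (void) xmlFree(val);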
Definition at line 59 of file load_conf.c.
References var_struct::acronym, alloc_error(), var_struct::altitude, var_struct::altitudename, field_struct::analog_days, conf_struct::analog_file_ctrl, conf_struct::analog_file_other, conf_struct::analog_save, conf_struct::cal_type, conf_struct::classif_type, var_struct::clim, clim_info_struct::clim_filein_ls, clim_info_struct::clim_fileout_ls, conf_struct::clim_filter_type, conf_struct::clim_filter_width, field_data_struct::clim_info, clim_info_struct::clim_nomvar_ls, clim_info_struct::clim_provided, clim_info_struct::clim_remove, clim_info_struct::clim_save, conf_struct::clustname, conf_struct::compression, conf_struct::compression_level, data_struct::conf, conf_struct::config, info_struct::contact_email, info_struct::contact_name, proj_struct::coords, mask_struct::coords, info_struct::country, info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, learning_struct::data, field_struct::data, period_struct::day_begin, period_struct::day_end, downscale_struct::days_class_clusters, conf_struct::debug, downscale_struct::delta, var_struct::delta, downscale_struct::delta_dayschoice, conf_struct::deltat, info_struct::description, var_struct::dimcoords, mask_struct::dimcoords, field_data_struct::dimxname, reg_struct::dimxname, var_struct::dimxname, mask_struct::dimxname, conf_struct::dimxname_eof, field_data_struct::dimyname, reg_struct::dimyname, var_struct::dimyname, mask_struct::dimyname, conf_struct::dimyname_eof, downscale_struct::dist, reg_struct::dist, field_data_struct::down, period_struct::downscale, info_struct::downscaling_forcing, eof_info_struct::eof_coords, field_data_struct::eof_data, eof_info_struct::eof_filein_ls, field_data_struct::eof_info, eof_data_struct::eof_ls, eof_data_struct::eof_nomvar_ls, eof_info_struct::eof_project, eof_info_struct::eof_scale, eof_info_struct::eof_weight, conf_struct::eofname, var_struct::factor, FALSE, data_struct::field, field_data_struct::field_eof_ls, field_data_struct::field_ls, reg_struct::filename, mask_struct::filename, learning_eof_struct::filename_eof, field_data_struct::filename_ls, learning_struct::filename_open_clust_learn, learning_struct::filename_open_learn, learning_struct::filename_open_weight, learning_struct::filename_rea_sup, learning_struct::filename_save_clust_learn, reg_struct::filename_save_ctrl_reg, learning_struct::filename_save_learn, reg_struct::filename_save_other_reg, learning_struct::filename_save_weight, conf_struct::fixtime, conf_struct::format, var_struct::frequency, proj_struct::grid_mapping_name, var_struct::height, eof_info_struct::info, field_data_struct::info, data_struct::info, info_struct::institution, info_struct::institution_model, info_struct::keywords, field_struct::lat_eof_ls, field_struct::lat_ls, conf_struct::latitude_max, conf_struct::latitude_min, field_data_struct::latname, reg_struct::latname, var_struct::latname, mask_struct::latname, conf_struct::latname_eof, data_struct::learning, conf_struct::learning_mask_latitude_max, conf_struct::learning_mask_latitude_min, conf_struct::learning_mask_longitude_max, conf_struct::learning_mask_longitude_min, conf_struct::learning_maskfile, learning_struct::learning_provided, learning_struct::learning_save, field_struct::lon_eof_ls, field_struct::lon_ls, conf_struct::longitude_max, conf_struct::longitude_min, field_data_struct::lonname, reg_struct::lonname, var_struct::lonname, mask_struct::lonname, conf_struct::lonname_eof, mask_struct::maskname, MAXPATH, downscale_struct::mean, downscale_struct::mean_dist, info_struct::member, 
info_struct::model, season_struct::month, period_struct::month_begin, var_struct::month_begin, period_struct::month_end, field_struct::n_ls, var_struct::name, proj_struct::name, NCAT, conf_struct::nclassifications, season_struct::nclusters, season_struct::ndays, season_struct::ndayschoices, season_struct::ndayschoices_min, eof_info_struct::neof_ls, var_struct::netcdfname, season_struct::nmonths, var_struct::nobs_var, learning_struct::nomvar_class_clusters, learning_eof_struct::nomvar_eof, field_data_struct::nomvar_ls, learning_struct::nomvar_pc_normalized_var, learning_struct::nomvar_precip_index, learning_struct::nomvar_precip_index_obs, learning_struct::nomvar_precip_reg, learning_struct::nomvar_precip_reg_acor, learning_struct::nomvar_precip_reg_cst, learning_struct::nomvar_precip_reg_dist, learning_struct::nomvar_precip_reg_err, learning_struct::nomvar_precip_reg_rsq, learning_struct::nomvar_precip_reg_vif, learning_struct::nomvar_rea_sup, learning_eof_struct::nomvar_sing, learning_struct::nomvar_sup_index, learning_struct::nomvar_sup_index_mean, learning_struct::nomvar_sup_index_var, learning_struct::nomvar_sup_val, learning_struct::nomvar_time, learning_struct::nomvar_weight, conf_struct::npartitions, conf_struct::nperiods, season_struct::nreg, conf_struct::nseasons, learning_struct::obs, learning_struct::obs_dimxname, learning_struct::obs_dimyname, learning_struct::obs_eofname, learning_struct::obs_latname, learning_struct::obs_lonname, learning_struct::obs_neof, learning_struct::obs_timename, conf_struct::obs_var, conf_struct::only_wt, info_struct::other_contact_email, info_struct::other_contact_name, conf_struct::output, var_struct::output, conf_struct::output_month_begin, conf_struct::output_only, conf_struct::output_path, var_struct::path, conf_struct::period, conf_struct::period_ctrl, var_struct::post, field_struct::precip_index, info_struct::processor, field_struct::proj, var_struct::proj, mask_struct::proj, reg_struct::ptsname, conf_struct::ptsname, learning_struct::rea, learning_struct::rea_coords, learning_struct::rea_dimxname, learning_struct::rea_dimyname, learning_struct::rea_gridname, learning_struct::rea_latname, learning_struct::rea_lonname, learning_struct::rea_neof, learning_struct::rea_timename, data_struct::reg, reg_struct::reg_save, info_struct::scenario, info_struct::scenario_co2, conf_struct::season, season_struct::secondary_choice, season_struct::secondary_cov, conf_struct::secondary_latitude_max, conf_struct::secondary_latitude_min, conf_struct::secondary_longitude_max, conf_struct::secondary_longitude_min, season_struct::secondary_main_choice, data_struct::secondary_mask, season_struct::shuffle, eof_data_struct::sing_ls, eof_data_struct::sing_nomvar_ls, downscale_struct::smean_2d, downscale_struct::smean_norm, info_struct::software, info_struct::summary, info_struct::summary_french, learning_struct::sup_latname, learning_struct::sup_lonname, downscale_struct::sup_val_norm, downscale_struct::svar_2d, var_struct::template, field_struct::time_ls, learning_data_struct::time_s, learning_eof_struct::time_s, learning_struct::time_s, field_struct::time_s, conf_struct::time_units, field_data_struct::timename, reg_struct::timename, var_struct::timename, info_struct::timestep, info_struct::title, info_struct::title_french, TRUE, var_struct::units, conf_struct::use_downscaled_year, mask_struct::use_mask, downscale_struct::var, downscale_struct::var_dist, downscale_struct::var_pc_norm, info_struct::version, xml_free_config(), xml_get_setting(), xml_load_config(), 
period_struct::year_begin, conf_struct::year_begin_ctrl, conf_struct::year_begin_other, var_struct::year_digits, and period_struct::year_end.
Referenced by main().
00059 { 00066 FILE *infile; /* Input file pointer */ 00067 long int numbytes; /* Size of entire file */ 00068 00069 xmlConfig_t *conf; /* Pointer to XML Config */ 00070 char setting_name[1000]; /* Setting name in XML file */ 00071 xmlChar *val; /* Value in XML file */ 00072 int i; /* Loop counter */ 00073 int j; /* Loop counter */ 00074 int ii; /* Loop counter */ 00075 int cat; /* Loop counter for field category */ 00076 int istat; /* Diagnostic status */ 00077 char *path = NULL; /* XPath */ 00078 00079 char *token; /* Token for string decoding */ 00080 char *saveptr = NULL; /* Pointer to save buffer data for thread-safe strtok use */ 00081 char *catstr; /* Category string */ 00082 char *catstrt; /* Category string */ 00083 00084 (void) fprintf(stdout, "%s: *** Current Configuration ***\n\n", __FILE__); 00085 00086 (void) strcpy(setting_name, "setting"); 00087 00088 /* Allocate main memory data structure */ 00089 data->conf = (conf_struct *) malloc(sizeof(conf_struct)); 00090 if (data->conf == NULL) alloc_error(__FILE__, __LINE__); 00091 00094 /* Open config file */ 00095 infile = fopen(fileconf, "r"); 00096 if (infile == NULL) { 00097 (void) fprintf(stderr, "%s: Cannot open %s configuration file for reading. Aborting.\n", __FILE__, fileconf); 00098 (void) free(data->conf); 00099 return -1; 00100 } 00101 00102 /* Get the number of bytes */ 00103 istat = fseek(infile, 0L, SEEK_END); 00104 if (istat < 0) { 00105 (void) fprintf(stderr, "%s: Cannot seek to end of %s configuration file. Aborting.\n", __FILE__, fileconf); 00106 (void) free(data->conf); 00107 return -1; 00108 } 00109 numbytes = ftell(infile); 00110 if (numbytes < 0) { 00111 (void) fprintf(stderr, "%s: Cannot get file pointer position of %s configuration file. Aborting.\n", __FILE__, fileconf); 00112 (void) free(data->conf); 00113 return -1; 00114 } 00115 00116 /* Reset the file position indicator to the beginning of the file */ 00117 istat = fseek(infile, 0L, SEEK_SET); 00118 if (istat < 0) { 00119 (void) fprintf(stderr, "%s: Cannot seek to beginning of %s configuration file. Aborting.\n", __FILE__, fileconf); 00120 (void) free(data->conf); 00121 return -1; 00122 } 00123 00124 /* Allocate memory */ 00125 data->conf->config = (char *) calloc(numbytes+1, sizeof(char)); 00126 if (data->conf->config == NULL) alloc_error(__FILE__, __LINE__); 00127 00128 /* Copy all the text into the buffer */ 00129 istat = fread(data->conf->config, sizeof(char), numbytes, infile); 00130 if (istat < 0) { 00131 (void) fprintf(stderr, "%s: Cannot read %s configuration file. Aborting.\n", __FILE__, fileconf); 00132 (void) free(data->conf); 00133 (void) free(data->conf->config); 00134 return -1; 00135 } 00136 /* Add null character at the end of the string */ 00137 data->conf->config[numbytes] = '\0'; 00138 00139 /* Close file */ 00140 istat = fclose(infile); 00141 if (istat < 0) { 00142 (void) fprintf(stderr, "%s: Cannot close properly %s configuration file. 
Aborting.\n", __FILE__, fileconf); 00143 (void) free(data->conf); 00144 (void) free(data->conf->config); 00145 return -1; 00146 } 00147 00148 #if DEBUG > 7 00149 printf("The file called test.dat contains this text\n\n%s", data->conf->config); 00150 #endif 00151 00152 /* Load XML configuration file into memory */ 00153 conf = xml_load_config(fileconf); 00154 if (conf == NULL) { 00155 (void) free(data->conf); 00156 (void) free(data->conf->config); 00157 (void) xmlCleanupParser(); 00158 return -1; 00159 } 00160 00161 /* Allocate memory in main data structures */ 00162 data->conf->period_ctrl = (period_struct *) malloc(sizeof(period_struct)); 00163 if (data->conf->period_ctrl == NULL) alloc_error(__FILE__, __LINE__); 00164 data->info = (info_struct *) malloc(sizeof(info_struct)); 00165 if (data->info == NULL) alloc_error(__FILE__, __LINE__); 00166 data->learning = (learning_struct *) malloc(sizeof(learning_struct)); 00167 if (data->learning == NULL) alloc_error(__FILE__, __LINE__); 00168 data->reg = (reg_struct *) malloc(sizeof(reg_struct)); 00169 if (data->reg == NULL) alloc_error(__FILE__, __LINE__); 00170 00171 data->field = (field_struct *) malloc(NCAT * sizeof(field_struct)); 00172 if (data->field == NULL) alloc_error(__FILE__, __LINE__); 00173 00174 /* Loop over field categories */ 00175 /* Allocate memory in main data structure */ 00176 for (i=0; i<NCAT; i++) { 00177 data->field[i].time_ls = (double *) malloc(sizeof(double)); 00178 if (data->field[i].time_ls == NULL) alloc_error(__FILE__, __LINE__); 00179 data->field[i].time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 00180 if (data->field[i].time_s == NULL) alloc_error(__FILE__, __LINE__); 00181 00182 data->field[i].lat_ls = NULL; 00183 data->field[i].lon_ls = NULL; 00184 00185 data->field[i].lat_eof_ls = NULL; 00186 data->field[i].lon_eof_ls = NULL; 00187 } 00188 00189 /*** Get needed settings ***/ 00190 /* Set default value if not in configuration file */ 00191 path = (char *) malloc(MAXPATH * sizeof(char)); 00192 if (path == NULL) alloc_error(__FILE__, __LINE__); 00193 00195 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "debug"); 00196 val = xml_get_setting(conf, path); 00197 if ( !xmlStrcmp(val, (xmlChar *) "On") ) 00198 data->conf->debug = TRUE; 00199 else 00200 data->conf->debug = FALSE; 00201 (void) fprintf(stdout, "%s: debug = %d\n", __FILE__, data->conf->debug); 00202 if (val != NULL) 00203 (void) xmlFree(val); 00204 00206 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "format"); 00207 val = xml_get_setting(conf, path); 00208 if (val != NULL) { 00209 data->conf->format = (int) xmlXPathCastStringToNumber(val); 00210 if (data->conf->format != 3 && data->conf->format != 4) 00211 data->conf->format = 3; 00212 (void) xmlFree(val); 00213 } 00214 else 00215 data->conf->format = 3; 00216 if (data->conf->format == 3) 00217 (void) fprintf(stdout, "%s: NetCDF-3 Classic output format.\n", __FILE__); 00218 else 00219 (void) fprintf(stdout, "%s: NetCDF-4 New HDF5-based format with Classic-type output support.\n", __FILE__); 00220 00222 if (data->conf->format == 4) { 00223 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "compression"); 00224 val = xml_get_setting(conf, path); 00225 if (val != NULL) { 00226 if ( !xmlStrcmp(val, (xmlChar *) "On") ) { 00227 data->conf->compression = TRUE; 00228 (void) fprintf(stdout, "%s: Compression ACTIVE for NetCDF-4 format\n", __FILE__); 00229 } 00230 else { 00231 data->conf->compression = FALSE; 00232 (void) fprintf(stdout, "%s: 
Compression DISABLED for NetCDF-4 format\n", __FILE__); 00233 } 00234 (void) xmlFree(val); 00235 } 00236 } 00237 else 00238 data->conf->compression = FALSE; 00239 00241 if (data->conf->compression == TRUE) { 00242 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "compression_level"); 00243 val = xml_get_setting(conf, path); 00244 if (val != NULL) { 00245 data->conf->compression_level = (int) xmlXPathCastStringToNumber(val); 00246 if (data->conf->compression_level < 0) { 00247 data->conf->compression_level = 1; 00248 (void) fprintf(stdout, 00249 "%s: WARNING: NetCDF-4 Compression Level invalid value (must be between 1 and 9 inclusively). Forced to %d.\n", 00250 __FILE__, data->conf->compression_level); 00251 } 00252 else if (data->conf->compression_level > 9) { 00253 data->conf->compression_level = 9; 00254 (void) fprintf(stdout, 00255 "%s: WARNING: NetCDF-4 Compression Level invalid value (must be between 1 and 9 inclusively). Forced to %d.\n", 00256 __FILE__, data->conf->compression_level); 00257 } 00258 (void) xmlFree(val); 00259 } 00260 else { 00261 data->conf->compression_level = 1; 00262 (void) fprintf(stdout, 00263 "%s: WARNING: NetCDF-4 Compression Level not set! (must be between 1 and 9 inclusively). Forced to default value of %d.\n", 00264 __FILE__, data->conf->compression_level); 00265 } 00266 (void) fprintf(stdout, "%s: NetCDF-4 Compression Level = %d.\n", __FILE__, data->conf->compression_level); 00267 } 00268 else 00269 data->conf->compression_level = 0; 00270 00272 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "fixtime"); 00273 val = xml_get_setting(conf, path); 00274 if (val != NULL) { 00275 if ( !xmlStrcmp(val, (xmlChar *) "On") ) { 00276 data->conf->fixtime = TRUE; 00277 (void) fprintf(stdout, "%s: WARNING: Will fix time coordinate start date using start date in configuration file!\n", __FILE__); 00278 } 00279 else { 00280 data->conf->fixtime = FALSE; 00281 (void) fprintf(stdout, "%s: Will NOT fix time coordinate start date.\n", __FILE__); 00282 } 00283 (void) xmlFree(val); 00284 } 00285 00286 if (data->conf->fixtime == TRUE) { 00288 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "year_begin_ctrl"); 00289 val = xml_get_setting(conf, path); 00290 if (val != NULL) { 00291 data->conf->year_begin_ctrl = xmlXPathCastStringToNumber(val); 00292 (void) fprintf(stdout, "%s: year_begin_ctrl = %d\n", __FILE__, data->conf->year_begin_ctrl); 00293 (void) xmlFree(val); 00294 } 00295 else 00296 data->conf->year_begin_ctrl = -1; 00298 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "year_begin_other"); 00299 val = xml_get_setting(conf, path); 00300 if (val != NULL) { 00301 data->conf->year_begin_other = xmlXPathCastStringToNumber(val); 00302 (void) fprintf(stdout, "%s: year_begin_other = %d\n", __FILE__, data->conf->year_begin_other); 00303 (void) xmlFree(val); 00304 } 00305 else 00306 data->conf->year_begin_other = -1; 00307 if (data->conf->year_begin_ctrl == -1 || data->conf->year_begin_other == -1) { 00308 (void) fprintf(stderr, "%s: ERROR: must specify year_begin_ctrl and year_begin_other when using the fixtime setting! 
Aborting.\n", __FILE__); 00309 return -1; 00310 } 00311 } 00312 00314 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "clim_filter_width"); 00315 val = xml_get_setting(conf, path); 00316 data->conf->clim_filter_width = (int) xmlXPathCastStringToNumber(val); 00317 if ( data->conf->clim_filter_width < 4 || data->conf->clim_filter_width > 365 ) 00318 data->conf->clim_filter_width = 60; 00319 (void) fprintf(stdout, "%s: clim_filter_width = %d\n", __FILE__, data->conf->clim_filter_width); 00320 if (val != NULL) 00321 (void) xmlFree(val); 00322 00324 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "clim_filter_type"); 00325 val = xml_get_setting(conf, path); 00326 if ( !xmlStrcmp(val, (xmlChar *) "hanning") ) 00327 data->conf->clim_filter_type = strdup("hanning"); 00328 else { 00329 (void) fprintf(stderr, "%s: Invalid clim_filter_type value %s in configuration file. Aborting.\n", __FILE__, val); 00330 (void) abort(); 00331 } 00332 (void) fprintf(stdout, "%s: clim_filter_type = %s\n", __FILE__, data->conf->clim_filter_type); 00333 if (val != NULL) 00334 (void) xmlFree(val); 00335 00337 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "deltat"); 00338 val = xml_get_setting(conf, path); 00339 if (val != NULL) 00340 data->conf->deltat = (double) xmlXPathCastStringToNumber(val); 00341 else 00342 data->conf->deltat = 2.0; 00343 (void) fprintf(stdout, "%s: Absolute difference of temperature for corrections = %lf\n", __FILE__, data->conf->deltat); 00344 if (val != NULL) 00345 (void) xmlFree(val); 00346 00348 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "classif_type"); 00349 val = xml_get_setting(conf, path); 00350 if ( !xmlStrcmp(val, (xmlChar *) "euclidian") ) 00351 data->conf->classif_type = strdup("euclidian"); 00352 else { 00353 (void) fprintf(stderr, "%s: Invalid classif_type value %s in configuration file. 
Aborting.\n", __FILE__, val); 00354 (void) abort(); 00355 } 00356 (void) fprintf(stdout, "%s: classif_type = %s\n", __FILE__, data->conf->classif_type); 00357 if (val != NULL) 00358 (void) xmlFree(val); 00359 00361 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_partitions"); 00362 val = xml_get_setting(conf, path); 00363 if (val != NULL) 00364 data->conf->npartitions = xmlXPathCastStringToNumber(val); 00365 else 00366 data->conf->npartitions = 30; 00367 (void) fprintf(stdout, "%s: Number of partitions = %d\n", __FILE__, data->conf->npartitions); 00368 if (val != NULL) 00369 (void) xmlFree(val); 00370 00372 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_classifications"); 00373 val = xml_get_setting(conf, path); 00374 if (val != NULL) 00375 data->conf->nclassifications = xmlXPathCastStringToNumber(val); 00376 else 00377 data->conf->nclassifications = 1000; 00378 (void) fprintf(stdout, "%s: Number of classifications = %d\n", __FILE__, data->conf->nclassifications); 00379 if (val != NULL) 00380 (void) xmlFree(val); 00381 00383 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "use_downscaled_year"); 00384 val = xml_get_setting(conf, path); 00385 if (val != NULL) 00386 data->conf->use_downscaled_year = xmlXPathCastStringToNumber(val); 00387 else 00388 data->conf->use_downscaled_year = 1; 00389 (void) fprintf(stdout, "%s: Use_downscaled_year = %d\n", __FILE__, data->conf->use_downscaled_year); 00390 if (val != NULL) 00391 (void) xmlFree(val); 00392 00394 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "only_wt"); 00395 val = xml_get_setting(conf, path); 00396 if (val != NULL) 00397 data->conf->only_wt = xmlXPathCastStringToNumber(val); 00398 else 00399 data->conf->only_wt = 1; 00400 (void) fprintf(stdout, "%s: only_wt = %d\n", __FILE__, data->conf->only_wt); 00401 if (val != NULL) 00402 (void) xmlFree(val); 00403 00405 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "base_time_units"); 00406 val = xml_get_setting(conf, path); 00407 if (val != NULL) 00408 data->conf->time_units = strdup((char *) val); 00409 else 00410 data->conf->time_units = strdup("days since 1900-01-01 12:00:00"); 00411 (void) fprintf(stdout, "%s: base_time_units = %s\n", __FILE__, data->conf->time_units); 00412 if (val != NULL) 00413 (void) xmlFree(val); 00414 00416 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "base_calendar_type"); 00417 val = xml_get_setting(conf, path); 00418 if (val != NULL) 00419 data->conf->cal_type = strdup((char *) val); 00420 else 00421 data->conf->cal_type = strdup("gregorian"); 00422 (void) fprintf(stdout, "%s: base_calendar_type = %s\n", __FILE__, data->conf->cal_type); 00423 if (val != NULL) 00424 (void) xmlFree(val); 00425 00427 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "longitude_name_eof"); 00428 val = xml_get_setting(conf, path); 00429 if (val != NULL) 00430 data->conf->lonname_eof = strdup((char *) val); 00431 else 00432 data->conf->lonname_eof = strdup("lon"); 00433 (void) fprintf(stdout, "%s: longitude_name_eof = %s\n", __FILE__, data->conf->lonname_eof); 00434 if (val != NULL) 00435 (void) xmlFree(val); 00436 00438 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "latitude_name_eof"); 00439 val = xml_get_setting(conf, path); 00440 if (val != NULL) 00441 data->conf->latname_eof = strdup((char *) val); 00442 else 00443 data->conf->latname_eof = strdup("lat"); 00444 (void) fprintf(stdout, "%s: latitude_name_eof 
= %s\n", __FILE__, data->conf->latname_eof); 00445 if (val != NULL) 00446 (void) xmlFree(val); 00447 00449 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "dimx_name_eof"); 00450 val = xml_get_setting(conf, path); 00451 if (val != NULL) 00452 data->conf->dimxname_eof = strdup((char *) val); 00453 else 00454 data->conf->dimxname_eof = strdup("lon"); 00455 (void) fprintf(stdout, "%s: dimx_name_eof = %s\n", __FILE__, data->conf->dimxname_eof); 00456 if (val != NULL) 00457 (void) xmlFree(val); 00458 00460 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "dimy_name_eof"); 00461 val = xml_get_setting(conf, path); 00462 if (val != NULL) 00463 data->conf->dimyname_eof = strdup((char *) val); 00464 else 00465 data->conf->dimyname_eof = strdup("lat"); 00466 (void) fprintf(stdout, "%s: dimy_name_eof = %s\n", __FILE__, data->conf->dimyname_eof); 00467 if (val != NULL) 00468 (void) xmlFree(val); 00469 00471 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "eof_name"); 00472 val = xml_get_setting(conf, path); 00473 if (val != NULL) 00474 data->conf->eofname = strdup((char *) val); 00475 else 00476 data->conf->eofname = strdup("eof"); 00477 (void) fprintf(stdout, "%s: eof_name = %s\n", __FILE__, data->conf->eofname); 00478 if (val != NULL) 00479 (void) xmlFree(val); 00480 00482 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "pts_name"); 00483 val = xml_get_setting(conf, path); 00484 if (val != NULL) 00485 data->conf->ptsname = strdup((char *) val); 00486 else 00487 data->conf->ptsname = strdup("pts"); 00488 (void) fprintf(stdout, "%s: pts_name = %s\n", __FILE__, data->conf->ptsname); 00489 if (val != NULL) 00490 (void) xmlFree(val); 00491 00493 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "clust_name"); 00494 val = xml_get_setting(conf, path); 00495 if (val != NULL) 00496 data->conf->clustname = strdup((char *) val); 00497 else 00498 data->conf->clustname = strdup("clust"); 00499 (void) fprintf(stdout, "%s: clust_name = %s\n", __FILE__, data->conf->clustname); 00500 if (val != NULL) 00501 (void) xmlFree(val); 00502 00503 /**** LARGE-SCALE FIELDS (CLASSIFICATION) DOMAIN CONFIGURATION ****/ 00504 00506 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_large_scale", "longitude", "min"); 00507 val = xml_get_setting(conf, path); 00508 if (val != NULL) 00509 data->conf->longitude_min = xmlXPathCastStringToNumber(val); 00510 else 00511 data->conf->longitude_min = -15.0; 00512 (void) fprintf(stdout, "%s: Large-scale domain longitude min = %lf\n", __FILE__, data->conf->longitude_min); 00513 if (val != NULL) 00514 (void) xmlFree(val); 00515 00517 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_large_scale", "longitude", "max"); 00518 val = xml_get_setting(conf, path); 00519 if (val != NULL) 00520 data->conf->longitude_max = xmlXPathCastStringToNumber(val); 00521 else 00522 data->conf->longitude_max = 20.0; 00523 (void) fprintf(stdout, "%s: Large-scale domain longitude max = %lf\n", __FILE__, data->conf->longitude_max); 00524 if (val != NULL) 00525 (void) xmlFree(val); 00526 00528 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_large_scale", "latitude", "min"); 00529 val = xml_get_setting(conf, path); 00530 if (val != NULL) 00531 data->conf->latitude_min = xmlXPathCastStringToNumber(val); 00532 else 00533 data->conf->latitude_min = 35.0; 00534 (void) fprintf(stdout, "%s: Large-scale domain 
latitude min = %lf\n", __FILE__, data->conf->latitude_min); 00535 if (val != NULL) 00536 (void) xmlFree(val); 00537 00539 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_large_scale", "latitude", "max"); 00540 val = xml_get_setting(conf, path); 00541 if (val != NULL) 00542 data->conf->latitude_max = xmlXPathCastStringToNumber(val); 00543 else 00544 data->conf->latitude_max = 60.0; 00545 (void) fprintf(stdout, "%s: Large-scale domain latitude max = %lf\n", __FILE__, data->conf->latitude_max); 00546 if (val != NULL) 00547 (void) xmlFree(val); 00548 00549 /**** SECONDARY LARGE-SCALE FIELDS DOMAIN CONFIGURATION ****/ 00550 00552 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_secondary_large_scale", "longitude", "min"); 00553 val = xml_get_setting(conf, path); 00554 if (val != NULL) 00555 data->conf->secondary_longitude_min = xmlXPathCastStringToNumber(val); 00556 else 00557 data->conf->secondary_longitude_min = -15.0; 00558 (void) fprintf(stdout, "%s: Large-scale domain longitude min = %lf\n", __FILE__, data->conf->secondary_longitude_min); 00559 if (val != NULL) 00560 (void) xmlFree(val); 00561 00563 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_secondary_large_scale", "longitude", "max"); 00564 val = xml_get_setting(conf, path); 00565 if (val != NULL) 00566 data->conf->secondary_longitude_max = xmlXPathCastStringToNumber(val); 00567 else 00568 data->conf->secondary_longitude_max = 20.0; 00569 (void) fprintf(stdout, "%s: Large-scale domain longitude max = %lf\n", __FILE__, data->conf->secondary_longitude_max); 00570 if (val != NULL) 00571 (void) xmlFree(val); 00572 00574 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_secondary_large_scale", "latitude", "min"); 00575 val = xml_get_setting(conf, path); 00576 if (val != NULL) 00577 data->conf->secondary_latitude_min = xmlXPathCastStringToNumber(val); 00578 else 00579 data->conf->secondary_latitude_min = 35.0; 00580 (void) fprintf(stdout, "%s: Large-scale domain latitude min = %lf\n", __FILE__, data->conf->secondary_latitude_min); 00581 if (val != NULL) 00582 (void) xmlFree(val); 00583 00585 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_secondary_large_scale", "latitude", "max"); 00586 val = xml_get_setting(conf, path); 00587 if (val != NULL) 00588 data->conf->secondary_latitude_max = xmlXPathCastStringToNumber(val); 00589 else 00590 data->conf->secondary_latitude_max = 60.0; 00591 (void) fprintf(stdout, "%s: Large-scale domain latitude max = %lf\n", __FILE__, data->conf->secondary_latitude_max); 00592 if (val != NULL) 00593 (void) xmlFree(val); 00594 00595 /**** SECONDARY-LARGE SCALE FIELDS MASK CONFIGURATION ****/ 00596 data->secondary_mask = (mask_struct *) malloc(sizeof(mask_struct)); 00597 if (data->secondary_mask == NULL) alloc_error(__FILE__, __LINE__); 00599 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "use_mask"); 00600 val = xml_get_setting(conf, path); 00601 if (val != NULL) 00602 data->secondary_mask->use_mask = (int) strtol((char *) val, (char **)NULL, 10); 00603 else 00604 data->secondary_mask->use_mask = FALSE; 00605 if (data->secondary_mask->use_mask != FALSE && data->secondary_mask->use_mask != TRUE) { 00606 (void) fprintf(stderr, "%s: Invalid or missing secondary_mask use_mask value %s in configuration file. 
Aborting.\n", __FILE__, val); 00607 return -1; 00608 } 00609 (void) fprintf(stdout, "%s: secondary_mask use_mask=%d\n", __FILE__, data->secondary_mask->use_mask); 00610 if (val != NULL) 00611 (void) xmlFree(val); 00612 00613 if (data->secondary_mask->use_mask == TRUE) { 00615 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "filename"); 00616 val = xml_get_setting(conf, path); 00617 if (val != NULL) { 00618 data->secondary_mask->filename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00619 if (data->secondary_mask->filename == NULL) alloc_error(__FILE__, __LINE__); 00620 (void) strcpy(data->secondary_mask->filename, (char *) val); 00621 (void) fprintf(stdout, "%s: Secondary large-scale fields mask filename = %s\n", __FILE__, data->secondary_mask->filename); 00622 (void) xmlFree(val); 00623 00625 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "mask_name"); 00626 val = xml_get_setting(conf, path); 00627 if (val != NULL) { 00628 data->secondary_mask->maskname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00629 if (data->secondary_mask->maskname == NULL) alloc_error(__FILE__, __LINE__); 00630 (void) strcpy(data->secondary_mask->maskname, (char *) val); 00631 (void) fprintf(stdout, "%s: Secondary large-scale fields mask name = %s\n", __FILE__, data->secondary_mask->maskname); 00632 (void) xmlFree(val); 00633 } 00634 else { 00635 data->secondary_mask->maskname = strdup("mask"); 00636 (void) fprintf(stderr, "%s: Default secondary large-scale fields mask name = %s\n", __FILE__, 00637 data->secondary_mask->maskname); 00638 (void) xmlFree(val); 00639 } 00641 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "longitude_name"); 00642 val = xml_get_setting(conf, path); 00643 if (val != NULL) { 00644 data->secondary_mask->lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00645 if (data->secondary_mask->lonname == NULL) alloc_error(__FILE__, __LINE__); 00646 (void) strcpy(data->secondary_mask->lonname, (char *) val); 00647 (void) fprintf(stdout, "%s: Secondary large-scale fields mask longitude_name = %s\n", __FILE__, data->secondary_mask->lonname); 00648 (void) xmlFree(val); 00649 } 00650 else { 00651 data->secondary_mask->lonname = strdup("lon"); 00652 (void) fprintf(stderr, "%s: Default secondary large-scale fields mask longitude_name = %s\n", __FILE__, 00653 data->secondary_mask->lonname); 00654 (void) xmlFree(val); 00655 } 00657 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "latitude_name"); 00658 val = xml_get_setting(conf, path); 00659 if (val != NULL) { 00660 data->secondary_mask->latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00661 if (data->secondary_mask->latname == NULL) alloc_error(__FILE__, __LINE__); 00662 (void) strcpy(data->secondary_mask->latname, (char *) val); 00663 (void) fprintf(stdout, "%s: Secondary large-scale fields mask latitude_name = %s\n", __FILE__, data->secondary_mask->latname); 00664 (void) xmlFree(val); 00665 } 00666 else { 00667 data->secondary_mask->latname = strdup("lat"); 00668 (void) fprintf(stderr, "%s: Default secondary large-scale fields mask latitude_name = %s\n", __FILE__, 00669 data->secondary_mask->latname); 00670 (void) xmlFree(val); 00671 } 00673 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "coordinates"); 00674 val = 
xml_get_setting(conf, path); 00675 if (val != NULL) 00676 data->secondary_mask->coords = strdup((char *) val); 00677 else 00678 data->secondary_mask->coords = strdup("2D"); 00679 (void) fprintf(stdout, "%s: Secondary large-scale fields mask coords = %s\n", __FILE__, data->secondary_mask->coords); 00680 if (val != NULL) 00681 (void) xmlFree(val); 00683 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "dimx_name"); 00684 val = xml_get_setting(conf, path); 00685 if (val != NULL) { 00686 data->secondary_mask->dimxname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00687 if (data->secondary_mask->dimxname == NULL) alloc_error(__FILE__, __LINE__); 00688 (void) strcpy(data->secondary_mask->dimxname, (char *) val); 00689 (void) fprintf(stdout, "%s: Secondary large-scale fields mask dimx_name = %s\n", __FILE__, data->secondary_mask->dimxname); 00690 (void) xmlFree(val); 00691 } 00692 else { 00693 data->secondary_mask->dimxname = strdup("dimx"); 00694 (void) fprintf(stderr, "%s: Default secondary large-scale fields mask dimx_name = %s\n", __FILE__, 00695 data->secondary_mask->dimxname); 00696 (void) xmlFree(val); 00697 } 00699 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "dimy_name"); 00700 val = xml_get_setting(conf, path); 00701 if (val != NULL) { 00702 data->secondary_mask->dimyname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00703 if (data->secondary_mask->dimyname == NULL) alloc_error(__FILE__, __LINE__); 00704 (void) strcpy(data->secondary_mask->dimyname, (char *) val); 00705 (void) fprintf(stdout, "%s: Secondary large-scale fields mask dimy_name = %s\n", __FILE__, data->secondary_mask->dimyname); 00706 (void) xmlFree(val); 00707 } 00708 else { 00709 data->secondary_mask->dimyname = strdup("dimy"); 00710 (void) fprintf(stderr, "%s: Default secondary large-scale fields mask dimy_name = %s\n", __FILE__, 00711 data->secondary_mask->dimyname); 00712 (void) xmlFree(val); 00713 } 00715 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "dim_coordinates"); 00716 val = xml_get_setting(conf, path); 00717 if (val != NULL) 00718 data->secondary_mask->dimcoords = strdup((char *) val); 00719 else 00720 data->secondary_mask->dimcoords = strdup("2D"); 00721 (void) fprintf(stdout, "%s: Secondary large-scale fields mask dim_coords = %s\n", __FILE__, data->secondary_mask->dimcoords); 00722 if (val != NULL) 00723 (void) xmlFree(val); 00725 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_secondary_large_scale_mask", "projection"); 00726 val = xml_get_setting(conf, path); 00727 if (val != NULL) { 00728 data->secondary_mask->proj = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00729 if (data->secondary_mask->proj == NULL) alloc_error(__FILE__, __LINE__); 00730 (void) strcpy(data->secondary_mask->proj, (char *) val); 00731 (void) xmlFree(val); 00732 } 00733 else 00734 data->secondary_mask->proj = strdup("Latitude_Longitude"); 00735 (void) fprintf(stdout, "%s: Secondary large-scale fields mask projection = %s\n", 00736 __FILE__, data->secondary_mask->proj); 00737 } 00738 else { 00739 (void) fprintf(stderr, "%s: No secondary large-scale fields mask. 
Desactivating the use of the mask.\n", __FILE__); 00740 data->secondary_mask->filename = NULL; 00741 data->secondary_mask->use_mask = FALSE; 00742 (void) xmlFree(val); 00743 } 00744 } 00745 00746 /**** LEARNING MASK DOMAIN CONFIGURATION ****/ 00747 00749 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_learning_mask", "longitude", "min"); 00750 val = xml_get_setting(conf, path); 00751 if (val != NULL) 00752 data->conf->learning_mask_longitude_min = xmlXPathCastStringToNumber(val); 00753 else 00754 data->conf->learning_mask_longitude_min = -999.0; 00755 (void) fprintf(stdout, "%s: Learning mask domain longitude min = %lf\n", __FILE__, data->conf->learning_mask_longitude_min); 00756 if (val != NULL) 00757 (void) xmlFree(val); 00758 00760 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_learning_mask", "longitude", "max"); 00761 val = xml_get_setting(conf, path); 00762 if (val != NULL) 00763 data->conf->learning_mask_longitude_max = xmlXPathCastStringToNumber(val); 00764 else 00765 data->conf->learning_mask_longitude_max = -999.0; 00766 (void) fprintf(stdout, "%s: Learning mask domain longitude max = %lf\n", __FILE__, data->conf->learning_mask_longitude_max); 00767 if (val != NULL) 00768 (void) xmlFree(val); 00769 00771 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_learning_mask", "latitude", "min"); 00772 val = xml_get_setting(conf, path); 00773 if (val != NULL) 00774 data->conf->learning_mask_latitude_min = xmlXPathCastStringToNumber(val); 00775 else 00776 data->conf->learning_mask_latitude_min = -999.0; 00777 (void) fprintf(stdout, "%s: Learning mask domain latitude min = %lf\n", __FILE__, data->conf->learning_mask_latitude_min); 00778 if (val != NULL) 00779 (void) xmlFree(val); 00780 00782 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@type=\"%s\"]", "setting", "domain_learning_mask", "latitude", "max"); 00783 val = xml_get_setting(conf, path); 00784 if (val != NULL) 00785 data->conf->learning_mask_latitude_max = xmlXPathCastStringToNumber(val); 00786 else 00787 data->conf->learning_mask_latitude_max = -999.0; 00788 (void) fprintf(stdout, "%s: Learning mask domain latitude max = %lf\n", __FILE__, data->conf->learning_mask_latitude_max); 00789 if (val != NULL) 00790 (void) xmlFree(val); 00791 00792 /**** LEARNING MASKFILE CONFIGURATION ****/ 00793 data->conf->learning_maskfile = (mask_struct *) malloc(sizeof(mask_struct)); 00794 if (data->conf->learning_maskfile == NULL) alloc_error(__FILE__, __LINE__); 00796 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "use_mask"); 00797 val = xml_get_setting(conf, path); 00798 if (val != NULL) 00799 data->conf->learning_maskfile->use_mask = (int) strtol((char *) val, (char **)NULL, 10); 00800 else 00801 data->conf->learning_maskfile->use_mask = FALSE; 00802 if (data->conf->learning_maskfile->use_mask != FALSE && data->conf->learning_maskfile->use_mask != TRUE) { 00803 (void) fprintf(stderr, "%s: Invalid or missing domain_learning_maskfile use_mask value %s in configuration file. 
Aborting.\n", __FILE__, val); 00804 return -1; 00805 } 00806 (void) fprintf(stdout, "%s: domain_learning_maskfile use_mask=%d\n", __FILE__, data->conf->learning_maskfile->use_mask); 00807 if (val != NULL) 00808 (void) xmlFree(val); 00809 00810 if (data->conf->learning_maskfile->use_mask == TRUE) { 00812 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "filename"); 00813 val = xml_get_setting(conf, path); 00814 if (val != NULL) { 00815 data->conf->learning_maskfile->filename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00816 if (data->conf->learning_maskfile->filename == NULL) alloc_error(__FILE__, __LINE__); 00817 (void) strcpy(data->conf->learning_maskfile->filename, (char *) val); 00818 (void) fprintf(stdout, "%s: Learning domain maskfile filename = %s\n", __FILE__, data->conf->learning_maskfile->filename); 00819 (void) xmlFree(val); 00820 00822 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "mask_name"); 00823 val = xml_get_setting(conf, path); 00824 if (val != NULL) { 00825 data->conf->learning_maskfile->maskname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00826 if (data->conf->learning_maskfile->maskname == NULL) alloc_error(__FILE__, __LINE__); 00827 (void) strcpy(data->conf->learning_maskfile->maskname, (char *) val); 00828 (void) fprintf(stdout, "%s: Learning domain maskfile name = %s\n", __FILE__, data->conf->learning_maskfile->maskname); 00829 (void) xmlFree(val); 00830 } 00831 else { 00832 data->conf->learning_maskfile->maskname = strdup("mask"); 00833 (void) fprintf(stderr, "%s: Default learning domain maskfile name = %s\n", __FILE__, 00834 data->conf->learning_maskfile->maskname); 00835 (void) xmlFree(val); 00836 } 00838 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "longitude_name"); 00839 val = xml_get_setting(conf, path); 00840 if (val != NULL) { 00841 data->conf->learning_maskfile->lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00842 if (data->conf->learning_maskfile->lonname == NULL) alloc_error(__FILE__, __LINE__); 00843 (void) strcpy(data->conf->learning_maskfile->lonname, (char *) val); 00844 (void) fprintf(stdout, "%s: Learning domain maskfile longitude_name = %s\n", __FILE__, data->conf->learning_maskfile->lonname); 00845 (void) xmlFree(val); 00846 } 00847 else { 00848 data->conf->learning_maskfile->lonname = strdup("lon"); 00849 (void) fprintf(stderr, "%s: Default learning domain maskfile longitude_name = %s\n", __FILE__, 00850 data->conf->learning_maskfile->lonname); 00851 (void) xmlFree(val); 00852 } 00854 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "latitude_name"); 00855 val = xml_get_setting(conf, path); 00856 if (val != NULL) { 00857 data->conf->learning_maskfile->latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00858 if (data->conf->learning_maskfile->latname == NULL) alloc_error(__FILE__, __LINE__); 00859 (void) strcpy(data->conf->learning_maskfile->latname, (char *) val); 00860 (void) fprintf(stdout, "%s: Learning domain maskfile latitude_name = %s\n", __FILE__, data->conf->learning_maskfile->latname); 00861 (void) xmlFree(val); 00862 } 00863 else { 00864 data->conf->learning_maskfile->latname = strdup("lat"); 00865 (void) fprintf(stderr, "%s: Default learning domain maskfile latitude_name = %s\n", __FILE__, 00866 data->conf->learning_maskfile->latname); 00867 (void) xmlFree(val); 00868 } 00870 (void) 
sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "coordinates"); 00871 val = xml_get_setting(conf, path); 00872 if (val != NULL) 00873 data->conf->learning_maskfile->coords = strdup((char *) val); 00874 else 00875 data->conf->learning_maskfile->coords = strdup("2D"); 00876 (void) fprintf(stdout, "%s: Learning domain maskfile coords = %s\n", __FILE__, data->conf->learning_maskfile->coords); 00877 if (val != NULL) 00878 (void) xmlFree(val); 00880 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "dimx_name"); 00881 val = xml_get_setting(conf, path); 00882 if (val != NULL) { 00883 data->conf->learning_maskfile->dimxname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00884 if (data->conf->learning_maskfile->dimxname == NULL) alloc_error(__FILE__, __LINE__); 00885 (void) strcpy(data->conf->learning_maskfile->dimxname, (char *) val); 00886 (void) fprintf(stdout, "%s: Learning domain maskfile dimx_name = %s\n", __FILE__, data->conf->learning_maskfile->dimxname); 00887 (void) xmlFree(val); 00888 } 00889 else { 00890 data->conf->learning_maskfile->dimxname = strdup("dimx"); 00891 (void) fprintf(stderr, "%s: Default learning domain maskfile dimx_name = %s\n", __FILE__, 00892 data->conf->learning_maskfile->dimxname); 00893 (void) xmlFree(val); 00894 } 00896 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "dimy_name"); 00897 val = xml_get_setting(conf, path); 00898 if (val != NULL) { 00899 data->conf->learning_maskfile->dimyname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00900 if (data->conf->learning_maskfile->dimyname == NULL) alloc_error(__FILE__, __LINE__); 00901 (void) strcpy(data->conf->learning_maskfile->dimyname, (char *) val); 00902 (void) fprintf(stdout, "%s: Learning domain maskfile dimy_name = %s\n", __FILE__, data->conf->learning_maskfile->dimyname); 00903 (void) xmlFree(val); 00904 } 00905 else { 00906 data->conf->learning_maskfile->dimyname = strdup("dimy"); 00907 (void) fprintf(stderr, "%s: Default learning domain maskfile dimy_name = %s\n", __FILE__, 00908 data->conf->learning_maskfile->dimyname); 00909 (void) xmlFree(val); 00910 } 00912 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "dim_coordinates"); 00913 val = xml_get_setting(conf, path); 00914 if (val != NULL) 00915 data->conf->learning_maskfile->dimcoords = strdup((char *) val); 00916 else 00917 data->conf->learning_maskfile->dimcoords = strdup("2D"); 00918 (void) fprintf(stdout, "%s: Learning domain maskfile dim_coords = %s\n", __FILE__, data->conf->learning_maskfile->dimcoords); 00919 if (val != NULL) 00920 (void) xmlFree(val); 00922 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "domain_learning_maskfile", "projection"); 00923 val = xml_get_setting(conf, path); 00924 if (val != NULL) { 00925 data->conf->learning_maskfile->proj = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 00926 if (data->conf->learning_maskfile->proj == NULL) alloc_error(__FILE__, __LINE__); 00927 (void) strcpy(data->conf->learning_maskfile->proj, (char *) val); 00928 (void) xmlFree(val); 00929 } 00930 else 00931 data->conf->learning_maskfile->proj = strdup("Latitude_Longitude"); 00932 (void) fprintf(stdout, "%s: Learning domain maskfile projection = %s\n", 00933 __FILE__, data->conf->learning_maskfile->proj); 00934 } 00935 else { 00936 (void) fprintf(stderr, "%s: No learning domain maskfile. 
Deactivating the use of the mask.\n", __FILE__); 00937 data->conf->learning_maskfile->filename = NULL; 00938 data->conf->learning_maskfile->use_mask = FALSE; 00939 (void) xmlFree(val); 00940 } 00941 } 00942 00943 /**** OUTPUT CONFIGURATION ****/ 00944 00946 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "path"); 00947 val = xml_get_setting(conf, path); 00948 if (val != NULL) 00949 data->conf->output_path = strdup((char *) val); 00950 else { 00951 (void) fprintf(stderr, "%s: Missing or invalid output path setting. Aborting.\n", __FILE__); 00952 return -1; 00953 } 00954 (void) fprintf(stdout, "%s: output path = %s\n", __FILE__, data->conf->output_path); 00955 if (val != NULL) 00956 (void) xmlFree(val); 00957 00959 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "month_begin"); 00960 val = xml_get_setting(conf, path); 00961 if (val != NULL) 00962 data->conf->output_month_begin = xmlXPathCastStringToNumber(val); 00963 else { 00964 (void) fprintf(stderr, "%s: Missing or invalid output month_begin setting. Aborting.\n", __FILE__); 00965 return -1; 00966 } 00967 (void) fprintf(stdout, "%s: output month_begin = %d\n", __FILE__, data->conf->output_month_begin); 00968 if (val != NULL) 00969 (void) xmlFree(val); 00970 00972 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "title"); 00973 val = xml_get_setting(conf, path); 00974 if (val != NULL) 00975 data->info->title = strdup((char *) val); 00976 else { 00977 data->info->title = strdup("Downscaling data from Cerfacs"); 00978 } 00979 (void) fprintf(stdout, "%s: output metadata title = %s\n", __FILE__, data->info->title); 00980 if (val != NULL) 00981 (void) xmlFree(val); 00982 00984 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "title_french"); 00985 val = xml_get_setting(conf, path); 00986 if (val != NULL) 00987 data->info->title_french = strdup((char *) val); 00988 else { 00989 data->info->title_french = strdup("Donnees de desagregation produites par le Cerfacs"); 00990 } 00991 (void) fprintf(stdout, "%s: output metadata title_french = %s\n", __FILE__, data->info->title_french); 00992 if (val != NULL) 00993 (void) xmlFree(val); 00994 00996 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "summary"); 00997 val = xml_get_setting(conf, path); 00998 if (val != NULL) 00999 data->info->summary = strdup((char *) val); 01000 else { 01001 data->info->summary = strdup("Downscaling data from Cerfacs"); 01002 } 01003 (void) fprintf(stdout, "%s: output metadata summary = %s\n", __FILE__, data->info->summary); 01004 if (val != NULL) 01005 (void) xmlFree(val); 01006 01008 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "summary_french"); 01009 val = xml_get_setting(conf, path); 01010 if (val != NULL) 01011 data->info->summary_french = strdup((char *) val); 01012 else { 01013 data->info->summary_french = strdup("Donnees de desagregation produites par le Cerfacs"); 01014 } 01015 (void) fprintf(stdout, "%s: output metadata summary_french = %s\n", __FILE__, data->info->summary_french); 01016 if (val != NULL) 01017 (void) xmlFree(val); 01018 01020 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "description"); 01021 val = xml_get_setting(conf, path); 01022 if (val != NULL) 01023 data->info->description = strdup((char *) val); 01024 else { 01025 data->info->description = strdup("Downscaling data from Cerfacs"); 01026 } 01027 (void) fprintf(stdout, 
"%s: output metadata description = %s\n", __FILE__, data->info->description); 01028 if (val != NULL) 01029 (void) xmlFree(val); 01030 01032 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "keywords"); 01033 val = xml_get_setting(conf, path); 01034 if (val != NULL) 01035 data->info->keywords = strdup((char *) val); 01036 else { 01037 data->info->keywords = strdup("climat,scenarios,desagregation,downscaling,Cerfacs"); 01038 } 01039 (void) fprintf(stdout, "%s: output metadata keywords = %s\n", __FILE__, data->info->keywords); 01040 if (val != NULL) 01041 (void) xmlFree(val); 01042 01044 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "processor"); 01045 val = xml_get_setting(conf, path); 01046 if (val != NULL) 01047 data->info->processor = strdup((char *) val); 01048 else { 01049 data->info->processor = strdup("C programming language"); 01050 } 01051 (void) fprintf(stdout, "%s: output metadata processor = %s\n", __FILE__, data->info->processor); 01052 if (val != NULL) 01053 (void) xmlFree(val); 01054 01055 /* Initialize software string */ 01056 data->info->software = (char *) malloc(1000 * sizeof(char)); 01057 if (data->info->software == NULL) alloc_error(__FILE__, __LINE__); 01058 (void) sprintf(data->info->software, "%s %s", PACKAGE_NAME, PACKAGE_VERSION); 01059 01061 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "institution"); 01062 val = xml_get_setting(conf, path); 01063 if (val != NULL) 01064 data->info->institution = strdup((char *) val); 01065 else { 01066 data->info->institution = strdup("Cerfacs"); 01067 } 01068 (void) fprintf(stdout, "%s: output metadata institution = %s\n", __FILE__, data->info->institution); 01069 if (val != NULL) 01070 (void) xmlFree(val); 01071 01073 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "creator_email"); 01074 val = xml_get_setting(conf, path); 01075 if (val != NULL) 01076 data->info->creator_email = strdup((char *) val); 01077 else { 01078 data->info->creator_email = strdup("globc@cerfacs.fr"); 01079 } 01080 (void) fprintf(stdout, "%s: output metadata creator_email = %s\n", __FILE__, data->info->creator_email); 01081 if (val != NULL) 01082 (void) xmlFree(val); 01083 01085 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "creator_url"); 01086 val = xml_get_setting(conf, path); 01087 if (val != NULL) 01088 data->info->creator_url = strdup((char *) val); 01089 else { 01090 data->info->creator_url = strdup("http://www.cerfacs.fr/globc/"); 01091 } 01092 (void) fprintf(stdout, "%s: output metadata creator_url = %s\n", __FILE__, data->info->creator_url); 01093 if (val != NULL) 01094 (void) xmlFree(val); 01095 01097 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "creator_name"); 01098 val = xml_get_setting(conf, path); 01099 if (val != NULL) 01100 data->info->creator_name = strdup((char *) val); 01101 else { 01102 data->info->creator_name = strdup("Global Change Team"); 01103 } 01104 (void) fprintf(stdout, "%s: output metadata creator_name = %s\n", __FILE__, data->info->creator_name); 01105 if (val != NULL) 01106 (void) xmlFree(val); 01107 01109 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "version"); 01110 val = xml_get_setting(conf, path); 01111 if (val != NULL) 01112 data->info->version = strdup((char *) val); 01113 else { 01114 data->info->version = strdup("1.0"); 01115 } 01116 (void) fprintf(stdout, "%s: output metadata 
version = %s\n", __FILE__, data->info->version); 01117 if (val != NULL) 01118 (void) xmlFree(val); 01119 01121 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "scenario"); 01122 val = xml_get_setting(conf, path); 01123 if (val != NULL) 01124 data->info->scenario = strdup((char *) val); 01125 else { 01126 data->info->scenario = strdup("SRESA1B"); 01127 } 01128 (void) fprintf(stdout, "%s: output metadata scenario = %s\n", __FILE__, data->info->scenario); 01129 if (val != NULL) 01130 (void) xmlFree(val); 01131 01133 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "scenario_co2"); 01134 val = xml_get_setting(conf, path); 01135 if (val != NULL) 01136 data->info->scenario_co2 = strdup((char *) val); 01137 else { 01138 data->info->scenario_co2 = strdup("A1B"); 01139 } 01140 (void) fprintf(stdout, "%s: output metadata scenario_co2 = %s\n", __FILE__, data->info->scenario_co2); 01141 if (val != NULL) 01142 (void) xmlFree(val); 01143 01145 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "model"); 01146 val = xml_get_setting(conf, path); 01147 if (val != NULL) 01148 data->info->model = strdup((char *) val); 01149 else { 01150 data->info->model = strdup("ARPEGE grille etiree"); 01151 } 01152 (void) fprintf(stdout, "%s: output metadata model = %s\n", __FILE__, data->info->model); 01153 if (val != NULL) 01154 (void) xmlFree(val); 01155 01157 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "institution_model"); 01158 val = xml_get_setting(conf, path); 01159 if (val != NULL) 01160 data->info->institution_model = strdup((char *) val); 01161 else { 01162 data->info->institution_model = strdup("Meteo-France CNRM/GMGEC"); 01163 } 01164 (void) fprintf(stdout, "%s: output metadata institution_model = %s\n", __FILE__, data->info->institution_model); 01165 if (val != NULL) 01166 (void) xmlFree(val); 01167 01169 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "country"); 01170 val = xml_get_setting(conf, path); 01171 if (val != NULL) 01172 data->info->country = strdup((char *) val); 01173 else { 01174 data->info->country = strdup("France"); 01175 } 01176 (void) fprintf(stdout, "%s: output metadata country = %s\n", __FILE__, data->info->country); 01177 if (val != NULL) 01178 (void) xmlFree(val); 01179 01181 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "member"); 01182 val = xml_get_setting(conf, path); 01183 if (val != NULL) 01184 data->info->member = strdup((char *) val); 01185 else { 01186 data->info->member = strdup("1"); 01187 } 01188 (void) fprintf(stdout, "%s: output metadata member = %s\n", __FILE__, data->info->member); 01189 if (val != NULL) 01190 (void) xmlFree(val); 01191 01193 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "downscaling_forcing"); 01194 val = xml_get_setting(conf, path); 01195 if (val != NULL) 01196 data->info->downscaling_forcing = strdup((char *) val); 01197 else { 01198 data->info->downscaling_forcing = strdup("SAFRAN 1970-2005"); 01199 } 01200 (void) fprintf(stdout, "%s: output metadata downscaling_forcing = %s\n", __FILE__, data->info->downscaling_forcing); 01201 if (val != NULL) 01202 (void) xmlFree(val); 01203 01205 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "timestep"); 01206 val = xml_get_setting(conf, path); 01207 if (val != NULL) 01208 data->info->timestep = strdup((char *) val); 01209 else { 01210 
data->info->timestep = strdup("daily"); 01211 } 01212 if ( !strcmp(data->info->timestep, "daily") || !strcmp(data->info->timestep, "hourly")) 01213 (void) fprintf(stdout, "%s: output metadata timestep = %s\n", __FILE__, data->info->timestep); 01214 else { 01215 (void) fprintf(stderr, "%s: Invalid output timestep! Values accepted are either \"hourly\" or \"daily\"! Aborting.\n", __FILE__); return -1; 01216 } 01217 if (val != NULL) 01218 (void) xmlFree(val); 01219 01221 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "contact_email"); 01222 val = xml_get_setting(conf, path); 01223 if (val != NULL) 01224 data->info->contact_email = strdup((char *) val); 01225 else { 01226 data->info->contact_email = strdup("christian.page@cerfacs.fr"); 01227 } 01228 (void) fprintf(stdout, "%s: output metadata contact_email = %s\n", __FILE__, data->info->contact_email); 01229 if (val != NULL) 01230 (void) xmlFree(val); 01231 01233 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "contact_name"); 01234 val = xml_get_setting(conf, path); 01235 if (val != NULL) 01236 data->info->contact_name = strdup((char *) val); 01237 else { 01238 data->info->contact_name = strdup("Christian PAGE"); 01239 } 01240 (void) fprintf(stdout, "%s: output metadata contact_name = %s\n", __FILE__, data->info->contact_name); 01241 if (val != NULL) 01242 (void) xmlFree(val); 01243 01245 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "other_contact_email"); 01246 val = xml_get_setting(conf, path); 01247 if (val != NULL) 01248 data->info->other_contact_email = strdup((char *) val); 01249 else { 01250 data->info->other_contact_email = strdup("laurent.terray@cerfacs.fr"); 01251 } 01252 (void) fprintf(stdout, "%s: output metadata other_contact_email = %s\n", __FILE__, data->info->other_contact_email); 01253 if (val != NULL) 01254 (void) xmlFree(val); 01255 01257 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "output", "other_contact_name"); 01258 val = xml_get_setting(conf, path); 01259 if (val != NULL) 01260 data->info->other_contact_name = strdup((char *) val); 01261 else { 01262 data->info->other_contact_name = strdup("Laurent TERRAY"); 01263 } 01264 (void) fprintf(stdout, "%s: output metadata other_contact_name = %s\n", __FILE__, data->info->other_contact_name); 01265 if (val != NULL) 01266 (void) xmlFree(val); 01267 01268 /**** OBSERVATION DATABASE CONFIGURATION ****/ 01269 01270 data->conf->obs_var = (var_struct *) malloc(sizeof(var_struct)); 01271 if (data->conf->obs_var == NULL) alloc_error(__FILE__, __LINE__); 01272 data->conf->obs_var->proj = (proj_struct *) malloc(sizeof(proj_struct)); 01273 if (data->conf->obs_var->proj == NULL) alloc_error(__FILE__, __LINE__); 01274 data->conf->obs_var->proj->name = NULL; 01275 data->conf->obs_var->proj->grid_mapping_name = NULL; 01276 01278 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "number_of_variables"); 01279 val = xml_get_setting(conf, path); 01280 if (val != NULL) { 01281 data->conf->obs_var->nobs_var = (int) xmlXPathCastStringToNumber(val); 01282 (void) xmlFree(val); 01283 (void) fprintf(stdout, "%s: observations: number_of_variables = %d\n", __FILE__, data->conf->obs_var->nobs_var); 01284 01287 data->conf->obs_var->acronym = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01288 if (data->conf->obs_var->acronym == NULL) alloc_error(__FILE__, __LINE__); 01289 data->conf->obs_var->netcdfname = (char **) 
malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01290 if (data->conf->obs_var->netcdfname == NULL) alloc_error(__FILE__, __LINE__); 01291 data->conf->obs_var->name = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01292 if (data->conf->obs_var->name == NULL) alloc_error(__FILE__, __LINE__); 01293 data->conf->obs_var->factor = (double *) malloc(data->conf->obs_var->nobs_var * sizeof(double)); 01294 if (data->conf->obs_var->factor == NULL) alloc_error(__FILE__, __LINE__); 01295 data->conf->obs_var->delta = (double *) malloc(data->conf->obs_var->nobs_var * sizeof(double)); 01296 if (data->conf->obs_var->delta == NULL) alloc_error(__FILE__, __LINE__); 01297 data->conf->obs_var->post = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01298 if (data->conf->obs_var->post == NULL) alloc_error(__FILE__, __LINE__); 01299 data->conf->obs_var->clim = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01300 if (data->conf->obs_var->clim == NULL) alloc_error(__FILE__, __LINE__); 01301 data->conf->obs_var->output = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01302 if (data->conf->obs_var->output == NULL) alloc_error(__FILE__, __LINE__); 01303 data->conf->obs_var->units = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01304 if (data->conf->obs_var->units == NULL) alloc_error(__FILE__, __LINE__); 01305 data->conf->obs_var->height = (char **) malloc(data->conf->obs_var->nobs_var * sizeof(char *)); 01306 if (data->conf->obs_var->height == NULL) alloc_error(__FILE__, __LINE__); 01307 01308 /* Loop over observation variables */ 01309 for (i=0; i<data->conf->obs_var->nobs_var; i++) { 01310 01311 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "acronym"); 01312 val = xml_get_setting(conf, path); 01313 if (val != NULL) { 01314 data->conf->obs_var->acronym[i] = strdup((char *) val); 01315 (void) xmlFree(val); 01316 } 01317 else { 01318 (void) fprintf(stderr, "%s: Missing or invalid observation variable acronym setting. Aborting.\n", __FILE__); 01319 return -1; 01320 } 01321 01322 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "netcdfname"); 01323 val = xml_get_setting(conf, path); 01324 if (val != NULL) { 01325 data->conf->obs_var->netcdfname[i] = strdup((char *) val); 01326 (void) xmlFree(val); 01327 } 01328 else { 01329 (void) fprintf(stderr, "%s: Missing or invalid observation variable netcdfname setting. Aborting.\n", __FILE__); 01330 return -1; 01331 } 01332 01333 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]", "setting", "observations", "variables", "name", i+1); 01334 val = xml_get_setting(conf, path); 01335 if (val != NULL) { 01336 data->conf->obs_var->name[i] = strdup((char *) val); 01337 (void) xmlFree(val); 01338 } 01339 else { 01340 (void) fprintf(stderr, "%s: Missing or invalid observation variable name setting. Aborting.\n", __FILE__); 01341 return -1; 01342 } 01343 01344 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "factor"); 01345 val = xml_get_setting(conf, path); 01346 if (val != NULL) { 01347 data->conf->obs_var->factor[i] = (double) xmlXPathCastStringToNumber(val); 01348 (void) xmlFree(val); 01349 } 01350 else { 01351 (void) fprintf(stderr, "%s: Missing or invalid observation variable factor setting. 
Aborting.\n", __FILE__); 01352 return -1; 01353 } 01354 01355 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "delta"); 01356 val = xml_get_setting(conf, path); 01357 if (val != NULL) { 01358 data->conf->obs_var->delta[i] = (double) xmlXPathCastStringToNumber(val); 01359 (void) xmlFree(val); 01360 } 01361 else { 01362 (void) fprintf(stderr, "%s: Missing or invalid observation variable delta setting. Aborting.\n", __FILE__); 01363 return -1; 01364 } 01365 01366 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "postprocess"); 01367 val = xml_get_setting(conf, path); 01368 if (val != NULL) { 01369 data->conf->obs_var->post[i] = strdup((char *) val); 01370 (void) xmlFree(val); 01371 } 01372 else { 01373 data->conf->obs_var->post[i] = strdup("no"); 01374 } 01375 01376 if ( strcmp(data->conf->obs_var->post[i], "yes") && strcmp(data->conf->obs_var->post[i], "no") ) { 01377 (void) fprintf(stderr, "%s: Invalid observation variable postprocess setting (valid values are \"yes\" or \"no\"). Aborting.\n", __FILE__); 01378 return -1; 01379 } 01380 if (i == 0 && !strcmp(data->conf->obs_var->post[i], "yes")) { 01381 (void) fprintf(stderr, "%s: Invalid observation variable postprocess setting. A variable having a postprocess attribute of \"yes\" must not be the first one in the list. Aborting.\n", __FILE__); 01382 return -1; 01383 } 01384 01385 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "clim"); 01386 val = xml_get_setting(conf, path); 01387 if (val != NULL) { 01388 data->conf->obs_var->clim[i] = strdup((char *) val); 01389 (void) xmlFree(val); 01390 } 01391 else { 01392 data->conf->obs_var->clim[i] = strdup("no"); 01393 } 01394 01395 if ( strcmp(data->conf->obs_var->clim[i], "yes") && strcmp(data->conf->obs_var->clim[i], "no") ) { 01396 (void) fprintf(stderr, "%s: Invalid observation variable climatology anomaly setting (valid values are \"yes\" or \"no\"). Aborting.\n", __FILE__); 01397 return -1; 01398 } 01399 01400 /* Output */ 01401 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "output"); 01402 val = xml_get_setting(conf, path); 01403 if (val != NULL) { 01404 data->conf->obs_var->output[i] = strdup((char *) val); 01405 (void) xmlFree(val); 01406 } 01407 else { 01408 data->conf->obs_var->output[i] = strdup("yes"); 01409 } 01410 01411 if ( strcmp(data->conf->obs_var->output[i], "yes") && strcmp(data->conf->obs_var->output[i], "no") ) { 01412 (void) fprintf(stderr, "%s: Invalid observation variable output setting (valid values are \"yes\" or \"no\"). Aborting.\n", __FILE__); 01413 return -1; 01414 } 01415 01416 /* Try to retrieve units and height. 
*/ 01417 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "units"); 01418 val = xml_get_setting(conf, path); 01419 if (val != NULL) { 01420 data->conf->obs_var->units[i] = strdup((char *) val); 01421 (void) xmlFree(val); 01422 } 01423 else { 01424 data->conf->obs_var->units[i] = strdup("unknown"); 01425 } 01426 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s[@id=\"%d\"]/@%s", "setting", "observations", "variables", "name", i+1, "height"); 01427 val = xml_get_setting(conf, path); 01428 if (val != NULL) { 01429 data->conf->obs_var->height[i] = strdup((char *) val); 01430 (void) xmlFree(val); 01431 } 01432 else { 01433 data->conf->obs_var->height[i] = strdup("unknown"); 01434 } 01435 01436 (void) printf("%s: Variable id=%d name=\"%s\" netcdfname=%s acronym=%s factor=%f delta=%f postprocess=%s output=%s\n", __FILE__, i+1, data->conf->obs_var->name[i], data->conf->obs_var->netcdfname[i], data->conf->obs_var->acronym[i], data->conf->obs_var->factor[i], data->conf->obs_var->delta[i], data->conf->obs_var->post[i], data->conf->obs_var->output[i]); 01437 } 01438 } 01439 else { 01440 (void) fprintf(stderr, "%s: Invalid number_of_variables value %s in configuration file. Aborting.\n", __FILE__, val); 01441 return -1; 01442 } 01443 01445 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "frequency"); 01446 val = xml_get_setting(conf, path); 01447 if (val != NULL) 01448 data->conf->obs_var->frequency = strdup((char *) val); 01449 else { 01450 (void) fprintf(stderr, "%s: Missing or invalid observations data frequency setting. Aborting.\n", __FILE__); 01451 return -1; 01452 } 01453 if (val != NULL) 01454 (void) xmlFree(val); 01455 if ( strcmp(data->conf->obs_var->frequency, "daily") && strcmp(data->conf->obs_var->frequency, "hourly")) { 01456 (void) fprintf(stderr, "%s: Missing or invalid observations data frequency setting. Aborting.\n", __FILE__); 01457 return -1; 01458 } 01459 if ( !strcmp(data->info->timestep, "hourly") && !strcmp(data->conf->obs_var->frequency, "daily") ) { 01460 (void) fprintf(stderr, "%s: Invalid observations data frequency setting \"daily\" while output timestep is set to \"hourly\"! Aborting.\n", __FILE__); 01461 return -1; 01462 } 01463 01465 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "template"); 01466 val = xml_get_setting(conf, path); 01467 if (val != NULL) 01468 data->conf->obs_var->template = strdup((char *) val); 01469 else { 01470 (void) fprintf(stderr, "%s: Missing or invalid output template setting. Aborting.\n", __FILE__); 01471 return -1; 01472 } 01473 (void) fprintf(stdout, "%s: output template = %s\n", __FILE__, data->conf->obs_var->template); 01474 if (val != NULL) 01475 (void) xmlFree(val); 01476 01478 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "year_digits"); 01479 val = xml_get_setting(conf, path); 01480 if (val != NULL) { 01481 data->conf->obs_var->year_digits = (int) xmlXPathCastStringToNumber(val); 01482 (void) xmlFree(val); 01483 if (data->conf->obs_var->year_digits != 2 && data->conf->obs_var->year_digits != 4) { 01484 (void) fprintf(stderr, "%s: Invalid observations data year_digits setting %d. Only values of 2 or 4 are valid. Aborting.\n", 01485 __FILE__, data->conf->obs_var->year_digits); 01486 return -1; 01487 } 01488 } 01489 else { 01490 (void) fprintf(stderr, "%s: Missing or invalid observations data year_digits setting. 
Aborting.\n", __FILE__); 01491 return -1; 01492 } 01493 01495 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "path"); 01496 val = xml_get_setting(conf, path); 01497 if (val != NULL) 01498 data->conf->obs_var->path = strdup((char *) val); 01499 else { 01500 (void) fprintf(stderr, "%s: Missing or invalid observations data path setting. Aborting.\n", __FILE__); 01501 return -1; 01502 } 01503 if (val != NULL) 01504 (void) xmlFree(val); 01505 01507 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "month_begin"); 01508 val = xml_get_setting(conf, path); 01509 if (val != NULL) 01510 data->conf->obs_var->month_begin = xmlXPathCastStringToNumber(val); 01511 else { 01512 (void) fprintf(stderr, "%s: Missing or invalid observations data month_begin setting. Aborting.\n", __FILE__); 01513 return -1; 01514 } 01515 if (val != NULL) 01516 (void) xmlFree(val); 01517 01519 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "dim_coordinates"); 01520 val = xml_get_setting(conf, path); 01521 if (val != NULL) 01522 data->conf->obs_var->dimcoords = strdup((char *) val); 01523 else 01524 data->conf->obs_var->dimcoords = strdup("1D"); 01525 (void) fprintf(stdout, "%s: Observations coords = %s\n", __FILE__, data->conf->obs_var->dimcoords); 01526 if (val != NULL) 01527 (void) xmlFree(val); 01528 01530 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "longitude_name"); 01531 val = xml_get_setting(conf, path); 01532 if (val != NULL) 01533 data->conf->obs_var->lonname = strdup((char *) val); 01534 else 01535 data->conf->obs_var->lonname = strdup("lon"); 01536 (void) fprintf(stdout, "%s: Observations longitude_name = %s\n", __FILE__, data->conf->obs_var->lonname); 01537 if (val != NULL) 01538 (void) xmlFree(val); 01539 01541 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "latitude_name"); 01542 val = xml_get_setting(conf, path); 01543 if (val != NULL) 01544 data->conf->obs_var->latname = strdup((char *) val); 01545 else 01546 data->conf->obs_var->latname = strdup("lat"); 01547 (void) fprintf(stdout, "%s: Observations latitude_name = %s\n", __FILE__, data->conf->obs_var->latname); 01548 if (val != NULL) 01549 (void) xmlFree(val); 01550 01552 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "dimx_name"); 01553 val = xml_get_setting(conf, path); 01554 if (val != NULL) 01555 data->conf->obs_var->dimxname = strdup((char *) val); 01556 else 01557 data->conf->obs_var->dimxname = strdup("lon"); 01558 (void) fprintf(stdout, "%s: Observations dimx_name = %s\n", __FILE__, data->conf->obs_var->dimxname); 01559 if (val != NULL) 01560 (void) xmlFree(val); 01561 01563 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "dimy_name"); 01564 val = xml_get_setting(conf, path); 01565 if (val != NULL) 01566 data->conf->obs_var->dimyname = strdup((char *) val); 01567 else 01568 data->conf->obs_var->dimyname = strdup("lat"); 01569 (void) fprintf(stdout, "%s: Observations dimy_name = %s\n", __FILE__, data->conf->obs_var->dimyname); 01570 if (val != NULL) 01571 (void) xmlFree(val); 01572 01574 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "time_name"); 01575 val = xml_get_setting(conf, path); 01576 if (val != NULL) 01577 data->conf->obs_var->timename = strdup((char *) val); 01578 else 01579 data->conf->obs_var->timename = strdup("time"); 01580 (void) 
fprintf(stdout, "%s: Observations time_name = %s\n", __FILE__, data->conf->obs_var->timename); 01581 if (val != NULL) 01582 (void) xmlFree(val); 01583 01585 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "coordinates"); 01586 val = xml_get_setting(conf, path); 01587 if (val != NULL) 01588 data->conf->obs_var->proj->coords = strdup((char *) val); 01589 else 01590 data->conf->obs_var->proj->coords = strdup("2D"); 01591 (void) fprintf(stdout, "%s: Observations coords = %s\n", __FILE__, data->conf->obs_var->proj->coords); 01592 if (val != NULL) 01593 (void) xmlFree(val); 01594 01596 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "altitude"); 01597 val = xml_get_setting(conf, path); 01598 if (val != NULL) 01599 data->conf->obs_var->altitude = strdup((char *) val); 01600 else { 01601 (void) fprintf(stderr, "%s: Missing observations altitude filename. Will not be able to calculate Relative Humidity if specified.\n", __FILE__); 01602 data->conf->obs_var->altitude = strdup(""); 01603 } 01604 if (val != NULL) 01605 (void) xmlFree(val); 01606 01608 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "observations", "altitude_name"); 01609 val = xml_get_setting(conf, path); 01610 if (val != NULL) 01611 data->conf->obs_var->altitudename = strdup((char *) val); 01612 else 01613 data->conf->obs_var->altitudename = strdup("Altitude"); 01614 (void) fprintf(stdout, "%s: Observations altitude_name = %s\n", __FILE__, data->conf->obs_var->altitudename); 01615 if (val != NULL) 01616 (void) xmlFree(val); 01617 01618 /**** LEARNING CONFIGURATION ****/ 01619 01620 /* Whole learning period */ 01621 data->learning->time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 01622 if (data->learning->time_s == NULL) alloc_error(__FILE__, __LINE__); 01623 01625 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "learning_provided"); 01626 val = xml_get_setting(conf, path); 01627 if (val != NULL) 01628 data->learning->learning_provided = (int) strtol((char *) val, (char **)NULL, 10); 01629 else 01630 data->learning->learning_provided = -1; 01631 if (data->learning->learning_provided != FALSE && data->learning->learning_provided != TRUE) { 01632 (void) fprintf(stderr, "%s: Invalid or missing learning_provided value %s in configuration file. Aborting.\n", __FILE__, val); 01633 return -1; 01634 } 01635 (void) fprintf(stdout, "%s: learning_provided=%d\n", __FILE__, data->learning->learning_provided); 01636 if (val != NULL) 01637 (void) xmlFree(val); 01638 01640 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "learning_save"); 01641 val = xml_get_setting(conf, path); 01642 if (val != NULL) 01643 data->learning->learning_save = (int) strtol((char *) val, (char **)NULL, 10); 01644 else 01645 data->learning->learning_save = FALSE; 01646 if (data->learning->learning_save != FALSE && data->learning->learning_save != TRUE) { 01647 (void) fprintf(stderr, "%s: Invalid learning_save value %s in configuration file. 
Aborting.\n", __FILE__, val); 01648 return -1; 01649 } 01650 (void) fprintf(stdout, "%s: learning_save=%d\n", __FILE__, data->learning->learning_save); 01651 if (val != NULL) 01652 (void) xmlFree(val); 01653 01655 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "number_of_eofs"); 01656 val = xml_get_setting(conf, path); 01657 if (val != NULL) { 01658 data->learning->rea_neof = xmlXPathCastStringToNumber(val); 01659 data->learning->obs_neof = xmlXPathCastStringToNumber(val); 01660 (void) fprintf(stdout, "%s: Number of EOF for learning period for reanalysis data = %d\n", __FILE__, data->learning->rea_neof); 01661 (void) fprintf(stdout, "%s: Number of EOF for learning period for observation data = %d\n", __FILE__, data->learning->obs_neof); 01662 (void) xmlFree(val); 01663 } 01664 else { 01666 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "number_of_obs_eofs"); 01667 val = xml_get_setting(conf, path); 01668 if (val != NULL) 01669 data->learning->obs_neof = xmlXPathCastStringToNumber(val); 01670 else 01671 data->learning->obs_neof = 10; 01672 (void) fprintf(stdout, "%s: Number of EOF for learning period for observation data = %d\n", __FILE__, data->learning->obs_neof); 01673 if (val != NULL) 01674 (void) xmlFree(val); 01675 01677 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "number_of_rea_eofs"); 01678 val = xml_get_setting(conf, path); 01679 if (val != NULL) 01680 data->learning->rea_neof = xmlXPathCastStringToNumber(val); 01681 else 01682 data->learning->rea_neof = 10; 01683 (void) fprintf(stdout, "%s: Number of EOF for learning period for reanalysis data = %d\n", __FILE__, data->learning->rea_neof); 01684 if (val != NULL) 01685 (void) xmlFree(val); 01686 } 01687 01688 /* If learning data is saved, additional parameters are needed */ 01689 if (data->learning->learning_save == TRUE) { 01690 01692 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_save_weight"); 01693 val = xml_get_setting(conf, path); 01694 if (val != NULL) { 01695 data->learning->filename_save_weight = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01696 if (data->learning->filename_save_weight == NULL) alloc_error(__FILE__, __LINE__); 01697 (void) strcpy(data->learning->filename_save_weight, (char *) val); 01698 (void) fprintf(stdout, "%s: Learning filename_save_weight = %s\n", __FILE__, data->learning->filename_save_weight); 01699 (void) xmlFree(val); 01700 } 01701 else { 01702 (void) fprintf(stderr, "%s: Missing learning filename_save_weight setting. Aborting.\n", __FILE__); 01703 (void) xmlFree(val); 01704 return -1; 01705 } 01706 01708 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_save_learn"); 01709 val = xml_get_setting(conf, path); 01710 if (val != NULL) { 01711 data->learning->filename_save_learn = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01712 if (data->learning->filename_save_learn == NULL) alloc_error(__FILE__, __LINE__); 01713 (void) strcpy(data->learning->filename_save_learn, (char *) val); 01714 (void) fprintf(stdout, "%s: Learning filename_save_learn = %s\n", __FILE__, data->learning->filename_save_learn); 01715 (void) xmlFree(val); 01716 } 01717 else { 01718 (void) fprintf(stderr, "%s: Missing learning filename_save_learn setting. 
Aborting.\n", __FILE__); 01719 (void) xmlFree(val); 01720 return -1; 01721 } 01722 01724 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_save_clust_learn"); 01725 val = xml_get_setting(conf, path); 01726 if (val != NULL) { 01727 data->learning->filename_save_clust_learn = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01728 if (data->learning->filename_save_clust_learn == NULL) alloc_error(__FILE__, __LINE__); 01729 (void) strcpy(data->learning->filename_save_clust_learn, (char *) val); 01730 (void) fprintf(stdout, "%s: Learning filename_save_clust_learn = %s\n", __FILE__, data->learning->filename_save_clust_learn); 01731 (void) xmlFree(val); 01732 } 01733 else { 01734 (void) fprintf(stderr, "%s: Missing learning filename_save_clust_learn setting. Aborting.\n", __FILE__); 01735 (void) xmlFree(val); 01736 return -1; 01737 } 01738 } 01739 01740 /* If learning data is provided, additional parameters are needed */ 01741 if (data->learning->learning_provided == TRUE) { 01742 01744 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_open_weight"); 01745 val = xml_get_setting(conf, path); 01746 if (val != NULL) { 01747 data->learning->filename_open_weight = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01748 if (data->learning->filename_open_weight == NULL) alloc_error(__FILE__, __LINE__); 01749 (void) strcpy(data->learning->filename_open_weight, (char *) val); 01750 (void) fprintf(stdout, "%s: Learning filename_open_weight = %s\n", __FILE__, data->learning->filename_open_weight); 01751 (void) xmlFree(val); 01752 } 01753 else { 01754 (void) fprintf(stderr, "%s: Missing learning filename_open_weight setting. Aborting.\n", __FILE__); 01755 (void) xmlFree(val); 01756 return -1; 01757 } 01758 01760 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_open_learn"); 01761 val = xml_get_setting(conf, path); 01762 if (val != NULL) { 01763 data->learning->filename_open_learn = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01764 if (data->learning->filename_open_learn == NULL) alloc_error(__FILE__, __LINE__); 01765 (void) strcpy(data->learning->filename_open_learn, (char *) val); 01766 (void) fprintf(stdout, "%s: Learning filename_open_learn = %s\n", __FILE__, data->learning->filename_open_learn); 01767 (void) xmlFree(val); 01768 } 01769 else { 01770 (void) fprintf(stderr, "%s: Missing learning filename_open_learn setting. Aborting.\n", __FILE__); 01771 (void) xmlFree(val); 01772 return -1; 01773 } 01774 01776 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_open_clust_learn"); 01777 val = xml_get_setting(conf, path); 01778 if (val != NULL) { 01779 data->learning->filename_open_clust_learn = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01780 if (data->learning->filename_open_clust_learn == NULL) alloc_error(__FILE__, __LINE__); 01781 (void) strcpy(data->learning->filename_open_clust_learn, (char *) val); 01782 (void) fprintf(stdout, "%s: Learning filename_open_clust_learn = %s\n", __FILE__, data->learning->filename_open_clust_learn); 01783 (void) xmlFree(val); 01784 } 01785 else { 01786 (void) fprintf(stderr, "%s: Missing learning filename_open_clust_learn setting. 
Aborting.\n", __FILE__); 01787 (void) xmlFree(val); 01788 return -1; 01789 } 01790 } 01791 else { 01792 01793 data->learning->obs = (learning_eof_struct *) malloc(sizeof(learning_eof_struct)); 01794 if (data->learning->obs == NULL) alloc_error(__FILE__, __LINE__); 01795 data->learning->rea = (learning_eof_struct *) malloc(sizeof(learning_eof_struct)); 01796 if (data->learning->rea == NULL) alloc_error(__FILE__, __LINE__); 01797 01798 data->learning->obs->time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 01799 if (data->learning->obs->time_s == NULL) alloc_error(__FILE__, __LINE__); 01800 data->learning->rea->time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 01801 if (data->learning->rea->time_s == NULL) alloc_error(__FILE__, __LINE__); 01802 01804 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_obs_eof"); 01805 val = xml_get_setting(conf, path); 01806 if (val != NULL) { 01807 data->learning->obs->filename_eof = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01808 if (data->learning->obs->filename_eof == NULL) alloc_error(__FILE__, __LINE__); 01809 (void) strcpy(data->learning->obs->filename_eof, (char *) val); 01810 (void) fprintf(stdout, "%s: Learning filename_obs_eof = %s\n", __FILE__, data->learning->obs->filename_eof); 01811 (void) xmlFree(val); 01812 } 01813 else { 01814 (void) fprintf(stderr, "%s: Missing learning filename_obs_eof setting. Aborting.\n", __FILE__); 01815 (void) xmlFree(val); 01816 return -1; 01817 } 01818 01820 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_rea_eof"); 01821 val = xml_get_setting(conf, path); 01822 if (val != NULL) { 01823 data->learning->rea->filename_eof = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01824 if (data->learning->rea->filename_eof == NULL) alloc_error(__FILE__, __LINE__); 01825 (void) strcpy(data->learning->rea->filename_eof, (char *) val); 01826 (void) fprintf(stdout, "%s: Learning filename_rea_eof = %s\n", __FILE__, data->learning->rea->filename_eof); 01827 (void) xmlFree(val); 01828 } 01829 else { 01830 (void) fprintf(stderr, "%s: Missing learning filename_rea_eof setting. Aborting.\n", __FILE__); 01831 (void) xmlFree(val); 01832 return -1; 01833 } 01834 01836 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "filename_rea_sup"); 01837 val = xml_get_setting(conf, path); 01838 if (val != NULL) { 01839 data->learning->filename_rea_sup = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01840 if (data->learning->filename_rea_sup == NULL) alloc_error(__FILE__, __LINE__); 01841 (void) strcpy(data->learning->filename_rea_sup, (char *) val); 01842 (void) fprintf(stdout, "%s: Learning filename_rea_sup = %s\n", __FILE__, data->learning->filename_rea_sup); 01843 (void) xmlFree(val); 01844 } 01845 else { 01846 (void) fprintf(stderr, "%s: Missing learning filename_rea_sup setting. 
Aborting.\n", __FILE__); 01847 (void) xmlFree(val); 01848 return -1; 01849 } 01850 01852 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_obs_eof"); 01853 val = xml_get_setting(conf, path); 01854 if (val != NULL) { 01855 data->learning->obs->nomvar_eof = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01856 if (data->learning->obs->nomvar_eof == NULL) alloc_error(__FILE__, __LINE__); 01857 (void) strcpy(data->learning->obs->nomvar_eof, (char *) val); 01858 (void) xmlFree(val); 01859 } 01860 else { 01861 data->learning->obs->nomvar_eof = strdup("pre_pc"); 01862 } 01863 (void) fprintf(stdout, "%s: Learning nomvar_eof = %s\n", __FILE__, data->learning->obs->nomvar_eof); 01864 01866 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_rea_eof"); 01867 val = xml_get_setting(conf, path); 01868 if (val != NULL) { 01869 data->learning->rea->nomvar_eof = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01870 if (data->learning->rea->nomvar_eof == NULL) alloc_error(__FILE__, __LINE__); 01871 (void) strcpy(data->learning->rea->nomvar_eof, (char *) val); 01872 (void) xmlFree(val); 01873 } 01874 else { 01875 data->learning->rea->nomvar_eof = strdup("psl_pc"); 01876 } 01877 (void) fprintf(stdout, "%s: Learning nomvar_eof = %s\n", __FILE__, data->learning->obs->nomvar_eof); 01878 01880 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_obs_sing"); 01881 val = xml_get_setting(conf, path); 01882 if (val != NULL) { 01883 data->learning->obs->nomvar_sing = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01884 if (data->learning->obs->nomvar_sing == NULL) alloc_error(__FILE__, __LINE__); 01885 (void) strcpy(data->learning->obs->nomvar_sing, (char *) val); 01886 (void) xmlFree(val); 01887 } 01888 else 01889 data->learning->obs->nomvar_sing = strdup("pre_sing"); 01890 (void) fprintf(stdout, "%s: Learning nomvar_obs_sing = %s\n", __FILE__, data->learning->obs->nomvar_sing); 01891 01893 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_rea_sing"); 01894 val = xml_get_setting(conf, path); 01895 if (val != NULL) { 01896 data->learning->rea->nomvar_sing = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01897 if (data->learning->rea->nomvar_sing == NULL) alloc_error(__FILE__, __LINE__); 01898 (void) strcpy(data->learning->rea->nomvar_sing, (char *) val); 01899 (void) xmlFree(val); 01900 } 01901 else 01902 data->learning->rea->nomvar_sing = strdup("pre_sing"); 01903 (void) fprintf(stdout, "%s: Learning nomvar_rea_sing = %s\n", __FILE__, data->learning->rea->nomvar_sing); 01904 01906 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_rea_sup"); 01907 val = xml_get_setting(conf, path); 01908 if (val != NULL) { 01909 data->learning->nomvar_rea_sup = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01910 if (data->learning->nomvar_rea_sup == NULL) alloc_error(__FILE__, __LINE__); 01911 (void) strcpy(data->learning->nomvar_rea_sup, (char *) val); 01912 (void) xmlFree(val); 01913 } 01914 else { 01915 data->learning->nomvar_rea_sup = strdup("tas"); 01916 } 01917 (void) fprintf(stdout, "%s: Learning nomvar_sup = %s\n", __FILE__, data->learning->nomvar_rea_sup); 01918 01920 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_coords"); 01921 val = xml_get_setting(conf, path); 01922 if (val != NULL) { 01923 data->learning->rea_coords = (char *) malloc((xmlStrlen(val)+1) * 
sizeof(char)); 01924 if (data->learning->rea_coords == NULL) alloc_error(__FILE__, __LINE__); 01925 (void) strcpy(data->learning->rea_coords, (char *) val); 01926 (void) xmlFree(val); 01927 } 01928 else { 01929 data->learning->rea_coords = strdup("1D"); 01930 } 01931 (void) fprintf(stdout, "%s: Learning rea_coords = %s\n", __FILE__, data->learning->rea_coords); 01932 01934 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_gridname"); 01935 val = xml_get_setting(conf, path); 01936 if (val != NULL) { 01937 data->learning->rea_gridname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01938 if (data->learning->rea_gridname == NULL) alloc_error(__FILE__, __LINE__); 01939 (void) strcpy(data->learning->rea_gridname, (char *) val); 01940 (void) xmlFree(val); 01941 } 01942 else { 01943 data->learning->rea_gridname = strdup("Latitude_Longitude"); 01944 } 01945 (void) fprintf(stdout, "%s: Learning rea_gridname = %s\n", __FILE__, data->learning->rea_gridname); 01946 01948 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_dimx_name"); 01949 val = xml_get_setting(conf, path); 01950 if (val != NULL) { 01951 data->learning->rea_dimxname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01952 if (data->learning->rea_dimxname == NULL) alloc_error(__FILE__, __LINE__); 01953 (void) strcpy(data->learning->rea_dimxname, (char *) val); 01954 (void) xmlFree(val); 01955 } 01956 else { 01957 data->learning->rea_dimxname = strdup("lon"); 01958 } 01959 (void) fprintf(stdout, "%s: Learning rea_dimx_name = %s\n", __FILE__, data->learning->rea_dimxname); 01960 01962 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_dimy_name"); 01963 val = xml_get_setting(conf, path); 01964 if (val != NULL) { 01965 data->learning->rea_dimyname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01966 if (data->learning->rea_dimyname == NULL) alloc_error(__FILE__, __LINE__); 01967 (void) strcpy(data->learning->rea_dimyname, (char *) val); 01968 (void) xmlFree(val); 01969 } 01970 else { 01971 data->learning->rea_dimyname = strdup("lat"); 01972 } 01973 (void) fprintf(stdout, "%s: Learning rea_latitude_name = %s\n", __FILE__, data->learning->rea_dimyname); 01974 01976 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_longitude_name"); 01977 val = xml_get_setting(conf, path); 01978 if (val != NULL) { 01979 data->learning->rea_lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01980 if (data->learning->rea_lonname == NULL) alloc_error(__FILE__, __LINE__); 01981 (void) strcpy(data->learning->rea_lonname, (char *) val); 01982 (void) xmlFree(val); 01983 } 01984 else { 01985 data->learning->rea_lonname = strdup("lon"); 01986 } 01987 (void) fprintf(stdout, "%s: Learning rea_longitude_name = %s\n", __FILE__, data->learning->rea_lonname); 01988 01990 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_latitude_name"); 01991 val = xml_get_setting(conf, path); 01992 if (val != NULL) { 01993 data->learning->rea_latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 01994 if (data->learning->rea_latname == NULL) alloc_error(__FILE__, __LINE__); 01995 (void) strcpy(data->learning->rea_latname, (char *) val); 01996 (void) xmlFree(val); 01997 } 01998 else { 01999 data->learning->rea_latname = strdup("lat"); 02000 } 02001 (void) fprintf(stdout, "%s: Learning rea_latitude_name = %s\n", __FILE__, data->learning->rea_latname); 02002 02004 (void) 
sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "rea_time_name"); 02005 val = xml_get_setting(conf, path); 02006 if (val != NULL) { 02007 data->learning->rea_timename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02008 if (data->learning->rea_timename == NULL) alloc_error(__FILE__, __LINE__); 02009 (void) strcpy(data->learning->rea_timename, (char *) val); 02010 (void) xmlFree(val); 02011 } 02012 else { 02013 data->learning->rea_timename = strdup("time"); 02014 } 02015 (void) fprintf(stdout, "%s: Learning rea_time_name = %s\n", __FILE__, data->learning->rea_timename); 02016 02018 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "obs_dimx_name"); 02019 val = xml_get_setting(conf, path); 02020 if (val != NULL) { 02021 data->learning->obs_dimxname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02022 if (data->learning->obs_dimxname == NULL) alloc_error(__FILE__, __LINE__); 02023 (void) strcpy(data->learning->obs_dimxname, (char *) val); 02024 (void) xmlFree(val); 02025 } 02026 else { 02027 data->learning->obs_dimxname = strdup("lon"); 02028 } 02029 (void) fprintf(stdout, "%s: Learning obs_dimx_name = %s\n", __FILE__, data->learning->obs_dimxname); 02030 02032 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "obs_dimy_name"); 02033 val = xml_get_setting(conf, path); 02034 if (val != NULL) { 02035 data->learning->obs_dimyname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02036 if (data->learning->obs_dimyname == NULL) alloc_error(__FILE__, __LINE__); 02037 (void) strcpy(data->learning->obs_dimyname, (char *) val); 02038 (void) xmlFree(val); 02039 } 02040 else { 02041 data->learning->obs_dimyname = strdup("lat"); 02042 } 02043 (void) fprintf(stdout, "%s: Learning obs_dimy_name = %s\n", __FILE__, data->learning->obs_dimyname); 02044 02046 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "obs_longitude_name"); 02047 val = xml_get_setting(conf, path); 02048 if (val != NULL) { 02049 data->learning->obs_lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02050 if (data->learning->obs_lonname == NULL) alloc_error(__FILE__, __LINE__); 02051 (void) strcpy(data->learning->obs_lonname, (char *) val); 02052 (void) xmlFree(val); 02053 } 02054 else { 02055 data->learning->obs_lonname = strdup("lon"); 02056 } 02057 (void) fprintf(stdout, "%s: Learning obs_longitude_name = %s\n", __FILE__, data->learning->obs_lonname); 02058 02060 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "obs_latitude_name"); 02061 val = xml_get_setting(conf, path); 02062 if (val != NULL) { 02063 data->learning->obs_latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02064 if (data->learning->obs_latname == NULL) alloc_error(__FILE__, __LINE__); 02065 (void) strcpy(data->learning->obs_latname, (char *) val); 02066 (void) xmlFree(val); 02067 } 02068 else { 02069 data->learning->obs_latname = strdup("lat"); 02070 } 02071 (void) fprintf(stdout, "%s: Learning obs_latitude_name = %s\n", __FILE__, data->learning->obs_latname); 02072 02074 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "obs_time_name"); 02075 val = xml_get_setting(conf, path); 02076 if (val != NULL) { 02077 data->learning->obs_timename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02078 if (data->learning->obs_timename == NULL) alloc_error(__FILE__, __LINE__); 02079 (void) strcpy(data->learning->obs_timename, (char *) val); 02080 
(void) xmlFree(val); 02081 } 02082 else { 02083 data->learning->obs_timename = strdup("time"); 02084 } 02085 (void) fprintf(stdout, "%s: Learning obs_time_name = %s\n", __FILE__, data->learning->obs_timename); 02086 02088 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "obs_eof_name"); 02089 val = xml_get_setting(conf, path); 02090 if (val != NULL) { 02091 data->learning->obs_eofname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02092 if (data->learning->obs_eofname == NULL) alloc_error(__FILE__, __LINE__); 02093 (void) strcpy(data->learning->obs_eofname, (char *) val); 02094 (void) xmlFree(val); 02095 } 02096 else { 02097 data->learning->obs_eofname = strdup("eof"); 02098 } 02099 (void) fprintf(stdout, "%s: Learning obs_eof_name = %s\n", __FILE__, data->learning->obs_eofname); 02100 } 02101 02103 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "sup_lonname"); 02104 val = xml_get_setting(conf, path); 02105 if (val != NULL) { 02106 data->learning->sup_lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02107 if (data->learning->sup_lonname == NULL) alloc_error(__FILE__, __LINE__); 02108 (void) strcpy(data->learning->sup_lonname, (char *) val); 02109 (void) xmlFree(val); 02110 } 02111 else 02112 data->learning->sup_lonname = strdup("lon"); 02113 (void) fprintf(stdout, "%s: Learning sup_lonname = %s\n", __FILE__, data->learning->sup_lonname); 02114 02116 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "sup_latname"); 02117 val = xml_get_setting(conf, path); 02118 if (val != NULL) { 02119 data->learning->sup_latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02120 if (data->learning->sup_latname == NULL) alloc_error(__FILE__, __LINE__); 02121 (void) strcpy(data->learning->sup_latname, (char *) val); 02122 (void) xmlFree(val); 02123 } 02124 else 02125 data->learning->sup_latname = strdup("lat"); 02126 (void) fprintf(stdout, "%s: Learning sup_latname = %s\n", __FILE__, data->learning->sup_latname); 02127 02129 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_time"); 02130 val = xml_get_setting(conf, path); 02131 if (val != NULL) { 02132 data->learning->nomvar_time = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02133 if (data->learning->nomvar_time == NULL) alloc_error(__FILE__, __LINE__); 02134 (void) strcpy(data->learning->nomvar_time, (char *) val); 02135 (void) xmlFree(val); 02136 } 02137 else 02138 data->learning->nomvar_time = strdup("time"); 02139 (void) fprintf(stdout, "%s: Learning nomvar_time = %s\n", __FILE__, data->learning->nomvar_time); 02140 02142 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_weight"); 02143 val = xml_get_setting(conf, path); 02144 if (val != NULL) { 02145 data->learning->nomvar_weight = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02146 if (data->learning->nomvar_weight == NULL) alloc_error(__FILE__, __LINE__); 02147 (void) strcpy(data->learning->nomvar_weight, (char *) val); 02148 (void) xmlFree(val); 02149 } 02150 else 02151 data->learning->nomvar_weight = strdup("poid"); 02152 (void) fprintf(stdout, "%s: Learning nomvar_weight = %s\n", __FILE__, data->learning->nomvar_weight); 02153 02155 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_class_clusters"); 02156 val = xml_get_setting(conf, path); 02157 if (val != NULL) { 02158 data->learning->nomvar_class_clusters = (char *) 
malloc((xmlStrlen(val)+1) * sizeof(char)); 02159 if (data->learning->nomvar_class_clusters == NULL) alloc_error(__FILE__, __LINE__); 02160 (void) strcpy(data->learning->nomvar_class_clusters, (char *) val); 02161 (void) xmlFree(val); 02162 } 02163 else 02164 data->learning->nomvar_class_clusters = strdup("clust_learn"); 02165 (void) fprintf(stdout, "%s: Learning nomvar_class_clusters = %s\n", __FILE__, data->learning->nomvar_class_clusters); 02166 02168 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg"); 02169 val = xml_get_setting(conf, path); 02170 if (val != NULL) { 02171 data->learning->nomvar_precip_reg = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02172 if (data->learning->nomvar_precip_reg == NULL) alloc_error(__FILE__, __LINE__); 02173 (void) strcpy(data->learning->nomvar_precip_reg, (char *) val); 02174 (void) xmlFree(val); 02175 } 02176 else 02177 data->learning->nomvar_precip_reg = strdup("reg"); 02178 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg = %s\n", __FILE__, data->learning->nomvar_precip_reg); 02179 02181 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg_cst"); 02182 val = xml_get_setting(conf, path); 02183 if (val != NULL) { 02184 data->learning->nomvar_precip_reg_cst = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02185 if (data->learning->nomvar_precip_reg_cst == NULL) alloc_error(__FILE__, __LINE__); 02186 (void) strcpy(data->learning->nomvar_precip_reg_cst, (char *) val); 02187 (void) xmlFree(val); 02188 } 02189 else 02190 data->learning->nomvar_precip_reg_cst = strdup("cst"); 02191 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg_cst = %s\n", __FILE__, data->learning->nomvar_precip_reg_cst); 02192 02194 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg_rsq"); 02195 val = xml_get_setting(conf, path); 02196 if (val != NULL) { 02197 data->learning->nomvar_precip_reg_rsq = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02198 if (data->learning->nomvar_precip_reg_rsq == NULL) alloc_error(__FILE__, __LINE__); 02199 (void) strcpy(data->learning->nomvar_precip_reg_rsq, (char *) val); 02200 (void) xmlFree(val); 02201 } 02202 else 02203 data->learning->nomvar_precip_reg_rsq = strdup("rsquare"); 02204 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg_rsq = %s\n", __FILE__, data->learning->nomvar_precip_reg_rsq); 02205 02207 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg_acor"); 02208 val = xml_get_setting(conf, path); 02209 if (val != NULL) { 02210 data->learning->nomvar_precip_reg_acor = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02211 if (data->learning->nomvar_precip_reg_acor == NULL) alloc_error(__FILE__, __LINE__); 02212 (void) strcpy(data->learning->nomvar_precip_reg_acor, (char *) val); 02213 (void) xmlFree(val); 02214 } 02215 else 02216 data->learning->nomvar_precip_reg_acor = strdup("autocor"); 02217 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg_acor = %s\n", __FILE__, data->learning->nomvar_precip_reg_acor); 02218 02220 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg_vif"); 02221 val = xml_get_setting(conf, path); 02222 if (val != NULL) { 02223 data->learning->nomvar_precip_reg_vif = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02224 if (data->learning->nomvar_precip_reg_vif == NULL) alloc_error(__FILE__, __LINE__); 02225 (void) 
strcpy(data->learning->nomvar_precip_reg_vif, (char *) val); 02226 (void) xmlFree(val); 02227 } 02228 else 02229 data->learning->nomvar_precip_reg_vif = strdup("vif"); 02230 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg_vif = %s\n", __FILE__, data->learning->nomvar_precip_reg_vif); 02231 02233 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg_dist"); 02234 val = xml_get_setting(conf, path); 02235 if (val != NULL) { 02236 data->learning->nomvar_precip_reg_dist = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02237 if (data->learning->nomvar_precip_reg_dist == NULL) alloc_error(__FILE__, __LINE__); 02238 (void) strcpy(data->learning->nomvar_precip_reg_dist, (char *) val); 02239 (void) xmlFree(val); 02240 } 02241 else 02242 data->learning->nomvar_precip_reg_dist = strdup("dist"); 02243 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg_dist = %s\n", __FILE__, data->learning->nomvar_precip_reg_dist); 02244 02246 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_reg_err"); 02247 val = xml_get_setting(conf, path); 02248 if (val != NULL) { 02249 data->learning->nomvar_precip_reg_err = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02250 if (data->learning->nomvar_precip_reg_err == NULL) alloc_error(__FILE__, __LINE__); 02251 (void) strcpy(data->learning->nomvar_precip_reg_err, (char *) val); 02252 (void) xmlFree(val); 02253 } 02254 else 02255 data->learning->nomvar_precip_reg_err = strdup("err"); 02256 (void) fprintf(stdout, "%s: Learning nomvar_precip_reg_err = %s\n", __FILE__, data->learning->nomvar_precip_reg_err); 02257 02259 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_index"); 02260 val = xml_get_setting(conf, path); 02261 if (val != NULL) { 02262 data->learning->nomvar_precip_index = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02263 if (data->learning->nomvar_precip_index == NULL) alloc_error(__FILE__, __LINE__); 02264 (void) strcpy(data->learning->nomvar_precip_index, (char *) val); 02265 (void) xmlFree(val); 02266 } 02267 else 02268 data->learning->nomvar_precip_index = strdup("rrd"); 02269 (void) fprintf(stdout, "%s: Learning nomvar_precip_index = %s\n", __FILE__, data->learning->nomvar_precip_index); 02270 02272 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_precip_index_obs"); 02273 val = xml_get_setting(conf, path); 02274 if (val != NULL) { 02275 data->learning->nomvar_precip_index_obs = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02276 if (data->learning->nomvar_precip_index_obs == NULL) alloc_error(__FILE__, __LINE__); 02277 (void) strcpy(data->learning->nomvar_precip_index_obs, (char *) val); 02278 (void) xmlFree(val); 02279 } 02280 else 02281 data->learning->nomvar_precip_index_obs = strdup("rro"); 02282 (void) fprintf(stdout, "%s: Learning nomvar_precip_index_obs = %s\n", __FILE__, data->learning->nomvar_precip_index_obs); 02283 02285 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_sup_index"); 02286 val = xml_get_setting(conf, path); 02287 if (val != NULL) { 02288 data->learning->nomvar_sup_index = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02289 if (data->learning->nomvar_sup_index == NULL) alloc_error(__FILE__, __LINE__); 02290 (void) strcpy(data->learning->nomvar_sup_index, (char *) val); 02291 (void) xmlFree(val); 02292 } 02293 else 02294 data->learning->nomvar_sup_index = strdup("ta"); 02295 
(void) fprintf(stdout, "%s: Learning nomvar_sup_index = %s\n", __FILE__, data->learning->nomvar_sup_index); 02296 02298 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_sup_val"); 02299 val = xml_get_setting(conf, path); 02300 if (val != NULL) { 02301 data->learning->nomvar_sup_val = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02302 if (data->learning->nomvar_sup_val == NULL) alloc_error(__FILE__, __LINE__); 02303 (void) strcpy(data->learning->nomvar_sup_val, (char *) val); 02304 (void) xmlFree(val); 02305 } 02306 else 02307 data->learning->nomvar_sup_val = strdup("tad"); 02308 (void) fprintf(stdout, "%s: Learning nomvar_sup_val = %s\n", __FILE__, data->learning->nomvar_sup_val); 02309 02311 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_sup_index_mean"); 02312 val = xml_get_setting(conf, path); 02313 if (val != NULL) { 02314 data->learning->nomvar_sup_index_mean = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02315 if (data->learning->nomvar_sup_index_mean == NULL) alloc_error(__FILE__, __LINE__); 02316 (void) strcpy(data->learning->nomvar_sup_index_mean, (char *) val); 02317 (void) xmlFree(val); 02318 } 02319 else 02320 data->learning->nomvar_sup_index_mean = strdup("tancp_mean"); 02321 (void) fprintf(stdout, "%s: Learning nomvar_sup_index_mean = %s\n", __FILE__, data->learning->nomvar_sup_index_mean); 02322 02324 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_sup_index_var"); 02325 val = xml_get_setting(conf, path); 02326 if (val != NULL) { 02327 data->learning->nomvar_sup_index_var = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02328 if (data->learning->nomvar_sup_index_var == NULL) alloc_error(__FILE__, __LINE__); 02329 (void) strcpy(data->learning->nomvar_sup_index_var, (char *) val); 02330 (void) xmlFree(val); 02331 } 02332 else 02333 data->learning->nomvar_sup_index_var = strdup("tancp_var"); 02334 (void) fprintf(stdout, "%s: Learning nomvar_sup_index_var = %s\n", __FILE__, data->learning->nomvar_sup_index_var); 02335 02337 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "learning", "nomvar_pc_normalized_var"); 02338 val = xml_get_setting(conf, path); 02339 if (val != NULL) { 02340 data->learning->nomvar_pc_normalized_var = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02341 if (data->learning->nomvar_pc_normalized_var == NULL) alloc_error(__FILE__, __LINE__); 02342 (void) strcpy(data->learning->nomvar_pc_normalized_var, (char *) val); 02343 (void) xmlFree(val); 02344 } 02345 else 02346 data->learning->nomvar_pc_normalized_var = strdup("eca_pc_learn"); 02347 (void) fprintf(stdout, "%s: Learning nomvar_pc_normalized_var = %s\n", __FILE__, data->learning->nomvar_pc_normalized_var); 02348 02349 02350 /**** REGRESSION CONFIGURATION ****/ 02352 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "filename"); 02353 val = xml_get_setting(conf, path); 02354 if (val != NULL) { 02355 data->reg->filename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02356 if (data->reg->filename == NULL) alloc_error(__FILE__, __LINE__); 02357 (void) strcpy(data->reg->filename, (char *) val); 02358 (void) fprintf(stdout, "%s: Regression points filename = %s\n", __FILE__, data->reg->filename); 02359 (void) xmlFree(val); 02360 02362 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "dimx_name"); 02363 val = xml_get_setting(conf, path); 02364 if (val != NULL) { 02365 
data->reg->dimxname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02366 if (data->reg->dimxname == NULL) alloc_error(__FILE__, __LINE__); 02367 (void) strcpy(data->reg->dimxname, (char *) val); 02368 (void) fprintf(stdout, "%s: Regression points dimx_name = %s\n", __FILE__, data->reg->dimxname); 02369 (void) xmlFree(val); 02370 } 02371 else { 02372 data->reg->dimxname = strdup("lon"); 02373 (void) fprintf(stderr, "%s: Default regression points dimx_name setting = %s.\n", __FILE__, data->reg->dimxname); 02374 (void) xmlFree(val); 02375 } 02377 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "dimy_name"); 02378 val = xml_get_setting(conf, path); 02379 if (val != NULL) { 02380 data->reg->dimyname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02381 if (data->reg->dimyname == NULL) alloc_error(__FILE__, __LINE__); 02382 (void) strcpy(data->reg->dimyname, (char *) val); 02383 (void) fprintf(stdout, "%s: Regression points dimy_name = %s\n", __FILE__, data->reg->dimyname); 02384 (void) xmlFree(val); 02385 } 02386 else { 02387 data->reg->dimyname = strdup("dimy"); 02388 (void) fprintf(stderr, "%s: Default regression points dimy_name setting = %s.\n", __FILE__, data->reg->dimyname); 02389 (void) xmlFree(val); 02390 } 02392 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "longitude_name"); 02393 val = xml_get_setting(conf, path); 02394 if (val != NULL) { 02395 data->reg->lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02396 if (data->reg->lonname == NULL) alloc_error(__FILE__, __LINE__); 02397 (void) strcpy(data->reg->lonname, (char *) val); 02398 (void) fprintf(stdout, "%s: Regression points longitude_name = %s\n", __FILE__, data->reg->lonname); 02399 (void) xmlFree(val); 02400 } 02401 else { 02402 data->reg->lonname = strdup("lon"); 02403 (void) fprintf(stderr, "%s: Default regression points longitude_name setting = %s.\n", __FILE__, data->reg->lonname); 02404 (void) xmlFree(val); 02405 } 02407 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "latitude_name"); 02408 val = xml_get_setting(conf, path); 02409 if (val != NULL) { 02410 data->reg->latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02411 if (data->reg->latname == NULL) alloc_error(__FILE__, __LINE__); 02412 (void) strcpy(data->reg->latname, (char *) val); 02413 (void) fprintf(stdout, "%s: Regression points latitude_name = %s\n", __FILE__, data->reg->latname); 02414 (void) xmlFree(val); 02415 } 02416 else { 02417 data->reg->latname = strdup("lat"); 02418 (void) fprintf(stderr, "%s: Default regression points latitude_name setting = %s.\n", __FILE__, data->reg->latname); 02419 (void) xmlFree(val); 02420 } 02422 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "pts_name"); 02423 val = xml_get_setting(conf, path); 02424 if (val != NULL) { 02425 data->reg->ptsname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02426 if (data->reg->ptsname == NULL) alloc_error(__FILE__, __LINE__); 02427 (void) strcpy(data->reg->ptsname, (char *) val); 02428 (void) fprintf(stdout, "%s: Regression points pts_name = %s\n", __FILE__, data->reg->ptsname); 02429 (void) xmlFree(val); 02430 } 02431 else { 02432 data->reg->ptsname = strdup("pts"); 02433 (void) fprintf(stderr, "%s: Default regression points pts_name setting = %s.\n", __FILE__, data->reg->ptsname); 02434 (void) xmlFree(val); 02435 } 02437 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", 
"regression", "distance"); 02438 val = xml_get_setting(conf, path); 02439 if (val != NULL) { 02440 data->reg->dist = xmlXPathCastStringToNumber(val); 02441 (void) xmlFree(val); 02442 (void) fprintf(stdout, "%s: Regression distance in meters for spatial mean = %lf\n", __FILE__, data->reg->dist); 02443 } 02444 else { 02445 data->reg->dist = 40000.0; 02446 (void) fprintf(stdout, "%s: Regression distance in meters for spatial mean = %lf.\n", __FILE__, data->reg->dist); 02447 } 02449 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "regression_save"); 02450 val = xml_get_setting(conf, path); 02451 if (val != NULL) 02452 data->reg->reg_save = (int) strtol((char *) val, (char **)NULL, 10); 02453 else 02454 data->reg->reg_save = FALSE; 02455 if (data->reg->reg_save != FALSE && data->reg->reg_save != TRUE) { 02456 (void) fprintf(stderr, "%s: Invalid regression_save value %s in configuration file. Aborting.\n", __FILE__, val); 02457 return -1; 02458 } 02459 (void) fprintf(stdout, "%s: regression_save=%d\n", __FILE__, data->reg->reg_save); 02460 if (val != NULL) 02461 (void) xmlFree(val); 02462 } 02463 else { 02464 (void) fprintf(stderr, "%s: No regression points. Cannot perform learning or downscale. Can just output data given analog days.\n", 02465 __FILE__); 02466 data->reg->filename = NULL; 02467 (void) xmlFree(val); 02468 } 02469 02470 /* If regression data is saved, additional parameters are needed */ 02471 if (data->reg->reg_save == TRUE) { 02473 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "filename_save_ctrl_reg"); 02474 val = xml_get_setting(conf, path); 02475 if (val != NULL) { 02476 data->reg->filename_save_ctrl_reg = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02477 if (data->reg->filename_save_ctrl_reg == NULL) alloc_error(__FILE__, __LINE__); 02478 (void) strcpy(data->reg->filename_save_ctrl_reg, (char *) val); 02479 (void) fprintf(stdout, "%s: Regression filename_save_ctrl_reg = %s\n", __FILE__, data->reg->filename_save_ctrl_reg); 02480 (void) xmlFree(val); 02481 } 02482 else { 02483 (void) fprintf(stderr, "%s: Missing regression filename_save_ctrl_reg setting. Aborting.\n", __FILE__); 02484 (void) xmlFree(val); 02485 return -1; 02486 } 02488 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "filename_save_other_reg"); 02489 val = xml_get_setting(conf, path); 02490 if (val != NULL) { 02491 data->reg->filename_save_other_reg = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02492 if (data->reg->filename_save_other_reg == NULL) alloc_error(__FILE__, __LINE__); 02493 (void) strcpy(data->reg->filename_save_other_reg, (char *) val); 02494 (void) fprintf(stdout, "%s: Regression filename_save_other_reg = %s\n", __FILE__, data->reg->filename_save_other_reg); 02495 (void) xmlFree(val); 02496 } 02497 else { 02498 (void) fprintf(stderr, "%s: Missing regression filename_save_other_reg setting. 
Aborting.\n", __FILE__); 02499 (void) xmlFree(val); 02500 return -1; 02501 } 02502 02504 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "regression", "time_name"); 02505 val = xml_get_setting(conf, path); 02506 if (val != NULL) { 02507 data->reg->timename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02508 if (data->reg->timename == NULL) alloc_error(__FILE__, __LINE__); 02509 (void) strcpy(data->reg->timename, (char *) val); 02510 (void) xmlFree(val); 02511 } 02512 else 02513 data->reg->timename = strdup("time"); 02514 (void) fprintf(stdout, "%s: Regression time dimension name = %s\n", __FILE__, data->reg->timename); 02515 } 02516 02517 /**** LARGE-SCALE FIELDS CONFIGURATION ****/ 02518 02520 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_large_scale_fields"); 02521 val = xml_get_setting(conf, path); 02522 if (val != NULL) 02523 data->field[0].n_ls = (int) strtol((char *) val, (char **)NULL, 10); 02524 else { 02525 (void) fprintf(stderr, "%s: Missing or invalid number_of_large_scale_fields setting. Aborting.\n", __FILE__); 02526 return -1; 02527 } 02528 if (val != NULL) 02529 (void) xmlFree(val); 02530 // if (data->field[0].n_ls == 0) { 02531 // (void) fprintf(stderr, "%s: number_of_large_scale_fields cannot be 0. Aborting.\n", __FILE__); 02532 // return -1; 02533 // } 02534 02536 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_large_scale_control_fields"); 02537 val = xml_get_setting(conf, path); 02538 if (val != NULL) 02539 data->field[1].n_ls = (int) strtol((char *) val, (char **)NULL, 10); 02540 else { 02541 (void) fprintf(stderr, "%s: Missing or invalid number_of_large_scale_control_fields setting. Aborting.\n", __FILE__); 02542 return -1; 02543 } 02544 if (val != NULL) 02545 (void) xmlFree(val); 02546 if (data->field[1].n_ls == 0) { 02547 (void) fprintf(stderr, "%s: number_of_large_scale_control_fields cannot be 0. Aborting.\n", __FILE__); 02548 return -1; 02549 } 02550 02552 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_secondary_large_scale_fields"); 02553 val = xml_get_setting(conf, path); 02554 if (val != NULL) 02555 data->field[2].n_ls = (int) strtol((char *) val, (char **)NULL, 10); 02556 else 02557 data->field[2].n_ls = 0; 02558 if (val != NULL) 02559 (void) xmlFree(val); 02560 if (data->field[2].n_ls == 0) { 02561 (void) fprintf(stderr, "%s: number_of_secondary_large_scale_fields cannot be 0. Aborting.\n", __FILE__); 02562 return -1; 02563 } 02564 02566 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_secondary_large_scale_control_fields"); 02567 val = xml_get_setting(conf, path); 02568 if (val != NULL) 02569 data->field[3].n_ls = (int) strtol((char *) val, (char **)NULL, 10); 02570 else { 02571 (void) fprintf(stderr, "%s: Missing or invalid number_of_secondary_large_scale_control_fields setting. Aborting.\n", __FILE__); 02572 return -1; 02573 } 02574 if (val != NULL) 02575 (void) xmlFree(val); 02576 if (data->field[3].n_ls == 0) { 02577 (void) fprintf(stderr, "%s: number_of_secondary_large_scale_control_fields cannot be 0. Aborting.\n", __FILE__); 02578 return -1; 02579 } 02580 02581 /* Loop over field categories */ 02582 for (i=0; i<NCAT; i++) { 02583 02584 /* Only process if at least one field defined */ 02585 if (data->field[i].n_ls > 0) { 02586 02587 if (data->field[i].n_ls > 1) { 02588 (void) fprintf(stderr, "%s: WARNING: only 1 large-scale field supported. 
Going back to one field and ignoring others in the configuration file!!!\n", __FILE__); 02589 data->field[i].n_ls = 1; 02590 } 02591 02592 /* Allocate appropriate memory for data structures for each large-scale field */ 02593 data->field[i].data = (field_data_struct *) malloc(data->field[i].n_ls * sizeof(field_data_struct)); 02594 if (data->field[i].data == NULL) alloc_error(__FILE__, __LINE__); 02595 data->field[i].proj = (proj_struct *) malloc(data->field[i].n_ls * sizeof(proj_struct)); 02596 if (data->field[i].proj == NULL) alloc_error(__FILE__, __LINE__); 02597 02598 for (j=0; j<data->field[i].n_ls; j++) { 02599 data->field[i].data[j].info = (info_field_struct *) malloc(sizeof(info_field_struct)); 02600 if (data->field[i].data[j].info == NULL) alloc_error(__FILE__, __LINE__); 02601 data->field[i].data[j].clim_info = (clim_info_struct *) malloc(sizeof(clim_info_struct)); 02602 if (data->field[i].data[j].clim_info == NULL) alloc_error(__FILE__, __LINE__); 02603 data->field[i].data[j].eof_info = (eof_info_struct *) malloc(sizeof(eof_info_struct)); 02604 if (data->field[i].data[j].eof_info == NULL) alloc_error(__FILE__, __LINE__); 02605 data->field[i].data[j].eof_info->info = (info_field_struct *) malloc(sizeof(info_field_struct)); 02606 if (data->field[i].data[j].eof_info->info == NULL) alloc_error(__FILE__, __LINE__); 02607 data->field[i].data[j].eof_data = (eof_data_struct *) malloc(sizeof(eof_data_struct)); 02608 if (data->field[i].data[j].eof_data == NULL) alloc_error(__FILE__, __LINE__); 02609 data->field[i].data[j].down = (downscale_struct *) malloc(sizeof(downscale_struct)); 02610 if (data->field[i].data[j].down == NULL) alloc_error(__FILE__, __LINE__); 02611 02612 data->field[i].proj[j].grid_mapping_name = NULL; 02613 data->field[i].proj[j].name = NULL; 02614 data->field[i].proj[j].coords = NULL; 02615 02616 data->field[i].data[j].field_ls = NULL; 02617 data->field[i].data[j].field_eof_ls = NULL; 02618 data->field[i].data[j].eof_data->eof_ls = NULL; 02619 data->field[i].data[j].eof_data->sing_ls = NULL; 02620 data->field[i].data[j].down->mean_dist = NULL; 02621 data->field[i].data[j].down->var_dist = NULL; 02622 } 02623 } 02624 } 02625 02626 /* Loop over field categories */ 02627 for (cat=0; cat<NCAT; cat++) { 02628 02629 /* Set strings */ 02630 if (cat == 0) { 02631 catstr = strdup("large_scale_fields"); 02632 catstrt = strdup("Large-scale fields"); 02633 } 02634 else if (cat == 1) { 02635 catstr = strdup("large_scale_control_fields"); 02636 catstrt = strdup("Large-scale control fields"); 02637 } 02638 else if (cat == 2) { 02639 catstr = strdup("secondary_large_scale_fields"); 02640 catstrt = strdup("Large-scale secondary fields"); 02641 } 02642 else if (cat == 3) { 02643 catstr = strdup("secondary_large_scale_control_fields"); 02644 catstrt = strdup("Large-scale secondary control fields"); 02645 } 02646 else { 02647 catstr = strdup("large_scale_fields"); 02648 catstrt = strdup("Large-scale fields"); 02649 } 02650 02651 /* Process only if at least one large-scale field defined */ 02652 if (data->field[cat].n_ls > 0) { 02653 02654 (void) fprintf(stdout, "%s: number_of_%s = %d\n", __FILE__, catstr, data->field[cat].n_ls); 02655 02656 /* Loop over large-scale fields */ 02657 /* Set filename and variable name strings */ 02658 for (i=0; i<data->field[cat].n_ls; i++) { 02659 02660 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "name", i+1); 02661 val = xml_get_setting(conf, path); 02662 if (val != NULL) { 02663 
data->field[cat].data[i].nomvar_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02664 if (data->field[cat].data[i].nomvar_ls == NULL) alloc_error(__FILE__, __LINE__); 02665 (void) strcpy( data->field[cat].data[i].nomvar_ls, (char *) val); 02666 (void) xmlFree(val); 02667 } 02668 else { 02669 (void) fprintf(stderr, "%s: Missing name setting %s. Aborting.\n", __FILE__, catstrt); 02670 return -1; 02671 } 02672 02673 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "filename", i+1); 02674 val = xml_get_setting(conf, path); 02675 if (val != NULL) { 02676 data->field[cat].data[i].filename_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02677 if (data->field[cat].data[i].filename_ls == NULL) alloc_error(__FILE__, __LINE__); 02678 (void) strcpy(data->field[cat].data[i].filename_ls, (char *) val); 02679 (void) xmlFree(val); 02680 (void) fprintf(stdout, "%s: %s #%d: name = %s filename = %s\n", 02681 __FILE__, catstrt, i, data->field[cat].data[i].nomvar_ls, data->field[cat].data[i].filename_ls); 02682 } 02683 else { 02684 (void) fprintf(stderr, "%s: Missing filename setting %s. Aborting.\n", __FILE__, catstrt); 02685 return -1; 02686 } 02687 02688 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "dimy_name", i+1); 02689 val = xml_get_setting(conf, path); 02690 if (val != NULL) { 02691 data->field[cat].data[i].dimyname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02692 if (data->field[cat].data[i].dimyname == NULL) alloc_error(__FILE__, __LINE__); 02693 (void) strcpy(data->field[cat].data[i].dimyname, (char *) val); 02694 (void) xmlFree(val); 02695 } 02696 else 02697 data->field[cat].data[i].dimyname = strdup("lat"); 02698 02699 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "dimx_name", i+1); 02700 val = xml_get_setting(conf, path); 02701 if (val != NULL) { 02702 data->field[cat].data[i].dimxname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02703 if (data->field[cat].data[i].dimxname == NULL) alloc_error(__FILE__, __LINE__); 02704 (void) strcpy(data->field[cat].data[i].dimxname, (char *) val); 02705 (void) xmlFree(val); 02706 } 02707 else 02708 data->field[cat].data[i].dimxname = strdup("lon"); 02709 02710 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "latitude_name", i+1); 02711 val = xml_get_setting(conf, path); 02712 if (val != NULL) { 02713 data->field[cat].data[i].latname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02714 if (data->field[cat].data[i].latname == NULL) alloc_error(__FILE__, __LINE__); 02715 (void) strcpy(data->field[cat].data[i].latname, (char *) val); 02716 (void) xmlFree(val); 02717 } 02718 else 02719 data->field[cat].data[i].latname = strdup("lat"); 02720 02721 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "longitude_name", i+1); 02722 val = xml_get_setting(conf, path); 02723 if (val != NULL) { 02724 data->field[cat].data[i].lonname = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02725 if (data->field[cat].data[i].lonname == NULL) alloc_error(__FILE__, __LINE__); 02726 (void) strcpy(data->field[cat].data[i].lonname, (char *) val); 02727 (void) xmlFree(val); 02728 } 02729 else 02730 data->field[cat].data[i].lonname = strdup("lon"); 02731 02732 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "time_name", i+1); 02733 val = xml_get_setting(conf, path); 02734 if (val != NULL) { 
02735 data->field[cat].data[i].timename = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02736 if (data->field[cat].data[i].timename == NULL) alloc_error(__FILE__, __LINE__); 02737 (void) strcpy(data->field[cat].data[i].timename, (char *) val); 02738 (void) xmlFree(val); 02739 } 02740 else 02741 data->field[cat].data[i].timename = strdup("time"); 02742 02743 /* Fallback projection type */ 02744 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "projection", i+1); 02745 val = xml_get_setting(conf, path); 02746 if (val != NULL) { 02747 data->field[cat].proj[i].name = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02748 if (data->field[cat].proj[i].name == NULL) alloc_error(__FILE__, __LINE__); 02749 (void) strcpy(data->field[cat].proj[i].name, (char *) val); 02750 (void) xmlFree(val); 02751 (void) fprintf(stdout, "%s: %s #%d: name = %s projection = %s\n", 02752 __FILE__, catstrt, i, data->field[cat].data[i].nomvar_ls, data->field[cat].proj[i].name); 02753 } 02754 else 02755 data->field[cat].proj[i].name = strdup("unknown"); 02756 02757 /* Fallback coordinate system dimensions */ 02758 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "coordinates", i+1); 02759 val = xml_get_setting(conf, path); 02760 if (val != NULL) { 02761 data->field[cat].proj[i].coords = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02762 if (data->field[cat].proj[i].coords == NULL) alloc_error(__FILE__, __LINE__); 02763 (void) strcpy(data->field[cat].proj[i].coords, (char *) val); 02764 (void) xmlFree(val); 02765 (void) fprintf(stdout, "%s: %s #%d: name = %s coordinates = %s\n", 02766 __FILE__, catstrt, i, data->field[cat].data[i].nomvar_ls, data->field[cat].proj[i].coords); 02767 } 02768 else 02769 data->field[cat].proj[i].coords = strdup("2D"); 02770 02771 02775 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "clim_remove", i+1); 02776 val = xml_get_setting(conf, path); 02777 if (val != NULL) 02778 data->field[cat].data[i].clim_info->clim_remove = (int) xmlXPathCastStringToNumber(val); 02779 else 02780 data->field[cat].data[i].clim_info->clim_remove = FALSE; 02781 (void) fprintf(stdout, "%s: clim_remove = %d\n", __FILE__, data->field[cat].data[i].clim_info->clim_remove); 02782 if (val != NULL) 02783 (void) xmlFree(val); 02784 02786 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "clim_provided", i+1); 02787 val = xml_get_setting(conf, path); 02788 if (val != NULL) { 02789 if ( !xmlStrcmp(val, (xmlChar *) "1") ) 02790 data->field[cat].data[i].clim_info->clim_provided = TRUE; 02791 else 02792 data->field[cat].data[i].clim_info->clim_provided = FALSE; 02793 (void) fprintf(stdout, "%s: clim_provided #%d = %d\n", __FILE__, i+1, data->field[cat].data[i].clim_info->clim_provided); 02794 (void) xmlFree(val); 02795 02796 /* If climatology is provided, additional parameters are needed */ 02797 if (data->field[cat].data[i].clim_info->clim_provided == TRUE) { 02798 02800 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "clim_openfilename",i+1); 02801 val = xml_get_setting(conf, path); 02802 if (val != NULL) { 02803 data->field[cat].data[i].clim_info->clim_filein_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02804 if (data->field[cat].data[i].clim_info->clim_filein_ls == NULL) alloc_error(__FILE__, __LINE__); 02805 (void) strcpy(data->field[cat].data[i].clim_info->clim_filein_ls, (char *) val); 02806 
(void) fprintf(stdout, "%s: Climatology input filename #%d = %s\n", __FILE__, i+1, 02807 data->field[cat].data[i].clim_info->clim_filein_ls); 02808 (void) xmlFree(val); 02809 } 02810 else { 02811 (void) fprintf(stderr, "%s: Missing clim_openfilename setting %s. Aborting.\n", __FILE__, catstrt); 02812 return -1; 02813 } 02814 } 02815 } 02816 else 02817 data->field[cat].data[i].clim_info->clim_provided = FALSE; 02818 02820 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "clim_save", i+1); 02821 val = xml_get_setting(conf, path); 02822 if (val != NULL) { 02823 if ( !xmlStrcmp(val, (xmlChar *) "1") ) 02824 data->field[cat].data[i].clim_info->clim_save = TRUE; 02825 else 02826 data->field[cat].data[i].clim_info->clim_save = FALSE; 02827 (void) fprintf(stdout, "%s: clim_save #%d = %d\n", __FILE__, i+1, data->field[cat].data[i].clim_info->clim_save); 02828 (void) xmlFree(val); 02829 02830 /* If we want to save climatology in output file */ 02831 if (data->field[cat].data[i].clim_info->clim_save == TRUE) { 02832 02833 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "clim_savefilename",i+1); 02834 val = xml_get_setting(conf, path); 02835 if (val != NULL) { 02836 data->field[cat].data[i].clim_info->clim_fileout_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02837 if (data->field[cat].data[i].clim_info->clim_fileout_ls == NULL) alloc_error(__FILE__, __LINE__); 02838 (void) strcpy(data->field[cat].data[i].clim_info->clim_fileout_ls, (char *) val); 02839 (void) fprintf(stdout, "%s: Climatology output filename #%d = %s\n", __FILE__, i+1, 02840 data->field[cat].data[i].clim_info->clim_fileout_ls); 02841 (void) xmlFree(val); 02842 } 02843 else { 02844 (void) fprintf(stderr, "%s: Missing clim_savefilename setting %s. Aborting.\n", __FILE__, catstrt); 02845 return -1; 02846 } 02847 } 02848 02849 /* Climatology variable name */ 02850 if (data->field[cat].data[i].clim_info->clim_save == TRUE || data->field[cat].data[i].clim_info->clim_provided == TRUE) { 02851 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "clim_name", i+1); 02852 val = xml_get_setting(conf, path); 02853 if (val != NULL) { 02854 data->field[cat].data[i].clim_info->clim_nomvar_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02855 if (data->field[cat].data[i].clim_info->clim_nomvar_ls == NULL) alloc_error(__FILE__, __LINE__); 02856 (void) strcpy(data->field[cat].data[i].clim_info->clim_nomvar_ls, (char *) val); 02857 (void) fprintf(stdout, "%s: Climatology variable name #%d = %s\n", __FILE__, i+1, 02858 data->field[cat].data[i].clim_info->clim_nomvar_ls); 02859 (void) xmlFree(val); 02860 } 02861 else { 02862 (void) fprintf(stderr, "%s: Missing clim_name setting %s. 
Aborting.\n", __FILE__, catstrt); 02863 return -1; 02864 } 02865 } 02866 } 02867 else 02868 data->field[cat].data[i].clim_info->clim_save = FALSE; 02869 02872 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "eof_project", i+1); 02873 val = xml_get_setting(conf, path); 02874 if (val != NULL) 02875 data->field[cat].data[i].eof_info->eof_project = (int) xmlXPathCastStringToNumber(val); 02876 else 02877 data->field[cat].data[i].eof_info->eof_project = FALSE; 02878 (void) fprintf(stdout, "%s: eof_project = %d\n", __FILE__, data->field[cat].data[i].eof_info->eof_project); 02879 if (val != NULL) 02880 (void) xmlFree(val); 02881 02882 if (data->field[cat].data[i].eof_info->eof_project == TRUE) { 02884 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, 02885 "number_of_eofs", i+1); 02886 val = xml_get_setting(conf, path); 02887 if (val != NULL) { 02888 data->field[cat].data[i].eof_info->neof_ls = (int) xmlXPathCastStringToNumber(val); 02889 if (data->field[cat].data[i].eof_info->neof_ls != data->learning->rea_neof) { 02890 (void) fprintf(stderr, 02891 "%s: Fatal error in configuration. The number of eof for field #%d of category %d is %d and the corresponding learning number of eof is %d. They should be equal!! Aborting.\n", 02892 __FILE__, i, cat, data->field[cat].data[i].eof_info->neof_ls, data->learning->rea_neof); 02893 return -1; 02894 } 02895 (void) fprintf(stdout, "%s: number_of_eofs = %d\n", __FILE__, data->field[cat].data[i].eof_info->neof_ls); 02896 (void) xmlFree(val); 02897 } 02898 02900 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "eof_coordinates", i+1); 02901 val = xml_get_setting(conf, path); 02902 if (val != NULL) { 02903 data->field[cat].data[i].eof_info->eof_coords = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02904 if (data->field[cat].data[i].eof_info->eof_coords == NULL) alloc_error(__FILE__, __LINE__); 02905 (void) strcpy(data->field[cat].data[i].eof_info->eof_coords, (char *) val); 02906 (void) fprintf(stdout, "%s: %s #%d: name = %s eof_coordinates = %s\n", 02907 __FILE__, catstrt, i, data->field[cat].data[i].nomvar_ls, data->field[cat].data[i].eof_info->eof_coords); 02908 (void) xmlFree(val); 02909 } 02910 else 02911 data->field[cat].data[i].eof_info->eof_coords = strdup("2D"); 02912 02914 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "eof_openfilename", i+1); 02915 val = xml_get_setting(conf, path); 02916 if (val != NULL) { 02917 data->field[cat].data[i].eof_info->eof_filein_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02918 if (data->field[cat].data[i].eof_info->eof_filein_ls == NULL) alloc_error(__FILE__, __LINE__); 02919 (void) strcpy(data->field[cat].data[i].eof_info->eof_filein_ls, (char *) val); 02920 (void) fprintf(stdout, "%s: EOF/Singular values input filename #%d = %s\n", __FILE__, i+1, 02921 data->field[cat].data[i].eof_info->eof_filein_ls); 02922 (void) xmlFree(val); 02923 } 02924 else { 02925 (void) fprintf(stderr, "%s: Missing eof_openfilename setting. 
Aborting.\n", __FILE__); 02926 return -1; 02927 } 02928 02930 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "eof_scale", i+1); 02931 val = xml_get_setting(conf, path); 02932 if (val != NULL) 02933 data->field[cat].data[i].eof_info->eof_scale = xmlXPathCastStringToNumber(val); 02934 else 02935 data->field[cat].data[i].eof_info->eof_scale = 1.0; 02936 (void) fprintf(stdout, "%s: units scaling = %lf\n", __FILE__, data->field[cat].data[i].eof_info->eof_scale); 02937 if (val != NULL) 02938 (void) xmlFree(val); 02939 02941 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "eof_weight", i+1); 02942 val = xml_get_setting(conf, path); 02943 if (val != NULL) 02944 data->field[cat].data[i].eof_info->eof_weight = (int) strtol((char *) val, (char **)NULL, 10); 02945 else 02946 data->field[cat].data[i].eof_info->eof_weight = FALSE; 02947 if (data->field[cat].data[i].eof_info->eof_weight != FALSE && data->field[cat].data[i].eof_info->eof_weight != TRUE) { 02948 (void) fprintf(stderr, "%s: Invalid or missing downscaling eof_weight value %s in configuration file. Aborting.\n", __FILE__, val); 02949 return -1; 02950 } 02951 (void) fprintf(stdout, "%s: downscaling eof_weight = %d\n", __FILE__, data->field[cat].data[i].eof_info->eof_weight); 02952 if (val != NULL) 02953 (void) xmlFree(val); 02954 02956 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "eof_name", i+1); 02957 val = xml_get_setting(conf, path); 02958 if (val != NULL) { 02959 data->field[cat].data[i].eof_data->eof_nomvar_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02960 if (data->field[cat].data[i].eof_data->eof_nomvar_ls == NULL) alloc_error(__FILE__, __LINE__); 02961 (void) strcpy(data->field[cat].data[i].eof_data->eof_nomvar_ls, (char *) val); 02962 (void) fprintf(stdout, "%s: EOF variable name #%d = %s\n", __FILE__, i+1, data->field[cat].data[i].eof_data->eof_nomvar_ls); 02963 (void) xmlFree(val); 02964 } 02965 else { 02966 (void) fprintf(stderr, "%s: Missing eof_name setting. Aborting.\n", __FILE__); 02967 return -1; 02968 } 02970 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s[@id=\"%d\"]", "setting", catstr, "sing_name", i+1); 02971 val = xml_get_setting(conf, path); 02972 if (val != NULL) { 02973 data->field[cat].data[i].eof_data->sing_nomvar_ls = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 02974 if (data->field[cat].data[i].eof_data->sing_nomvar_ls == NULL) alloc_error(__FILE__, __LINE__); 02975 (void) strcpy(data->field[cat].data[i].eof_data->sing_nomvar_ls, (char *) val); 02976 (void) fprintf(stdout, "%s: Singular values variable name #%d = %s\n", __FILE__, i+1, 02977 data->field[cat].data[i].eof_data->sing_nomvar_ls); 02978 (void) xmlFree(val); 02979 } 02980 else { 02981 (void) fprintf(stderr, "%s: Missing sing_name setting. 
Aborting.\n", __FILE__); 02982 return -1; 02983 } 02984 } 02985 } 02986 } 02987 (void) free(catstr); 02988 (void) free(catstrt); 02989 } 02990 02991 02992 /**** CONTROL-RUN PERIOD CONFIGURATION ****/ 02993 02995 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "period_ctrl", "downscale"); 02996 val = xml_get_setting(conf, path); 02997 if (val != NULL) { 02998 data->conf->period_ctrl->downscale = xmlXPathCastStringToNumber(val); 02999 (void) fprintf(stdout, "%s: period_ctrl downscale = %d\n", __FILE__, data->conf->period_ctrl->downscale); 03000 (void) xmlFree(val); 03001 } 03002 else 03003 data->conf->period_ctrl->downscale = TRUE; 03004 if (data->conf->period_ctrl->downscale == TRUE) { 03006 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period_ctrl", "period", "year_begin"); 03007 val = xml_get_setting(conf, path); 03008 if (val != NULL) { 03009 data->conf->period_ctrl->year_begin = xmlXPathCastStringToNumber(val); 03010 (void) fprintf(stdout, "%s: period_ctrl year_begin = %d\n", __FILE__, data->conf->period_ctrl->year_begin); 03011 (void) xmlFree(val); 03012 } 03013 else 03014 data->conf->period_ctrl->year_begin = -1; 03016 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period_ctrl", "period", "month_begin"); 03017 val = xml_get_setting(conf, path); 03018 if (val != NULL) { 03019 data->conf->period_ctrl->month_begin = xmlXPathCastStringToNumber(val); 03020 (void) fprintf(stdout, "%s: period_ctrl month_begin = %d\n", __FILE__, data->conf->period_ctrl->month_begin); 03021 (void) xmlFree(val); 03022 } 03023 else 03024 data->conf->period_ctrl->month_begin = -1; 03026 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period_ctrl", "period", "day_begin"); 03027 val = xml_get_setting(conf, path); 03028 if (val != NULL) { 03029 data->conf->period_ctrl->day_begin = xmlXPathCastStringToNumber(val); 03030 (void) fprintf(stdout, "%s: period_ctrl day_begin = %d\n", __FILE__, data->conf->period_ctrl->day_begin); 03031 (void) xmlFree(val); 03032 } 03033 else 03034 data->conf->period_ctrl->day_begin = -1; 03036 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period_ctrl", "period", "year_end"); 03037 val = xml_get_setting(conf, path); 03038 if (val != NULL) { 03039 data->conf->period_ctrl->year_end = xmlXPathCastStringToNumber(val); 03040 (void) fprintf(stdout, "%s: period_ctrl year_end = %d\n", __FILE__, data->conf->period_ctrl->year_end); 03041 (void) xmlFree(val); 03042 } 03043 else 03044 data->conf->period_ctrl->year_end = -1; 03046 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period_ctrl", "period", "month_end"); 03047 val = xml_get_setting(conf, path); 03048 if (val != NULL) { 03049 data->conf->period_ctrl->month_end = xmlXPathCastStringToNumber(val); 03050 (void) fprintf(stdout, "%s: period_ctrl month_end = %d\n", __FILE__, data->conf->period_ctrl->month_end); 03051 (void) xmlFree(val); 03052 } 03053 else 03054 data->conf->period_ctrl->month_end = -1; 03056 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period_ctrl", "period", "day_end"); 03057 val = xml_get_setting(conf, path); 03058 if (val != NULL) { 03059 data->conf->period_ctrl->day_end = xmlXPathCastStringToNumber(val); 03060 (void) fprintf(stdout, "%s: period_ctrl day_end = %d\n", __FILE__, data->conf->period_ctrl->day_end); 03061 (void) xmlFree(val); 03062 } 03063 else 03064 data->conf->period_ctrl->day_end = -1; 03065 } 03066 else { 03067 
data->conf->period_ctrl->year_begin = -1; 03068 data->conf->period_ctrl->month_begin = -1; 03069 data->conf->period_ctrl->day_begin = -1; 03070 data->conf->period_ctrl->year_end = -1; 03071 data->conf->period_ctrl->month_end = -1; 03072 data->conf->period_ctrl->day_end = -1; 03073 } 03074 03075 03076 /**** PERIODS CONFIGURATION FOR NON-CONTROL ****/ 03077 03078 if (data->field[0].n_ls > 0) { 03079 data->conf->nperiods = 1; 03080 data->conf->period = (period_struct *) malloc(sizeof(period_struct)); 03081 if (data->conf->period == NULL) alloc_error(__FILE__, __LINE__); 03082 } 03083 else 03084 data->conf->nperiods = 0; 03085 03086 /* Loop over periods */ 03087 for (i=0; i<data->conf->nperiods; i++) { 03089 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s", "setting", "period", "downscale"); 03090 val = xml_get_setting(conf, path); 03091 if (val != NULL) { 03092 data->conf->period[i].downscale = xmlXPathCastStringToNumber(val); 03093 (void) fprintf(stdout, "%s: period downscale = %d\n", __FILE__, data->conf->period[i].downscale); 03094 (void) xmlFree(val); 03095 } 03096 else 03097 data->conf->period[i].downscale = TRUE; 03098 if (data->conf->period[i].downscale == TRUE) { 03100 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period", "period", "year_begin"); 03101 val = xml_get_setting(conf, path); 03102 if (val != NULL) { 03103 data->conf->period[i].year_begin = xmlXPathCastStringToNumber(val); 03104 (void) xmlFree(val); 03105 (void) fprintf(stdout, "%s: period #%d year_begin = %d\n", __FILE__, i+1, data->conf->period[i].year_begin); 03106 } 03107 else 03108 data->conf->period[i].year_begin = -1; 03110 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period", "period", "month_begin"); 03111 val = xml_get_setting(conf, path); 03112 if (val != NULL) { 03113 data->conf->period[i].month_begin = xmlXPathCastStringToNumber(val); 03114 (void) xmlFree(val); 03115 (void) fprintf(stdout, "%s: period #%d month_begin = %d\n", __FILE__, i+1, data->conf->period[i].month_begin); 03116 } 03117 else 03118 data->conf->period[i].month_begin = -1; 03120 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period", "period", "day_begin"); 03121 val = xml_get_setting(conf, path); 03122 if (val != NULL) { 03123 data->conf->period[i].day_begin = xmlXPathCastStringToNumber(val); 03124 (void) xmlFree(val); 03125 (void) fprintf(stdout, "%s: period #%d day_begin = %d\n", __FILE__, i+1, data->conf->period[i].day_begin); 03126 } 03127 else 03128 data->conf->period[i].day_begin = -1; 03130 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period", "period", "year_end"); 03131 val = xml_get_setting(conf, path); 03132 if (val != NULL) { 03133 data->conf->period[i].year_end = xmlXPathCastStringToNumber(val); 03134 (void) xmlFree(val); 03135 (void) fprintf(stdout, "%s: period #%d year_end = %d\n", __FILE__, i+1, data->conf->period[i].year_end); 03136 } 03137 else 03138 data->conf->period[i].year_end = -1; 03140 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", "setting", "period", "period", "month_end"); 03141 val = xml_get_setting(conf, path); 03142 if (val != NULL) { 03143 data->conf->period[i].month_end = xmlXPathCastStringToNumber(val); 03144 (void) xmlFree(val); 03145 (void) fprintf(stdout, "%s: period #%d month_end = %d\n", __FILE__, i+1, data->conf->period[i].month_end); 03146 } 03147 else 03148 data->conf->period[i].month_end = -1; 03150 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]/%s/%s", 
"setting", "period", "period", "day_end"); 03151 val = xml_get_setting(conf, path); 03152 if (val != NULL) { 03153 data->conf->period[i].day_end = xmlXPathCastStringToNumber(val); 03154 (void) xmlFree(val); 03155 (void) fprintf(stdout, "%s: period #%d day_end = %d\n", __FILE__, i+1, data->conf->period[i].day_end); 03156 } 03157 else 03158 data->conf->period[i].day_end = -1; 03159 } 03160 else { 03161 data->conf->period[i].year_begin = -1; 03162 data->conf->period[i].month_begin = -1; 03163 data->conf->period[i].day_begin = -1; 03164 data->conf->period[i].year_end = -1; 03165 data->conf->period[i].month_end = -1; 03166 data->conf->period[i].day_end = -1; 03167 } 03168 } 03169 03170 03171 /**** SEASONS CONFIGURATION ****/ 03172 03174 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "number_of_seasons"); 03175 val = xml_get_setting(conf, path); 03176 if (val != NULL) { 03177 data->conf->nseasons = (int) xmlXPathCastStringToNumber(val); 03178 (void) fprintf(stdout, "%s: number_of_seasons = %d\n", __FILE__, data->conf->nseasons); 03179 (void) xmlFree(val); 03180 03183 data->conf->season = (season_struct *) malloc(data->conf->nseasons * sizeof(season_struct)); 03184 if (data->conf->season == NULL) alloc_error(__FILE__, __LINE__); 03185 03186 data->learning->data = (learning_data_struct *) malloc(data->conf->nseasons * sizeof(learning_data_struct)); 03187 if (data->learning->data == NULL) alloc_error(__FILE__, __LINE__); 03188 03189 /* Loop over field categories */ 03190 for (cat=0; cat<NCAT; cat++) { 03191 03192 data->field[cat].precip_index = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03193 if (data->field[cat].precip_index == NULL) alloc_error(__FILE__, __LINE__); 03194 data->field[cat].analog_days = (analog_day_struct *) malloc(data->conf->nseasons * sizeof(analog_day_struct)); 03195 if (data->field[cat].analog_days == NULL) alloc_error(__FILE__, __LINE__); 03196 03197 /* Loop over large-scale fields */ 03198 for (i=0; i<data->field[cat].n_ls; i++) { 03199 if (cat == 0 || cat == 1) { 03200 /* Large-scale fields */ 03201 data->field[cat].data[i].down->mean_dist = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03202 if (data->field[cat].data[i].down->mean_dist == NULL) alloc_error(__FILE__, __LINE__); 03203 data->field[cat].data[i].down->var_dist = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03204 if (data->field[cat].data[i].down->var_dist == NULL) alloc_error(__FILE__, __LINE__); 03205 data->field[cat].data[i].down->dist = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03206 if (data->field[cat].data[i].down->dist == NULL) alloc_error(__FILE__, __LINE__); 03207 data->field[cat].data[i].down->days_class_clusters = (int **) malloc(data->conf->nseasons * sizeof(int *)); 03208 if (data->field[cat].data[i].down->days_class_clusters == NULL) alloc_error(__FILE__, __LINE__); 03209 data->field[cat].data[i].down->var_pc_norm = (double *) malloc(data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 03210 if (data->field[cat].data[i].down->var_pc_norm == NULL) alloc_error(__FILE__, __LINE__); 03211 } 03212 else { 03213 /* Secondary large-scale fields */ 03214 data->field[cat].data[i].down->smean_norm = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03215 if (data->field[cat].data[i].down->smean_norm == NULL) alloc_error(__FILE__, __LINE__); 03216 data->field[cat].data[i].down->mean = (double *) malloc(data->conf->nseasons * sizeof(double)); 03217 if (data->field[cat].data[i].down->mean == NULL) 
alloc_error(__FILE__, __LINE__); 03218 data->field[cat].data[i].down->var = (double *) malloc(data->conf->nseasons * sizeof(double)); 03219 if (data->field[cat].data[i].down->var == NULL) alloc_error(__FILE__, __LINE__); 03220 data->field[cat].data[i].down->delta = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03221 if (data->field[cat].data[i].down->delta == NULL) alloc_error(__FILE__, __LINE__); 03222 data->field[cat].data[i].down->delta_dayschoice = (double ***) malloc(data->conf->nseasons * sizeof(double **)); 03223 if (data->field[cat].data[i].down->delta_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 03224 data->field[cat].data[i].down->sup_val_norm = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03225 if (data->field[cat].data[i].down->sup_val_norm == NULL) alloc_error(__FILE__, __LINE__); 03226 /* Only needed for secondary large-scale control field */ 03227 if (cat == 3) { 03228 data->field[cat].data[i].down->smean_2d = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03229 if (data->field[cat].data[i].down->smean_2d == NULL) alloc_error(__FILE__, __LINE__); 03230 data->field[cat].data[i].down->svar_2d = (double **) malloc(data->conf->nseasons * sizeof(double *)); 03231 if (data->field[cat].data[i].down->svar_2d == NULL) alloc_error(__FILE__, __LINE__); 03232 } 03233 } 03234 } 03235 } 03236 03237 /* Loop over seasons: season-dependent parameters */ 03238 for (i=0; i<data->conf->nseasons; i++) { 03239 03240 data->learning->data[i].time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 03241 if (data->learning->data[i].time_s == NULL) alloc_error(__FILE__, __LINE__); 03242 03244 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "number_of_clusters", i+1); 03245 val = xml_get_setting(conf, path); 03246 if (val != NULL) { 03247 data->conf->season[i].nclusters = xmlXPathCastStringToNumber(val); 03248 (void) fprintf(stdout, "%s: season #%d number_of_clusters = %d\n", __FILE__, i+1, data->conf->season[i].nclusters); 03249 (void) xmlFree(val); 03250 } 03251 else 03252 data->conf->season[i].nclusters = -1; 03253 03255 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "number_of_regression_vars", i+1); 03256 val = xml_get_setting(conf, path); 03257 if (val != NULL) { 03258 data->conf->season[i].nreg = xmlXPathCastStringToNumber(val); 03259 (void) fprintf(stdout, "%s: season #%d number_of_regression_vars = %d\n", __FILE__, i+1, data->conf->season[i].nreg); 03260 (void) xmlFree(val); 03261 } 03262 else 03263 data->conf->season[i].nreg = -1; 03264 03265 if ( ! ((data->conf->season[i].nreg == data->conf->season[i].nclusters) || 03266 ( data->conf->season[i].nreg == data->conf->season[i].nclusters+1 ) ) ) { 03267 (void) fprintf(stderr, "%s: For season=%d, invalid correspondence between number_of_clusters=%d and number_of_regression_vars=%d. number_of_regression_vars should be equal to number_of_clusters or number_of_clusters+1 (temperature as supplemental regression variable). 
Aborting.\n", 03268 __FILE__, i, data->conf->season[i].nclusters, data->conf->season[i].nreg); 03269 return -1; 03270 } 03271 03273 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "number_of_days_search", i+1); 03274 val = xml_get_setting(conf, path); 03275 if (val != NULL) { 03276 data->conf->season[i].ndays = xmlXPathCastStringToNumber(val); 03277 (void) fprintf(stdout, "%s: season #%d number_of_days_search = %d\n", __FILE__, i+1, data->conf->season[i].ndays); 03278 (void) xmlFree(val); 03279 } 03280 else 03281 data->conf->season[i].ndays = 10; 03282 03284 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "number_of_days_choices", i+1); 03285 val = xml_get_setting(conf, path); 03286 if (val != NULL) { 03287 data->conf->season[i].ndayschoices = xmlXPathCastStringToNumber(val); 03288 (void) fprintf(stdout, "%s: season #%d number_of_days_choices = %d\n", __FILE__, i+1, data->conf->season[i].ndayschoices); 03289 (void) xmlFree(val); 03290 } 03291 else 03292 if (i == 0 || i == 1) 03293 data->conf->season[i].ndayschoices = 16; 03294 else 03295 data->conf->season[i].ndayschoices = 11; 03296 03298 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "number_of_days_choices_min", i+1); 03299 val = xml_get_setting(conf, path); 03300 if (val != NULL) { 03301 data->conf->season[i].ndayschoices_min = xmlXPathCastStringToNumber(val); 03302 (void) fprintf(stdout, "%s: season #%d number_of_days_choices_min = %d\n", __FILE__, i+1, data->conf->season[i].ndayschoices_min); 03303 (void) xmlFree(val); 03304 } 03305 else 03306 data->conf->season[i].ndayschoices_min = 5; 03307 if (data->conf->season[i].ndayschoices_min > data->conf->season[i].ndayschoices) { 03308 (void) fprintf(stderr, "%s: WARNING: number_of_days_choices_min (%d) > number_of_days_choices (%d). 
Setting number_of_days_choices_min = number_of_days_choices (%d)\n", __FILE__, data->conf->season[i].ndayschoices_min, data->conf->season[i].ndayschoices, data->conf->season[i].ndayschoices); 03309 data->conf->season[i].ndayschoices_min = data->conf->season[i].ndayschoices; 03310 } 03311 03313 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "days_shuffle", i+1); 03314 val = xml_get_setting(conf, path); 03315 if (val != NULL) { 03316 data->conf->season[i].shuffle = xmlXPathCastStringToNumber(val); 03317 (void) fprintf(stdout, "%s: season #%d days_shuffle = %d\n", __FILE__, i+1, data->conf->season[i].shuffle); 03318 (void) xmlFree(val); 03319 } 03320 else 03321 if (i == 0 || i == 1) 03322 data->conf->season[i].shuffle = TRUE; 03323 else 03324 data->conf->season[i].shuffle = FALSE; 03325 03327 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "secondary_field_choice", i+1); 03328 val = xml_get_setting(conf, path); 03329 if (val != NULL) { 03330 data->conf->season[i].secondary_choice = xmlXPathCastStringToNumber(val); 03331 (void) fprintf(stdout, "%s: season #%d secondary_field_choice = %d\n", __FILE__, i+1, data->conf->season[i].secondary_choice); 03332 (void) xmlFree(val); 03333 } 03334 else 03335 if (i == 0 || i == 1) 03336 data->conf->season[i].secondary_choice = FALSE; 03337 else 03338 data->conf->season[i].secondary_choice = TRUE; 03339 03341 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "secondary_field_main_choice", i+1); 03342 val = xml_get_setting(conf, path); 03343 if (val != NULL) { 03344 data->conf->season[i].secondary_main_choice = xmlXPathCastStringToNumber(val); 03345 (void) fprintf(stdout, "%s: season #%d secondary_field_main_choice = %d\n", __FILE__, i+1, data->conf->season[i].secondary_main_choice); 03346 (void) xmlFree(val); 03347 } 03348 else 03349 if (data->conf->season[i].secondary_choice == FALSE) 03350 data->conf->season[i].secondary_main_choice = TRUE; 03351 else 03352 data->conf->season[i].secondary_main_choice = FALSE; 03353 03355 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "secondary_covariance", i+1); 03356 val = xml_get_setting(conf, path); 03357 if (val != NULL) { 03358 data->conf->season[i].secondary_cov = xmlXPathCastStringToNumber(val); 03359 (void) fprintf(stdout, "%s: season #%d secondary_covariance = %d\n", __FILE__, i+1, data->conf->season[i].secondary_cov); 03360 (void) xmlFree(val); 03361 } 03362 else 03363 data->conf->season[i].secondary_cov = FALSE; 03364 03366 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]/@nmonths", "setting", "season", i+1); 03367 val = xml_get_setting(conf, path); 03368 if (val != NULL) { 03369 data->conf->season[i].nmonths = xmlXPathCastStringToNumber(val); 03370 (void) fprintf(stdout, "%s: season #%d number_of_months = %d\n", __FILE__, i+1, data->conf->season[i].nmonths); 03371 data->conf->season[i].month = (int *) malloc(data->conf->season[i].nmonths * sizeof(int)); 03372 if (data->conf->season[i].month == NULL) alloc_error(__FILE__, __LINE__); 03373 (void) sprintf(path, "/configuration/%s/%s[@id=\"%d\"]", "setting", "season", i+1); 03374 (void) xmlFree(val); 03375 val = xml_get_setting(conf, path); 03376 if (val != NULL) { 03377 token = NULL; 03378 token = strtok_r((char *) val, " ", &saveptr); 03379 for (j=0; j<data->conf->season[i].nmonths; j++) { 03380 if (token != NULL) { 03381 (void) sscanf(token, "%d", &(data->conf->season[i].month[j])); 03382 (void) fprintf(stdout, "%s: season #%d month=%d\n", __FILE__, i+1, 
data->conf->season[i].month[j]); 03383 token = strtok_r(NULL, " ", &saveptr); 03384 } 03385 } 03386 (void) xmlFree(val); 03387 } 03388 } 03389 else 03390 data->conf->season[i].nmonths = 0; 03391 } 03392 } 03393 else { 03394 (void) fprintf(stderr, "%s: Invalid number_of_seasons value %s in configuration file. Aborting.\n", __FILE__, val); 03395 return -1; 03396 } 03397 03398 /**** ANALOG DATA CONFIGURATION ****/ 03399 03401 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "output_only"); 03402 val = xml_get_setting(conf, path); 03403 if (val != NULL) 03404 data->conf->output_only = (int) strtol((char *) val, (char **)NULL, 10); 03405 else 03406 data->conf->output_only = FALSE; 03407 if (data->conf->output_only != FALSE && data->conf->output_only != TRUE) { 03408 (void) fprintf(stderr, "%s: Invalid or missing analog data output_only value %s in configuration file. Aborting.\n", __FILE__, val); 03409 return -1; 03410 } 03411 (void) fprintf(stdout, "%s: analog data output_only=%d\n", __FILE__, data->conf->output_only); 03412 if (val != NULL) 03413 (void) xmlFree(val); 03414 if (data->conf->output_only == TRUE) { 03415 if (data->learning->learning_provided == FALSE) { 03416 (void) fprintf(stderr, "%s: WARNING: Desactivating learning process because option for output only has been set!\n", __FILE__); 03417 data->learning->learning_provided = TRUE; 03418 } 03419 if (data->learning->learning_save == TRUE) { 03420 (void) fprintf(stderr, "%s: WARNING: Desactivating learning save process because option for output only has been set!\n", __FILE__); 03421 data->learning->learning_save = FALSE; 03422 } 03423 if (data->conf->learning_maskfile->use_mask == TRUE) { 03424 (void) fprintf(stderr, "%s: WARNING: Desactivating use_mask for learning because option for output only has been set!\n", __FILE__); 03425 data->conf->learning_maskfile->use_mask = FALSE; 03426 } 03427 if (data->reg->reg_save == TRUE) { 03428 (void) fprintf(stderr, "%s: WARNING: Desactivating regression save process because option for output only has been set!\n", __FILE__); 03429 data->reg->reg_save = FALSE; 03430 } 03431 if (data->secondary_mask->use_mask == TRUE) { 03432 (void) fprintf(stderr, "%s: WARNING: Desactivating use_mask for secondary large-scale fields because option for output only has been set!\n", __FILE__); 03433 data->secondary_mask->use_mask = FALSE; 03434 } 03435 } 03436 03438 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "output_downscaling_data"); 03439 val = xml_get_setting(conf, path); 03440 if (val != NULL) 03441 data->conf->output = (int) strtol((char *) val, (char **)NULL, 10); 03442 else 03443 data->conf->output = TRUE; 03444 if (data->conf->output != FALSE && data->conf->output != TRUE) { 03445 (void) fprintf(stderr, "%s: Invalid or missing downscaling output_downscaling_data value %s in configuration file. 
Aborting.\n", __FILE__, val); 03446 return -1; 03447 } 03448 (void) fprintf(stdout, "%s: downscaling output_downscaling_data=%d\n", __FILE__, data->conf->output); 03449 if (val != NULL) 03450 (void) xmlFree(val); 03451 03453 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "analog_save"); 03454 val = xml_get_setting(conf, path); 03455 if (val != NULL) 03456 data->conf->analog_save = (int) strtol((char *) val, (char **)NULL, 10); 03457 else 03458 data->conf->analog_save = FALSE; 03459 if (data->conf->analog_save != FALSE && data->conf->analog_save != TRUE) { 03460 (void) fprintf(stderr, "%s: Invalid or missing analog data analog_save value %s in configuration file. Aborting.\n", __FILE__, val); 03461 return -1; 03462 } 03463 (void) fprintf(stdout, "%s: analog data analog_save=%d\n", __FILE__, data->conf->analog_save); 03464 if (val != NULL) 03465 (void) xmlFree(val); 03466 03468 if ( (data->conf->analog_save == TRUE || data->conf->output_only == TRUE) && data->conf->period_ctrl->downscale == TRUE) { 03469 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "analog_file_ctrl"); 03470 val = xml_get_setting(conf, path); 03471 if (val != NULL) { 03472 data->conf->analog_file_ctrl = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 03473 if (data->conf->analog_file_ctrl == NULL) alloc_error(__FILE__, __LINE__); 03474 (void) strcpy(data->conf->analog_file_ctrl, (char *) val); 03475 (void) fprintf(stdout, "%s: analog_file_ctrl = %s\n", __FILE__, data->conf->analog_file_ctrl); 03476 (void) xmlFree(val); 03477 } 03478 else { 03479 (void) fprintf(stderr, "%s: Missing analog_file_ctrl setting. Aborting.\n", __FILE__); 03480 (void) xmlFree(val); 03481 return -1; 03482 } 03483 } 03484 03486 if (data->conf->analog_save == TRUE || data->conf->output_only == TRUE) { 03487 (void) sprintf(path, "/configuration/%s[@name=\"%s\"]", "setting", "analog_file_other"); 03488 val = xml_get_setting(conf, path); 03489 if (val != NULL) { 03490 data->conf->analog_file_other = (char *) malloc((xmlStrlen(val)+1) * sizeof(char)); 03491 if (data->conf->analog_file_other == NULL) alloc_error(__FILE__, __LINE__); 03492 (void) strcpy(data->conf->analog_file_other, (char *) val); 03493 (void) fprintf(stdout, "%s: analog_file_other = %s\n", __FILE__, data->conf->analog_file_other); 03494 (void) xmlFree(val); 03495 } 03496 else { 03497 (void) fprintf(stderr, "%s: Missing analog_file_other setting. Aborting.\n", __FILE__); 03498 (void) xmlFree(val); 03499 return -1; 03500 } 03501 } 03502 03503 /* Warning for some combinations of settings */ 03504 for (i=0; i<data->conf->obs_var->nobs_var; i++) { 03505 if (strcmp(data->conf->obs_var->netcdfname[i], "rsds") && strcmp(data->conf->obs_var->clim[i], "no") ) 03506 for (ii=0; ii<data->conf->nseasons; ii++) 03507 if (data->conf->season[ii].ndays > 10) 03508 fprintf(stderr, "%s: WARNING: Number of days to search around downscaled date is greater than 10 at +-%d days and Global Solar Radiation output variable has not the clim setting set to yes.\n", __FILE__, data->conf->season[i].ndays); 03509 } 03510 03511 /* Free memory */ 03512 (void) xml_free_config(conf); 03513 (void) xmlCleanupParser(); 03514 (void) free(path); 03515 03516 /* Success status */ 03517 return 0; 03518 }
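For orientation, here is a minimal caller-side sketch, assuming only the conf_struct/season_struct fields that appear in the listing above (the driver function itself is hypothetical and not part of the library): it loads the configuration with load_conf() and prints the per-season analog-search settings, including the defaults applied when a setting is absent from the XML file. It assumes this header and <stdio.h> are included.

/* Hypothetical driver sketch: parse the configuration and dump the per-season
   analog-search settings filled in (or defaulted) by load_conf(). */
static int dump_season_settings(data_struct *data, char *fileconf) {
  int s;
  if (load_conf(data, fileconf) != 0) {
    (void) fprintf(stderr, "Cannot parse configuration file %s. Aborting.\n", fileconf);
    return -1;
  }
  for (s=0; s<data->conf->nseasons; s++)
    (void) printf("season #%d: ndays=%d ndayschoices=%d (min=%d) shuffle=%d secondary_choice=%d\n",
                  s+1, data->conf->season[s].ndays, data->conf->season[s].ndayschoices,
                  data->conf->season[s].ndayschoices_min, data->conf->season[s].shuffle,
                  data->conf->season[s].secondary_choice);
  return 0;
}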
int merge_seasonal_data(double *buf_merged, double *buf, analog_day_struct analog_days,
                        int *merged_itimes, int dimx, int dimy, int ntimes_merged, int ntimes)
Merge seasonal 3D double field data using analog day structure.
Parameters:
    [out]  buf_merged      3D field, dimx x dimy x ntimes_merged
    [in]   buf             3D field, dimx x dimy x ntimes
    [in]   analog_days     Analog day time indexes and dates, with the corresponding dates being downscaled (non-merged)
    [in]   merged_itimes   Mapping from indexes of the whole time vector to indexes of the merged time vector, which may span less than the whole year
    [in]   dimx            X dimension
    [in]   dimy            Y dimension
    [in]   ntimes_merged   Number of time steps (days) to downscale for this period, all seasons merged
    [in]   ntimes          Number of time steps (days) to downscale for this period (current season)
Definition at line 59 of file merge_seasonal_data.c.
References analog_day_struct::tindex_s_all.
Referenced by wt_downscaling().
int merge_seasonal_data(double *buf_merged, double *buf, analog_day_struct analog_days,
                        int *merged_itimes, int dimx, int dimy, int ntimes_merged, int ntimes)
{
  int t;         /* Time loop counter */
  int i;         /* Loop counter */
  int j;         /* Loop counter */
  int curindex;  /* Current index in the merged times vector */
  int index_all; /* Current index in the whole time vector */

  /* Process each downscaled day for a specific season subperiod */
  for (t=0; t<ntimes; t++) {
    /* Index of season-specific time into ntime_ls whole time vector */
    index_all = analog_days.tindex_s_all[t];
    /* Retrieve index in merged time vector from index of whole time vector ntime_ls */
    curindex = merged_itimes[index_all];
    /* Retrieve values */
    for (i=0; i<dimx; i++)
      for (j=0; j<dimy; j++)
        buf_merged[i+j*dimx+curindex*dimx*dimy] = buf[i+j*dimx+t*dimx*dimy];
  }

  /* Success status */
  return 0;
}
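A minimal usage sketch, assuming the simple case in which the merged period keeps every time step of the whole ntime_ls time vector; the helper function, the *_season variable names and the assumption that buf_merged is pre-allocated to dimx x dimy x ntime_ls are illustrative only. It builds merged_itimes as a chronological mapping and then merges one season-specific field.

/* Sketch only: build merged_itimes (whole-time index -> merged index) for the
   simple case where every time step is kept, then merge one season of data. */
static int merge_one_season_example(double *buf_merged, double *buf_season,
                                    analog_day_struct analog_days_season,
                                    int dimx, int dimy, int ntime_ls, int ntimes_season) {
  int *merged_itimes;
  int t;
  int istat;

  merged_itimes = (int *) malloc(ntime_ls * sizeof(int));
  if (merged_itimes == NULL) alloc_error(__FILE__, __LINE__);
  for (t=0; t<ntime_ls; t++)
    merged_itimes[t] = t;   /* chronological 1:1 mapping (illustrative) */

  /* Fields are row-major with X varying fastest: buf[x + y*dimx + t*dimx*dimy] */
  istat = merge_seasonal_data(buf_merged, buf_season, analog_days_season, merged_itimes,
                              dimx, dimy, ntime_ls, ntimes_season);

  (void) free(merged_itimes);
  return istat;
}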
int merge_seasonal_data_2d(double **buf_merged, double **buf, analog_day_struct analog_days,
                           int *merged_itimes, int dimx, int dimy, int supdim,
                           int ntimes_merged, int ntimes)
Merge seasonal 3D double field data using the analog day structure, with an additional (supplemental) dimension for each element.
Parameters:
    [out]  buf_merged      3D field, dimx x dimy x ntimes_merged, with supdim values per element
    [in]   buf             3D field, dimx x dimy x ntimes, with supdim values per element
    [in]   analog_days     Analog day time indexes and dates, with the corresponding dates being downscaled (non-merged)
    [in]   merged_itimes   Mapping from indexes of the whole time vector to indexes of the merged time vector, which may span less than the whole year
    [in]   dimx            X dimension
    [in]   dimy            Y dimension
    [in]   supdim          Supplemental dimension
    [in]   ntimes_merged   Number of time steps (days) to downscale for this period, all seasons merged
    [in]   ntimes          Number of time steps (days) to downscale for this period (current season)
Definition at line 59 of file merge_seasonal_data_2d.c.
References merge_seasonal_data_2d(), and analog_day_struct::tindex_s_all.
Referenced by merge_seasonal_data_2d(), and wt_downscaling().
int merge_seasonal_data_2d(double **buf_merged, double **buf, analog_day_struct analog_days,
                           int *merged_itimes, int dimx, int dimy, int supdim,
                           int ntimes_merged, int ntimes)
{
  int t;         /* Time loop counter */
  int i;         /* Loop counter */
  int j;         /* Loop counter */
  int ii;        /* Loop counter */
  int curindex;  /* Current index in the merged times vector */
  int index_all; /* Current index in the whole time vector */

  /* Process each downscaled day for a specific season subperiod */
  for (t=0; t<ntimes; t++) {
    /* Index of season-specific time into ntime_ls whole time vector */
    index_all = analog_days.tindex_s_all[t];
    /* Retrieve index in merged time vector from index of whole time vector ntime_ls */
    curindex = merged_itimes[index_all];
    /* Retrieve values */
    for (i=0; i<dimx; i++)
      for (j=0; j<dimy; j++)
        for (ii=0; ii<supdim; ii++)
          buf_merged[i+j*dimx+curindex*dimx*dimy][ii] = buf[i+j*dimx+t*dimx*dimy][ii];
  }

  /* Success status */
  return 0;
}
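The double** layout is worth spelling out: each (x, y, t) element is a pointer to supdim contiguous values (e.g., one value per supplemental level or component). A minimal allocation sketch under that assumption; how the real driver allocates these buffers is not shown in this page, so the helper below is illustrative only.

/* Illustrative allocation of a buffer compatible with merge_seasonal_data_2d():
   one pointer per (x, y, t) element, each pointing to supdim values. */
static double **alloc_supdim_buffer(int dimx, int dimy, int ntimes, int supdim) {
  double **buf;
  int n;
  int nelems = dimx * dimy * ntimes;

  buf = (double **) malloc(nelems * sizeof(double *));
  if (buf == NULL) alloc_error(__FILE__, __LINE__);
  for (n=0; n<nelems; n++) {
    buf[n] = (double *) malloc(supdim * sizeof(double));
    if (buf[n] == NULL) alloc_error(__FILE__, __LINE__);
  }
  return buf;
}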
int merge_seasonal_data_i(int *buf_merged, int *buf, analog_day_struct analog_days,
                          int *merged_itimes, int dimx, int dimy, int ntimes_merged, int ntimes)
Merge seasonal 3D integer field data using analog day structure.
Parameters:
    [out]  buf_merged      3D field, dimx x dimy x ntimes_merged
    [in]   buf             3D field, dimx x dimy x ntimes
    [in]   analog_days     Analog day time indexes and dates, with the corresponding dates being downscaled (non-merged)
    [in]   merged_itimes   Mapping from indexes of the whole time vector to indexes of the merged time vector, which may span less than the whole year
    [in]   dimx            X dimension
    [in]   dimy            Y dimension
    [in]   ntimes_merged   Number of time steps (days) to downscale for this period, all seasons merged
    [in]   ntimes          Number of time steps (days) to downscale for this period (current season)
Definition at line 59 of file merge_seasonal_data_i.c.
References analog_day_struct::tindex_s_all.
Referenced by wt_downscaling().
int merge_seasonal_data_i(int *buf_merged, int *buf, analog_day_struct analog_days,
                          int *merged_itimes, int dimx, int dimy, int ntimes_merged, int ntimes)
{
  int t;         /* Time loop counter */
  int i;         /* Loop counter */
  int j;         /* Loop counter */
  int curindex;  /* Current index in the merged times vector */
  int index_all; /* Current index in the whole time vector */

  /* Process each downscaled day for a specific season subperiod */
  for (t=0; t<ntimes; t++) {
    /* Index of season-specific time into ntime_ls whole time vector */
    index_all = analog_days.tindex_s_all[t];
    /* Retrieve index in merged time vector from index of whole time vector ntime_ls */
    curindex = merged_itimes[index_all];
    /* Retrieve values */
    for (i=0; i<dimx; i++)
      for (j=0; j<dimy; j++)
        buf_merged[i+j*dimx+curindex*dimx*dimy] = buf[i+j*dimx+t*dimx*dimy];
  }

  /* Success status */
  return 0;
}
int merge_seasons(analog_day_struct analog_days_merged, analog_day_struct analog_days,
                  int *merged_itimes, int ntimes_merged, int ntimes)
Merge seasonal analog date data.
Parameters:
    [out]  analog_days_merged   Analog day time indexes and dates, with the corresponding dates being downscaled, all seasons merged
    [in]   analog_days          Analog day time indexes and dates, with the corresponding dates being downscaled (current season)
    [in]   merged_itimes        Mapping from indexes of the whole time vector to indexes of the merged time vector, which may span less than the whole year
    [in]   ntimes_merged        Number of time steps (days) to downscale for this period, all seasons merged
    [in]   ntimes               Number of time steps (days) to downscale for this period (current season)
Definition at line 58 of file merge_seasons.c.
References alloc_error(), analog_day_struct::analog_dayschoice, tstruct::day, analog_day_struct::day, analog_day_struct::day_s, tstruct::hour, analog_day_struct::metric_norm, tstruct::min, tstruct::month, analog_day_struct::month, analog_day_struct::month_s, analog_day_struct::ndayschoice, tstruct::sec, analog_day_struct::time, analog_day_struct::tindex_all, analog_day_struct::tindex_s_all, tstruct::year, analog_day_struct::year, and analog_day_struct::year_s.
Referenced by wt_downscaling().
int merge_seasons(analog_day_struct analog_days_merged, analog_day_struct analog_days,
                  int *merged_itimes, int ntimes_merged, int ntimes)
{
  int t;         /* Time loop counter */
  int i;         /* Loop counter */
  int curindex;  /* Current index in the merged times vector */
  int index_all; /* Current index in the whole time vector */

  /* Process each downscaled day for a specific season subperiod */
  for (t=0; t<ntimes; t++) {
    /* Index of season-specific time into ntime_ls whole time vector */
    index_all = analog_days.tindex_s_all[t];
    /* Retrieve index in merged time vector from index of whole time vector ntime_ls */
    curindex = merged_itimes[index_all];
    /* Retrieve values */
    analog_days_merged.tindex_all[curindex] = analog_days.tindex_all[t];
    analog_days_merged.time[curindex] = analog_days.time[t];
    analog_days_merged.year[curindex] = analog_days.year[t];
    analog_days_merged.month[curindex] = analog_days.month[t];
    analog_days_merged.day[curindex] = analog_days.day[t];

    analog_days_merged.tindex_s_all[curindex] = analog_days.tindex_s_all[t];
    analog_days_merged.year_s[curindex] = analog_days.year_s[t];
    analog_days_merged.month_s[curindex] = analog_days.month_s[t];
    analog_days_merged.day_s[curindex] = analog_days.day_s[t];

    analog_days_merged.ndayschoice[curindex] = analog_days.ndayschoice[t];
    if (analog_days_merged.analog_dayschoice[curindex] == NULL) {
      analog_days_merged.analog_dayschoice[curindex] =
        (tstruct *) malloc(analog_days_merged.ndayschoice[curindex] * sizeof(tstruct));
      if (analog_days_merged.analog_dayschoice[curindex] == NULL) alloc_error(__FILE__, __LINE__);
    }
    if (analog_days_merged.metric_norm[curindex] == NULL) {
      analog_days_merged.metric_norm[curindex] =
        (float *) malloc(analog_days_merged.ndayschoice[curindex] * sizeof(float));
      if (analog_days_merged.metric_norm[curindex] == NULL) alloc_error(__FILE__, __LINE__);
    }
    for (i=0; i<analog_days_merged.ndayschoice[curindex]; i++) {
      analog_days_merged.metric_norm[curindex][i] = analog_days.metric_norm[t][i];
      analog_days_merged.analog_dayschoice[curindex][i].year = analog_days.analog_dayschoice[t][i].year;
      analog_days_merged.analog_dayschoice[curindex][i].month = analog_days.analog_dayschoice[t][i].month;
      analog_days_merged.analog_dayschoice[curindex][i].day = analog_days.analog_dayschoice[t][i].day;
      analog_days_merged.analog_dayschoice[curindex][i].hour = analog_days.analog_dayschoice[t][i].hour;
      analog_days_merged.analog_dayschoice[curindex][i].min = analog_days.analog_dayschoice[t][i].min;
      analog_days_merged.analog_dayschoice[curindex][i].sec = analog_days.analog_dayschoice[t][i].sec;
    }
  }

  /* Success status */
  return 0;
}
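Taken together, the merge functions are applied season by season, as a driver such as wt_downscaling() might do. The sketch below assumes that analog_days[s], buf[s] and ntimes[s] hold the per-season results and that the *_merged targets are pre-allocated for ntimes_merged entries; all of these names are illustrative, not the library's actual variables, and error handling is omitted.

/* Sketch: fold the per-season analog selections and one per-season field into
   the all-seasons (merged) structures. */
for (s=0; s<nseasons; s++) {
  (void) merge_seasons(analog_days_merged, analog_days[s], merged_itimes,
                       ntimes_merged, ntimes[s]);
  (void) merge_seasonal_data(buf_merged, buf[s], analog_days[s], merged_itimes,
                             dimx, dimy, ntimes_merged, ntimes[s]);
}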
int output_downscaled_analog(analog_day_struct analog_days, double *delta, int output_month_begin,
                             char *output_path, char *config, char *time_units, char *cal_type,
                             double deltat, int file_format, int file_compression,
                             int file_compression_level, int debug, info_struct *info,
                             var_struct *obs_var, period_struct *period, double *time_ls, int ntime)
Read analog day data and write it out for the downscaled period.
Parameters:
    [in]  analog_days               Analog day time indexes and dates, with the corresponding dates being downscaled
    [in]  delta                     Temperature difference to apply to the analog day data
    [in]  output_month_begin        First month of the yearly output files
    [in]  output_path               Output directory
    [in]  config                    Whole configuration text (stored in the output files)
    [in]  time_units                Output base time units
    [in]  cal_type                  Output calendar type
    [in]  deltat                    Absolute large-scale temperature difference threshold above which the correction is applied
    [in]  file_format               File format version for NetCDF
    [in]  file_compression          Compression flag for the NetCDF-4 file format
    [in]  file_compression_level    Compression level for the NetCDF-4 file format
    [in]  debug                     Debugging supplemental info (TRUE or FALSE)
    [in]  info                      General metadata information structure for the NetCDF output files
    [in]  obs_var                   Input/output observation variables data structure
    [in]  period                    Period structure for the downscaling output
    [in]  time_ls                   Time values
    [in]  ntime                     Number of times (dimension length)
Add algorithm configuration
Retrieve temperature change and apply to analog day temperature and other variables
Definition at line 59 of file output_downscaled_analog.c.
References var_struct::acronym, alloc_error(), alt_to_press(), var_struct::altitude, var_struct::altitudename, calc_etp_mf(), info_struct::contact_email, info_struct::contact_name, info_field_struct::coordinates, proj_struct::coords, info_struct::country, create_netcdf(), info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, analog_day_struct::day, time_vect_struct::day, period_struct::day_begin, period_struct::day_end, analog_day_struct::day_s, var_struct::delta, info_struct::description, var_struct::dimcoords, var_struct::dimxname, var_struct::dimyname, info_struct::downscaling_forcing, var_struct::factor, FALSE, proj_struct::false_easting, proj_struct::false_northing, info_field_struct::fillvalue, find_str_value(), var_struct::frequency, get_time_info(), info_field_struct::grid_mapping, proj_struct::grid_mapping_name, handle_netcdf_error(), info_field_struct::height, var_struct::height, time_vect_struct::hour, info_struct::institution, info_struct::institution_model, K_TKELVIN, info_struct::keywords, proj_struct::lat0, proj_struct::latin1, proj_struct::latin2, var_struct::latname, proj_struct::latpole, proj_struct::lonc, info_field_struct::long_name, var_struct::lonname, proj_struct::lonpole, MAXPATH, info_struct::member, time_vect_struct::minutes, info_struct::model, time_vect_struct::month, analog_day_struct::month, var_struct::month_begin, period_struct::month_begin, period_struct::month_end, analog_day_struct::month_s, var_struct::name, proj_struct::name, var_struct::netcdfname, var_struct::nobs_var, info_struct::other_contact_email, info_struct::other_contact_name, var_struct::output, var_struct::path, var_struct::post, info_struct::processor, var_struct::proj, read_netcdf_latlon(), read_netcdf_var_2d(), read_netcdf_var_3d_2d(), read_netcdf_xy(), info_struct::scenario, info_struct::scenario_co2, time_vect_struct::seconds, info_struct::software, spechum_to_hr(), info_struct::summary, info_struct::summary_french, var_struct::template, var_struct::timename, info_struct::timestep, info_struct::title, info_struct::title_french, TRUE, info_field_struct::units, var_struct::units, info_struct::version, write_netcdf_dims_3d(), write_netcdf_var_3d_2d(), time_vect_struct::year, analog_day_struct::year, period_struct::year_begin, var_struct::year_digits, period_struct::year_end, and analog_day_struct::year_s.
Referenced by wt_downscaling().
00064 { 00085 char **infile = NULL; /* Input filename */ 00086 char *infile_alt = NULL; /* Input filename for optional altitudes */ 00087 char **outfile = NULL; /* Output filename */ 00088 char ***outfiles = NULL; /* Output filelist */ 00089 int year1 = 0; /* First year of data input file */ 00090 int year2 = 0; /* End year of data input file */ 00091 double **buf = NULL; /* Temporary data buffer */ 00092 double **bufsave = NULL; /* Temporary data buffer for averaging hourly data */ 00093 double *buftmp = NULL; /* Temporary buffer for mean temperature */ 00094 double *alt = NULL; /* Altitudes of observation points (optional) */ 00095 double *pmsl = NULL; /* Standard Pressure of observation points (optional) */ 00096 double *timeval = NULL; /* Temporary time information buffer */ 00097 double *lat = NULL; /* Temporary latitude buffer */ 00098 double *lon = NULL; /* Temporary longitude buffer */ 00099 double *y = NULL; /* Temporary Y buffer */ 00100 double *x = NULL; /* Temporary X buffer */ 00101 char *cal_type_tmp = NULL; /* Input observations calendar type (udunits) */ 00102 char *time_units_tmp = NULL; /* Input observations time units (udunits) */ 00103 double ctimeval[1]; /* Dummy time info */ 00104 int ntime_file; /* Number of times dimension */ 00105 int ntime_obs; /* Number of times dimension in observation database */ 00106 int nlon; /* Longitude dimension */ 00107 int nlat; /* Latitude dimension */ 00108 int *noutf = NULL; /* Number of files in filelist */ 00109 int found = FALSE; /* Used to tag if we found a specific date */ 00110 int *found_file = NULL; /* Used to tag if we found a specific filename in the filelist */ 00111 int output_month_end; /* Ending month for observation database */ 00112 time_vect_struct *time_s = NULL; /* Time structure for observation database */ 00113 00114 info_field_struct **info_tmp = NULL; /* Temporary field information structure */ 00115 proj_struct *proj_tmp = NULL; /* Temporary field projection structure */ 00116 00117 int tmpi; /* Temporay integer value */ 00118 double curtas; /* Current temperature value */ 00119 double newcurtas; /* New current temperature value */ 00120 char *format = NULL; /* Temporay format string */ 00121 00122 int varid_tas; /* Variable index ID */ 00123 int varid_tasmax; /* Variable index ID */ 00124 int varid_tasmin; /* Variable index ID */ 00125 int varid_prsn; /* Variable index ID */ 00126 int varid_prr; /* Variable index ID */ 00127 int varid_rlds; /* Variable index ID */ 00128 int varid_rsds; /* Variable index ID */ 00129 int varid_hur; /* Variable index ID */ 00130 int varid_hus; /* Variable index ID */ 00131 int varid_husmin; /* Variable index ID */ 00132 int varid_husmax; /* Variable index ID */ 00133 int varid_etp; /* Variable index ID */ 00134 int varid_uvas; /* Variable index ID */ 00135 int varid_prtot; /* Variable index ID */ 00136 00137 int configstrdimid; /* Variable dimension ID for configuration */ 00138 int configstroutid; /* Variable ID for configuration */ 00139 size_t start[1]; /* Start element when writing */ 00140 size_t count[1]; /* Count of elements to write */ 00141 00142 int t; /* Time loop counter */ 00143 int tl; /* Time loop counter */ 00144 int var; /* Variable counter */ 00145 int vare; /* Variable counter */ 00146 int istat; /* Diagnostic status */ 00147 int f; /* Loop counter for files */ 00148 int i; /* Loop counter */ 00149 int j; /* Loop counter */ 00150 00151 double curtime; 00152 00153 int ncoutid; 00154 ut_system *unitSystem = NULL; /* Unit System (udunits) */ 00155 ut_unit 
*dataunits = NULL; /* udunits variable */ 00156 00157 double period_begin; 00158 double period_end; 00159 00160 int year; 00161 int month; 00162 int day; 00163 int hour; 00164 int minutes; 00165 double seconds; 00166 00167 int yy; 00168 int mm; 00169 int dd; 00170 int hh; 00171 00172 int minh; 00173 int maxh; 00174 00175 char *tmpstr = NULL; 00176 short int tas_correction = TRUE; 00177 00178 /* J F M A M J J A S O N D */ 00179 static int days_per_month_reg_year[] = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }; 00180 00181 infile = (char **) malloc(obs_var->nobs_var * sizeof(char *)); 00182 if (infile == NULL) alloc_error(__FILE__, __LINE__); 00183 outfile = (char **) malloc(obs_var->nobs_var * sizeof(char *)); 00184 if (outfile == NULL) alloc_error(__FILE__, __LINE__); 00185 outfiles = (char ***) malloc(obs_var->nobs_var * sizeof(char **)); 00186 if (outfiles == NULL) alloc_error(__FILE__, __LINE__); 00187 buf = (double **) malloc(obs_var->nobs_var * sizeof(double *)); 00188 if (buf == NULL) alloc_error(__FILE__, __LINE__); 00189 for (i=0; i<obs_var->nobs_var; i++) 00190 buf[i] = NULL; 00191 if ( !strcmp(info->timestep, "daily") && !strcmp(obs_var->frequency, "hourly") ) { 00192 bufsave = (double **) malloc(obs_var->nobs_var * sizeof(double *)); 00193 if (bufsave == NULL) alloc_error(__FILE__, __LINE__); 00194 } 00195 found_file = (int *) malloc(obs_var->nobs_var * sizeof(int)); 00196 if (found_file == NULL) alloc_error(__FILE__, __LINE__); 00197 noutf = (int *) malloc(obs_var->nobs_var * sizeof(int)); 00198 if (noutf == NULL) alloc_error(__FILE__, __LINE__); 00199 info_tmp = (info_field_struct **) malloc(obs_var->nobs_var * sizeof(info_field_struct *)); 00200 if (info_tmp == NULL) alloc_error(__FILE__, __LINE__); 00201 00202 found = FALSE; 00203 for (var=0; var<obs_var->nobs_var; var++) { 00204 infile[var] = (char *) malloc(MAXPATH * sizeof(char)); 00205 if (infile[var] == NULL) alloc_error(__FILE__, __LINE__); 00206 outfile[var] = (char *) malloc(MAXPATH * sizeof(char)); 00207 if (outfile[var] == NULL) alloc_error(__FILE__, __LINE__); 00208 found_file[var] = FALSE; 00209 } 00210 format = (char *) malloc(MAXPATH * sizeof(char)); 00211 if (format == NULL) alloc_error(__FILE__, __LINE__); 00212 00213 if (output_month_begin == 1) 00214 output_month_end = 12; 00215 else 00216 output_month_end = output_month_begin - 1; 00217 00218 if (obs_var->proj->name != NULL) 00219 (void) free(obs_var->proj->name); 00220 obs_var->proj->name = NULL; 00221 00222 /* Initialize udunits */ 00223 ut_set_error_message_handler(ut_ignore); 00224 unitSystem = ut_read_xml(NULL); 00225 ut_set_error_message_handler(ut_write_to_stderr); 00226 dataunits = ut_parse(unitSystem, time_units, UT_ASCII); 00227 00228 /* Read altitudes if available, and compute pressure using standard atmosphere */ 00229 if ( strcmp(obs_var->altitude, "") ) { 00230 infile_alt = (char *) malloc(MAXPATH * sizeof(char)); 00231 if (infile_alt == NULL) alloc_error(__FILE__, __LINE__); 00232 (void) sprintf(infile_alt, "%s/%s", obs_var->path, obs_var->altitude); 00233 istat = read_netcdf_var_2d(&alt, (info_field_struct *) NULL, (proj_struct *) NULL, infile_alt, obs_var->altitudename, 00234 obs_var->dimxname, obs_var->dimyname, &nlon, &nlat, FALSE); 00235 if (istat < 0) 00236 (void) fprintf(stderr, "%s: WARNING: Cannot read observation altitude field in file %s.\n", __FILE__, infile_alt); 00237 else { 00238 pmsl = (double *) malloc(nlon*nlat*sizeof(double)); 00239 if (pmsl == NULL) alloc_error(__FILE__, __LINE__); 00240 (void) alt_to_press(pmsl, 
alt, nlon, nlat); 00241 } 00242 (void) free(infile_alt); 00243 } 00244 00245 /* Compute time limits for writing */ 00246 if (period->year_begin != -1) { 00247 (void) printf("%s: Downscaling output from %02d/%02d/%04d to %02d/%02d/%04d inclusively.\n", __FILE__, 00248 period->month_begin, period->day_begin, period->year_begin, 00249 period->month_end, period->day_end, period->year_end); 00250 istat = utInvCalendar2(period->year_begin, period->month_begin, period->day_begin, 0, 0, 0.0, dataunits, &period_begin); 00251 istat = utInvCalendar2(period->year_end, period->month_end, period->day_end, 23, 59, 0.0, dataunits, &period_end); 00252 } 00253 else { 00254 istat = utCalendar2(time_ls[0], dataunits, &year, &month, &day, &hour, &minutes, &seconds); 00255 (void) printf("%s: Downscaling whole period: %02d/%02d/%04d", __FILE__, month, day, year); 00256 istat = utCalendar2(time_ls[ntime-1], dataunits, &year, &month, &day, &hour, &minutes, &seconds); 00257 (void) printf(" to %02d/%02d/%04d inclusively.\n", month, day, year); 00258 period_begin = time_ls[0]; 00259 period_end = time_ls[ntime-1]; 00260 } 00261 00262 /* Process each downscaled day */ 00263 for (var=0; var<obs_var->nobs_var; var++) { 00264 noutf[var] = 0; 00265 outfiles[var] = NULL; 00266 } 00267 for (t=0; t<ntime; t++) { 00268 00269 /* Check if we want to write data for this date */ 00270 if (time_ls[t] >= period_begin && time_ls[t] <= period_end) { 00271 00272 /* Create output filename for writing data */ 00273 if (analog_days.month_s[t] < output_month_begin) 00274 year1 = analog_days.year_s[t] - 1; 00275 else 00276 year1 = analog_days.year_s[t]; 00277 if (output_month_begin != 1) 00278 year2 = year1 + 1; 00279 else 00280 year2 = year1; 00281 /* Process each variable and create output filenames, and output files if necessary */ 00282 for (var=0; var<obs_var->nobs_var; var++) { 00283 /* Example: evapn_1d_19790801_19800731.nc */ 00284 (void) sprintf(outfile[var], "%s/%s_1d_%04d%02d%02d_%04d%02d%02d.nc", output_path, obs_var->netcdfname[var], 00285 year1, output_month_begin, 1, 00286 year2, output_month_end, days_per_month_reg_year[output_month_end-1]); 00287 /* Check if output file has already been created */ 00288 found_file[var] = FALSE; 00289 f = 0; 00290 while (f < noutf[var] && found_file[var] == FALSE) { 00291 if ( !strcmp(outfiles[var][f], outfile[var]) ) { 00292 found_file[var] = TRUE; 00293 break; 00294 } 00295 f++; 00296 } 00297 if (found_file[var] == FALSE) { 00298 00299 if ( !strcmp(obs_var->output[var], "yes") ) { 00300 00301 /* File was not created already by this algorithm run */ 00302 outfiles[var] = (char **) realloc(outfiles[var], (noutf[var]+1) * sizeof(char *)); 00303 if (outfiles[var] == NULL) alloc_error(__FILE__, __LINE__); 00304 outfiles[var][noutf[var]++] = strdup(outfile[var]); 00305 00306 /* Verify if file exists and if we can write into it */ 00307 istat = nc_open(outfile[var], NC_WRITE, &ncoutid); 00308 00309 if (istat != NC_NOERR) { 00310 /* File does not exists */ 00311 00312 /* Create output file */ 00313 istat = create_netcdf(info->title, info->title_french, info->summary, info->summary_french, 00314 info->keywords, info->processor, info->software, 00315 info->description, info->institution, 00316 info->creator_email, info->creator_url, info->creator_name, 00317 info->version, info->scenario, info->scenario_co2, info->model, 00318 info->institution_model, info->country, info->member, 00319 info->downscaling_forcing, info->contact_email, info->contact_name, 00320 info->other_contact_email, 
info->other_contact_name, 00321 outfile[var], TRUE, file_format, file_compression); 00322 if (istat != 0) { 00323 /* In case of failure */ 00324 (void) free(outfile[var]); 00325 for (f=0; f<noutf[var]; f++) 00326 (void) free(outfiles[var][f]); 00327 if (noutf[var] > 0) 00328 (void) free(outfiles[var]); 00329 if (pmsl != NULL) (void) free(pmsl); 00330 if (alt != NULL) (void) free(alt); 00331 (void) ut_free(dataunits); 00332 (void) ut_free_system(unitSystem); 00333 return istat; 00334 } 00335 00338 istat = nc_open(outfile[var], NC_WRITE, &ncoutid); /* open NetCDF file */ 00339 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00340 00341 /* Go into redefine mode */ 00342 istat = nc_redef(ncoutid); 00343 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00344 00345 /* Define configuration */ 00346 istat = nc_def_dim(ncoutid, "configstr", strlen(config)+1, &configstrdimid); 00347 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00348 istat = nc_def_var(ncoutid, "dsclim_configuration", NC_CHAR, 1, &configstrdimid, &configstroutid); 00349 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00350 00351 /* Update also time_coverage_end and time_coverage_start global attribute */ 00352 tmpstr = (char *) malloc(MAXPATH * sizeof(char)); 00353 if (tmpstr == NULL) alloc_error(__FILE__, __LINE__); 00354 hour=0; 00355 minutes=0; 00356 seconds=0; 00357 (void) sprintf(tmpstr, "%04d-%02d-%02dT%02d:%02d:%02dZ", year1, output_month_begin, 1, hour, minutes, (int) seconds); 00358 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "time_coverage_start", strlen(tmpstr), tmpstr); 00359 hour=23; 00360 minutes=59; 00361 seconds=59; 00362 (void) sprintf(tmpstr, "%04d-%02d-%02dT%02d:%02d:%02dZ", year2, output_month_end, days_per_month_reg_year[output_month_end-1], 00363 hour, minutes, (int) seconds); 00364 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "time_coverage_end", strlen(tmpstr), tmpstr); 00365 (void) free(tmpstr); 00366 00367 /* End definition mode */ 00368 istat = nc_enddef(ncoutid); 00369 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00370 00371 /* Write configuration */ 00372 start[0] = 0; 00373 count[0] = strlen(config) + 1; 00374 istat = nc_put_vara_text(ncoutid, configstroutid, start, count, config); 00375 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00376 } 00377 else 00378 found_file[var] = TRUE; 00379 00380 /* Close the output netCDF file. 
*/ 00381 istat = ncclose(ncoutid); 00382 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00383 } 00384 } 00385 } 00386 00387 /* Create input filename for reading data */ 00388 (void) strcpy(format, "%s/%s/"); 00389 (void) strcat(format, obs_var->template); 00390 if (obs_var->month_begin != 1) { 00391 /* Months in observation files *does not* begin in January: must have 2 years in filename */ 00392 if (analog_days.month[t] < obs_var->month_begin) 00393 year1 = analog_days.year[t] - 1; 00394 else 00395 year1 = analog_days.year[t]; 00396 year2 = year1 + 1; 00397 if (obs_var->year_digits == 4) 00398 /* Process each variable and create input filenames */ 00399 for (var=0; var<obs_var->nobs_var; var++) 00400 (void) sprintf(infile[var], format, obs_var->path, obs_var->frequency, 00401 obs_var->acronym[var], year1, year2); 00402 else { 00403 tmpi = year1 / 100; 00404 year1 = year1 - (tmpi*100); 00405 tmpi = year2 / 100; 00406 year2 = year2 - (tmpi*100); 00407 /* Process each variable and create input filenames */ 00408 for (var=0; var<obs_var->nobs_var; var++) 00409 (void) sprintf(infile[var], format, obs_var->path, obs_var->frequency, 00410 obs_var->acronym[var], year1, year2); 00411 } 00412 } 00413 else { 00414 /* Months in observation files begins in January: must have 1 year in filename */ 00415 year1 = analog_days.year[t]; 00416 if (obs_var->year_digits == 4) 00417 /* Process each variable and create input filenames */ 00418 for (var=0; var<obs_var->nobs_var; var++) 00419 (void) sprintf(infile[var], format, obs_var->path, obs_var->frequency, 00420 obs_var->acronym[var], year1); 00421 else { 00422 tmpi = year1 / 100; 00423 year1 = year1 - (tmpi*100); 00424 /* Process each variable and create input filenames */ 00425 for (var=0; var<obs_var->nobs_var; var++) 00426 (void) sprintf(infile[var], format, obs_var->path, obs_var->frequency, 00427 obs_var->acronym[var], year1); 00428 } 00429 } 00430 00431 /* Get time information for first input observation file and assume all files are alike */ 00432 time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 00433 if (time_s == NULL) alloc_error(__FILE__, __LINE__); 00434 istat = get_time_info(time_s, &timeval, &time_units_tmp, &cal_type_tmp, &ntime_obs, infile[0], obs_var->timename, FALSE); 00435 (void) free(cal_type_tmp); 00436 (void) free(time_units_tmp); 00437 (void) free(timeval); 00438 if (istat < 0) { 00439 for (var=0; var<obs_var->nobs_var; var++) { 00440 (void) free(outfile[var]); 00441 for (f=0; f<noutf[var]; f++) { 00442 (void) free(outfiles[var][f]); 00443 } 00444 if (noutf[var] > 0) { 00445 (void) free(outfiles[var]); 00446 } 00447 } 00448 (void) free(time_s); 00449 if (pmsl != NULL) (void) free(pmsl); 00450 if (alt != NULL) (void) free(alt); 00451 (void) ut_free(dataunits); 00452 (void) ut_free_system(unitSystem); 00453 return istat; 00454 } 00455 00456 /* Find date in observation database */ 00457 #if DEBUG > 7 00458 (void) printf("Processing %d %d %d %d\n",t,analog_days.year_s[t],analog_days.month_s[t],analog_days.day_s[t]); 00459 #endif 00460 00461 if ( !strcmp(obs_var->frequency, "hourly") ) { 00462 /* For hourly frequency data, find hours from 0 to 23 */ 00463 minh = 0; 00464 maxh = 23; 00465 } 00466 else { 00467 /* For daily data, only read data for one day */ 00468 minh = 0; 00469 maxh = 0; 00470 } 00471 00472 /* Loop over hours if needed */ 00473 for (hour=minh; hour<=maxh; hour++) { 00474 found = FALSE; 00475 tl = 0; 00476 00477 if ( !strcmp(obs_var->frequency, "hourly") ) { 00478 while (tl<ntime_obs 
&& found == FALSE) { 00479 #if DEBUG > 7 00480 (void) printf("%d %d %d %d %d\n",tl,time_s->year[tl],time_s->month[tl],time_s->day[tl],time_s->hour[tl]); 00481 #endif 00482 if (analog_days.year[t] == time_s->year[tl] && analog_days.month[t] == time_s->month[tl] && 00483 analog_days.day[t] == time_s->day[tl] && hour == time_s->hour[tl]) { 00484 found = TRUE; 00485 #if DEBUG > 7 00486 (void) printf("Found analog %d %d %d %d\n",tl,analog_days.year[t],analog_days.month[t],analog_days.day[t]); 00487 #endif 00488 } 00489 tl++; 00490 } 00491 } 00492 else { 00493 while (tl<ntime_obs && found == FALSE) { 00494 #if DEBUG > 7 00495 (void) printf("%d %d %d %d\n",tl,time_s->year[tl],time_s->month[tl],time_s->day[tl]); 00496 #endif 00497 if (analog_days.year[t] == time_s->year[tl] && analog_days.month[t] == time_s->month[tl] && 00498 analog_days.day[t] == time_s->day[tl]) { 00499 found = TRUE; 00500 #if DEBUG > 7 00501 (void) printf("Found analog %d %d %d %d\n",tl,analog_days.year[t],analog_days.month[t],analog_days.day[t]); 00502 #endif 00503 } 00504 tl++; 00505 } 00506 } 00507 00508 if (found == TRUE) { 00509 00510 tl--; 00511 00512 proj_tmp = (proj_struct *) malloc(sizeof(proj_struct)); 00513 if (proj_tmp == NULL) alloc_error(__FILE__, __LINE__); 00514 proj_tmp->name = NULL; 00515 proj_tmp->grid_mapping_name = NULL; 00516 00517 /* Process each variable and read data */ 00518 for (var=0; var<obs_var->nobs_var; var++) { 00519 info_tmp[var] = (info_field_struct *) malloc(sizeof(info_field_struct)); 00520 if (info_tmp[var] == NULL) alloc_error(__FILE__, __LINE__); 00521 /* Don't read variables which will be calculated : read only variables already available in datafiles */ 00522 if ( !strcmp(obs_var->post[var], "no") ) { 00523 if (proj_tmp->name != NULL) { 00524 (void) free(proj_tmp->name); 00525 proj_tmp->name = NULL; 00526 } 00527 if (proj_tmp->grid_mapping_name != NULL) { 00528 (void) free(proj_tmp->grid_mapping_name); 00529 proj_tmp->grid_mapping_name = NULL; 00530 } 00531 istat = read_netcdf_var_3d_2d(&(buf[var]), info_tmp[var], proj_tmp, infile[var], obs_var->acronym[var], 00532 obs_var->dimxname, obs_var->dimyname, obs_var->timename, 00533 tl, &nlon, &nlat, &ntime_file, debug); 00534 /* Apply factor and delta */ 00535 for (j=0; j<nlat; j++) 00536 for (i=0; i<nlon; i++) 00537 buf[var][i+j*nlon] = (buf[var][i+j*nlon] * obs_var->factor[var]) + obs_var->delta[var]; 00538 /* Overwrite units and height if it was specified in configuration file. In that case, the value is not unknown. */ 00539 if ( strcmp(obs_var->units[var], "unknown")) { 00540 (void) free(info_tmp[var]->units); 00541 info_tmp[var]->units = strdup(obs_var->units[var]); 00542 } 00543 if ( strcmp(obs_var->height[var], "unknown")) { 00544 (void) free(info_tmp[var]->height); 00545 info_tmp[var]->height = strdup(obs_var->height[var]); 00546 } 00547 } 00548 else { 00549 /* For post-processing variables, must fill in the info field structure info_tmp. 00550 The projection structure proj_tmp used is the one of the previous variable, 00551 because the first variable in the list is enforced to be a non post-processing variable 00552 when loading the configuration file. 
*/ 00553 info_tmp[var]->fillvalue = info_tmp[0]->fillvalue; 00554 info_tmp[var]->coordinates = strdup(info_tmp[0]->coordinates); 00555 info_tmp[var]->grid_mapping = strdup(info_tmp[0]->grid_mapping); 00556 info_tmp[var]->units = strdup(obs_var->units[var]); 00557 info_tmp[var]->height = strdup(obs_var->height[var]); 00558 info_tmp[var]->long_name = strdup(obs_var->name[var]); 00559 } 00560 } 00561 00562 if (obs_var->proj->name == NULL) { 00563 /* Retrieve observation grid parameters if not done already */ 00564 obs_var->proj->name = strdup(proj_tmp->name); 00565 obs_var->proj->grid_mapping_name = strdup(proj_tmp->grid_mapping_name); 00566 obs_var->proj->latin1 = proj_tmp->latin1; 00567 obs_var->proj->latin2 = proj_tmp->latin2; 00568 obs_var->proj->lonc = proj_tmp->lonc; 00569 obs_var->proj->lat0 = proj_tmp->lat0; 00570 obs_var->proj->false_easting = proj_tmp->false_easting; 00571 obs_var->proj->false_northing = proj_tmp->false_northing; 00572 00573 /* Get latitude and longitude coordinates information from first file */ 00574 istat = read_netcdf_latlon(&lon, &lat, &nlon, &nlat, obs_var->dimcoords, obs_var->proj->coords, 00575 obs_var->proj->name, obs_var->lonname, 00576 obs_var->latname, obs_var->dimxname, 00577 obs_var->dimyname, infile[0]); 00578 if ( !strcmp(obs_var->proj->name, "list") ) 00579 /* List of lat + lon points only : keep only X dimension */ 00580 nlat = 0; 00581 else 00582 /* Read coordinates information */ 00583 istat = read_netcdf_xy(&x, &y, &nlon, &nlat, obs_var->dimxname, obs_var->dimyname, 00584 obs_var->dimxname, obs_var->dimyname, infile[0]); 00585 } 00586 00587 /*** Apply modifications to data ***/ 00590 /* Find known variable IDs for correction or calculation */ 00591 varid_tas = find_str_value("tas", obs_var->netcdfname, obs_var->nobs_var); 00592 varid_tasmin = find_str_value("tasmin", obs_var->netcdfname, obs_var->nobs_var); 00593 varid_tasmax = find_str_value("tasmax", obs_var->netcdfname, obs_var->nobs_var); 00594 varid_prsn = find_str_value("prsn", obs_var->netcdfname, obs_var->nobs_var); 00595 varid_prr = find_str_value("prr", obs_var->netcdfname, obs_var->nobs_var); 00596 varid_rlds = find_str_value("rlds", obs_var->netcdfname, obs_var->nobs_var); 00597 varid_rsds = find_str_value("rsds", obs_var->netcdfname, obs_var->nobs_var); 00598 varid_hus = find_str_value("hus", obs_var->netcdfname, obs_var->nobs_var); 00599 varid_husmin = find_str_value("husmin", obs_var->netcdfname, obs_var->nobs_var); 00600 varid_husmax = find_str_value("husmax", obs_var->netcdfname, obs_var->nobs_var); 00601 varid_uvas = find_str_value("uvas", obs_var->netcdfname, obs_var->nobs_var); 00602 varid_hur = find_str_value("hur", obs_var->netcdfname, obs_var->nobs_var); 00603 varid_etp = find_str_value("evapn", obs_var->netcdfname, obs_var->nobs_var); 00604 varid_prtot = find_str_value("prtot", obs_var->netcdfname, obs_var->nobs_var); 00605 00606 tas_correction = TRUE; 00607 00608 if ( (varid_tasmax >= 0 || varid_tasmin >= 0 || varid_husmin >= 0 || varid_husmax >= 0) && !strcmp(obs_var->frequency, "hourly")) { 00609 (void) fprintf(stderr, "%s: WARNING: Cannot mix min and/or max observation variables with hourly data! Min and/or max variables will be ignored! 
\n", __FILE__); 00610 varid_tasmin = -1; 00611 varid_tasmax = -1; 00612 varid_husmin = -1; 00613 varid_husmax = -1; 00614 } 00615 00616 if ( !strcmp(obs_var->frequency, "daily") ) { 00617 if (varid_tas < 0 && ( varid_tasmin < 0 || varid_tasmax < 0 ) ) 00618 tas_correction = FALSE; 00619 } 00620 else { 00621 if (varid_tas < 0) 00622 tas_correction = FALSE; 00623 } 00624 00625 // (void) fprintf(stderr, "%s: WARNING: No temperature correction can be done to precipitation partition or infra-red radiation required temperature variables are not available! It needs at least either average daily or hourly temperature, or, with daily data, min and max temperatures.\n", __FILE__); 00626 00627 /* Correct average temperature and related variables (precipitation partition, infra-red radiation) */ 00628 if (varid_tas >= 0 && tas_correction == TRUE) { 00629 if (fabs(delta[t]) >= deltat) 00630 for (j=0; j<nlat; j++) 00631 for (i=0; i<nlon; i++) 00632 00633 if (buf[varid_tas][i+j*nlon] != info_tmp[varid_tas]->fillvalue) { 00634 00635 /* Save non-corrected temperature */ 00636 curtas = buf[varid_tas][i+j*nlon]; 00637 /* Compute new temperature */ 00638 buf[varid_tas][i+j*nlon] += delta[t]; 00639 00640 /* Compute new rain/snow partition, if needed */ 00641 if (varid_prsn != -1 && varid_prr != -1) 00642 if (buf[varid_prsn][i+j*nlon] != info_tmp[varid_prsn]->fillvalue && 00643 buf[varid_prr][i+j*nlon] != info_tmp[varid_prr]->fillvalue) 00644 if ( buf[varid_tas][i+j*nlon] >= (K_TKELVIN + 1.5) ) { 00645 buf[varid_prr][i+j*nlon] += buf[varid_prsn][i+j*nlon]; 00646 buf[varid_prsn][i+j*nlon] = 0.0; 00647 } 00648 00649 /* Compute new infra-red radiation, if needed */ 00650 if (varid_rlds != -1) 00651 if (buf[varid_rlds][i+j*nlon] != info_tmp[varid_rlds]->fillvalue) 00652 buf[varid_rlds][i+j*nlon] += (4.0 * delta[t] / curtas ) * buf[varid_rlds][i+j*nlon]; 00653 00654 } 00655 } 00656 00657 /* Correct min and max temperatures and related variables when having daily data */ 00658 if (varid_tasmax >= 0 && varid_tasmin >= 0 && tas_correction == TRUE && !strcmp(obs_var->frequency, "daily")) { 00659 if (fabs(delta[t]) >= deltat) 00660 for (j=0; j<nlat; j++) 00661 for (i=0; i<nlon; i++) 00662 00663 if (buf[varid_tasmax][i+j*nlon] != info_tmp[varid_tasmax]->fillvalue) { 00664 00665 /* Save non-corrected mean temperature */ 00666 curtas = (buf[varid_tasmax][i+j*nlon] + buf[varid_tasmin][i+j*nlon]) / 2.0; 00667 /* Compute new temperature */ 00668 buf[varid_tasmax][i+j*nlon] += delta[t]; 00669 buf[varid_tasmin][i+j*nlon] += delta[t]; 00670 /* New averaged temperature */ 00671 newcurtas = (buf[varid_tasmax][i+j*nlon] + buf[varid_tasmin][i+j*nlon]) / 2.0; 00672 00673 /* Do not perform correction twice! 
*/ 00674 if (varid_tas < 0) { 00675 /* Compute new rain/snow partition, if needed */ 00676 if (varid_prsn != -1 && varid_prr != -1) 00677 if (buf[varid_prsn][i+j*nlon] != info_tmp[varid_prsn]->fillvalue && 00678 buf[varid_prr][i+j*nlon] != info_tmp[varid_prr]->fillvalue) 00679 if ( newcurtas >= (K_TKELVIN + 1.5) ) { 00680 buf[varid_prr][i+j*nlon] += buf[varid_prsn][i+j*nlon]; 00681 buf[varid_prsn][i+j*nlon] = 0.0; 00682 } 00683 00684 /* Compute new infra-red radiation, if needed */ 00685 if (varid_rlds != -1) 00686 if (buf[varid_rlds][i+j*nlon] != info_tmp[varid_rlds]->fillvalue) 00687 buf[varid_rlds][i+j*nlon] += (4.0 * delta[t] / curtas ) * buf[varid_rlds][i+j*nlon]; 00688 } 00689 } 00690 } 00691 00692 /* Calculate only known post-processed variables */ 00693 00694 if (varid_hur >= 0) { 00695 /* Relative humidity */ 00696 if ( !strcmp(obs_var->post[varid_hur], "yes") ) { 00697 if ( varid_hus >= 0 && (varid_tas >= 0 || (varid_tasmax >= 0 && varid_tasmin >= 0 )) && pmsl != NULL ) { 00698 /* Calculate relative humidity from temperature and specific humidity */ 00699 buf[varid_hur] = (double *) malloc(nlat*nlon * sizeof(double)); 00700 if (buf[varid_hur] == NULL) alloc_error(__FILE__, __LINE__); 00701 if (varid_tas >= 0) 00702 info_tmp[varid_hur]->fillvalue = info_tmp[varid_tas]->fillvalue; 00703 else if (varid_tasmax >= 0) 00704 info_tmp[varid_hur]->fillvalue = info_tmp[varid_tasmax]->fillvalue; 00705 else 00706 info_tmp[varid_hur]->fillvalue = -9999.0; 00707 /* Create mean temperature temporary matrix when having only min and max temperature */ 00708 if (varid_tas < 0) { 00709 buftmp = (double *) malloc(nlat*nlon* sizeof(double)); 00710 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00711 info_tmp[varid_tas]->fillvalue = info_tmp[varid_tasmax]->fillvalue; 00712 for (i=0; i<(nlon*nlat); i++) 00713 if ((buf[varid_tasmax][i] != info_tmp[varid_tasmax]->fillvalue) && 00714 (buf[varid_tasmin][i] != info_tmp[varid_tasmin]->fillvalue)) 00715 buftmp[i] = (buf[varid_tasmax][i] + buf[varid_tasmin][i]) / 2.0; 00716 else 00717 buftmp[i] = info_tmp[varid_tas]->fillvalue; 00718 } 00719 else 00720 buftmp = buf[varid_tas]; 00721 (void) spechum_to_hr(buf[varid_hur], buftmp, buf[varid_hus], pmsl, info_tmp[varid_hur]->fillvalue, nlon, nlat); 00722 if (varid_tas < 0) 00723 (void) free(buftmp); 00724 } 00725 else { 00726 (void) fprintf(stderr, "%s: WARNING: Cannot calculate Relative Humidity because needed variables are not available: Specific Humidity; Averaged temperature or Min/Max temperature, Standard Pressure from altitude.\n", __FILE__); 00727 buf[varid_hur] = NULL; 00728 } 00729 } 00730 } 00731 00732 if (varid_prsn >= 0 && varid_prr >= 0 && varid_prtot >= 0) { 00733 /* Total precipitation */ 00734 if ( !strcmp(obs_var->post[varid_prtot], "yes") ) { 00735 /* Calculate total precipitation from liquid and solid precipitation */ 00736 buf[varid_prtot] = (double *) malloc(nlat*nlon * sizeof(double)); 00737 if (buf[varid_prtot] == NULL) alloc_error(__FILE__, __LINE__); 00738 info_tmp[varid_prtot]->fillvalue = info_tmp[varid_prr]->fillvalue; 00739 for (i=0; i<(nlon*nlat); i++) { 00740 if ( (buf[varid_prr][i] != info_tmp[varid_prr]->fillvalue) && (buf[varid_prsn][i] != info_tmp[varid_prsn]->fillvalue)) 00741 buf[varid_prtot][i] = buf[varid_prr][i] + buf[varid_prsn][i]; 00742 else 00743 buf[varid_prtot][i] = info_tmp[varid_prtot]->fillvalue; 00744 } 00745 } 00746 else { 00747 (void) fprintf(stderr, "%s: WARNING: Cannot calculate Total Precipitation because needed variables are not available: Liquid and 
Solid Precipitation.\n", __FILE__); 00748 buf[varid_prtot] = NULL; 00749 } 00750 } 00751 00752 if (varid_etp >= 0) { 00753 /* ETP */ 00754 if ( !strcmp(obs_var->post[varid_etp], "yes") ) { 00755 if ( varid_hus >= 0 && (varid_tas >= 0 || (varid_tasmax >= 0 && varid_tasmin >= 0 )) && varid_rsds >= 0 && varid_rlds >= 0 && 00756 varid_uvas >= 0 && pmsl != NULL ) { 00757 /* Calculate ETP */ 00758 buf[varid_etp] = (double *) malloc(nlat*nlon * sizeof(double)); 00759 if (buf[varid_etp] == NULL) alloc_error(__FILE__, __LINE__); 00760 if (varid_tas >= 0) 00761 info_tmp[varid_etp]->fillvalue = info_tmp[varid_tas]->fillvalue; 00762 else if (varid_tasmax >= 0) 00763 info_tmp[varid_etp]->fillvalue = info_tmp[varid_tasmax]->fillvalue; 00764 else 00765 info_tmp[varid_etp]->fillvalue = -9999.0; 00766 /* Create mean temperature temporary matrix when having only min and max temperature */ 00767 if (varid_tas < 0) { 00768 buftmp = (double *) malloc(nlat*nlon* sizeof(double)); 00769 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00770 for (i=0; i<(nlon*nlat); i++) { 00771 if ((buf[varid_tasmax][i] != info_tmp[varid_tasmax]->fillvalue) && 00772 (buf[varid_tasmin][i] != info_tmp[varid_tasmin]->fillvalue)) 00773 buftmp[i] = (buf[varid_tasmax][i] + buf[varid_tasmin][i]) / 2.0; 00774 else 00775 buftmp[i] = info_tmp[varid_tasmax]->fillvalue; 00776 } 00777 } 00778 else 00779 buftmp = buf[varid_tas]; 00780 (void) calc_etp_mf(buf[varid_etp], buftmp, buf[varid_hus], buf[varid_rsds], buf[varid_rlds], buf[varid_uvas], 00781 pmsl, info_tmp[varid_etp]->fillvalue, nlon, nlat); 00782 if (varid_tas < 0) 00783 (void) free(buftmp); 00784 } 00785 else { 00786 (void) fprintf(stderr, "%s: WARNING: Cannot calculate ETP because needed variables are not available: Specific Humidity; Averaged Temperature or Min/Max Temperature; Short and Long Wave Radiation; Wind Module, Standard Pressure from altitude.\n", __FILE__); 00787 buf[varid_etp] = NULL; 00788 } 00789 } 00790 } 00791 00792 /* Process each variable for writing */ 00793 for (var=0; var<obs_var->nobs_var; var++) { 00794 if ( !strcmp(obs_var->output[var], "yes") ) { 00795 /* Write dimensions of field in newly-created NetCDF output file */ 00796 if (found_file[var] == FALSE && hour == minh && buf[var] != NULL) { 00797 /* We just created output file: we need to write dimensions */ 00798 ctimeval[0] = time_ls[t]; 00799 istat = write_netcdf_dims_3d(lon, lat, x, y, alt, ctimeval, cal_type, 00800 time_units, nlon, nlat, 0, 00801 info->timestep, obs_var->proj->name, obs_var->proj->coords, 00802 obs_var->proj->grid_mapping_name, obs_var->proj->latin1, 00803 obs_var->proj->latin2, obs_var->proj->lonc, obs_var->proj->lat0, 00804 obs_var->proj->false_easting, obs_var->proj->false_northing, 00805 obs_var->proj->lonpole, obs_var->proj->latpole, 00806 obs_var->lonname, obs_var->latname, obs_var->timename, 00807 outfile[var], debug); 00808 if (istat != 0) { 00809 /* In case of failure */ 00810 (void) free(time_s->year); 00811 (void) free(time_s->month); 00812 (void) free(time_s->day); 00813 (void) free(time_s->hour); 00814 (void) free(time_s->minutes); 00815 (void) free(time_s->seconds); 00816 00817 (void) free(time_s); 00818 00819 (void) free(infile[var]); 00820 (void) free(outfile[var]); 00821 (void) free(info_tmp[var]->grid_mapping); 00822 (void) free(info_tmp[var]->units); 00823 (void) free(info_tmp[var]->height); 00824 (void) free(info_tmp[var]->coordinates); 00825 (void) free(info_tmp[var]->long_name); 00826 (void) free(info_tmp[var]); 00827 (void) free(proj_tmp->name); 00828 (void) 
free(proj_tmp->grid_mapping_name); 00829 (void) free(proj_tmp); 00830 for (f=0; f<noutf[var]; f++) 00831 (void) free(outfiles[var][f]); 00832 if (noutf[var] > 0) 00833 (void) free(outfiles[var]); 00834 (void) free(outfiles); 00835 if (pmsl != NULL) (void) free(pmsl); 00836 if (alt != NULL) (void) free(alt); 00837 (void) ut_free(dataunits); 00838 (void) ut_free_system(unitSystem); 00839 return istat; 00840 } 00841 } 00842 } 00843 } 00844 00845 /* Compute time if output timestep is hourly and not daily */ 00846 if ( !strcmp(info->timestep, "hourly") ) { 00847 istat = utCalendar2(time_ls[t], dataunits, &yy, &mm, &dd, &hh, &minutes, &seconds); 00848 istat = utInvCalendar2(yy, mm, dd, hour, 0, 0.0, dataunits, &curtime); 00849 } 00850 else 00851 curtime = time_ls[t]; 00852 00853 /* Process each variable */ 00854 for (var=0; var<obs_var->nobs_var; var++) { 00855 if (buf[var] != NULL && !strcmp(obs_var->output[var], "yes")) { 00856 if ( !strcmp(info->timestep, obs_var->frequency) ) { 00857 /* Output and input data are at same frequency */ 00858 if (found_file[var] == FALSE && hour == minh) 00859 (void) fprintf(stderr, "%s: Writing data to %s\n", __FILE__, outfile[var]); 00860 /* Write data */ 00861 istat = write_netcdf_var_3d_2d(buf[var], &curtime, info_tmp[var]->fillvalue, outfile[var], obs_var->netcdfname[var], 00862 info_tmp[var]->long_name, info_tmp[var]->units, info_tmp[var]->height, proj_tmp->name, 00863 obs_var->dimxname, obs_var->dimyname, obs_var->timename, 00864 0, !(found_file[var]), file_format, file_compression_level, 00865 nlon, nlat, ntime_file, debug); 00866 found_file[var] = TRUE; 00867 } 00868 else if ( !strcmp(info->timestep, "daily") && !strcmp(obs_var->frequency, "hourly") ) { 00869 if (hour == maxh) { 00870 /* Last hour of day */ 00871 for (i=0; i<nlon*nlat; i++) 00872 /* Average data */ 00873 buf[var][i] = (bufsave[var][i] + buf[var][i]) / 24.0; 00874 /* Free memory */ 00875 (void) free(bufsave[var]); 00876 bufsave[var] = NULL; 00877 if (found_file[var] == FALSE && hour == minh) 00878 (void) fprintf(stderr, "%s: Writing data to %s\n",__FILE__, outfile[var]); 00879 /* Write data */ 00880 istat = write_netcdf_var_3d_2d(buf[var], &curtime, info_tmp[var]->fillvalue, outfile[var], obs_var->netcdfname[var], 00881 info_tmp[var]->long_name, info_tmp[var]->units, info_tmp[var]->height, proj_tmp->name, 00882 obs_var->dimxname, obs_var->dimyname, obs_var->timename, 00883 0, !(found_file[var]), file_format, file_compression_level, 00884 nlon, nlat, ntime_file, debug); 00885 found_file[var] = TRUE; 00886 } 00887 else { 00888 /* Allocate memory if first hour accumulating */ 00889 if (bufsave[var] == NULL) { 00890 bufsave[var] = (double *) calloc(nlat*nlon, sizeof(double)); 00891 if (bufsave[var] == NULL) alloc_error(__FILE__, __LINE__); 00892 } 00893 /* Accumulate data to compute average when input data is hourly and output is daily */ 00894 for (i=0; i<nlon*nlat; i++) 00895 bufsave[var][i] += buf[var][i]; 00896 } 00897 } 00898 else { 00899 (void) fprintf(stderr, "%s: Fatal error in configuration of output timestep and observation variables frequency! 
Output timestep = %s Observation variables frequency = %s\n", __FILE__, info->timestep, obs_var->frequency); 00900 00901 /* Fatal error */ 00902 if (buf[var] != NULL) (void) free(buf[var]); 00903 (void) free(info_tmp[var]->grid_mapping); 00904 (void) free(info_tmp[var]->units); 00905 (void) free(info_tmp[var]->height); 00906 (void) free(info_tmp[var]->coordinates); 00907 (void) free(info_tmp[var]->long_name); 00908 (void) free(info_tmp[var]); 00909 00910 for (vare=0; vare<=var; vare++) { 00911 for (f=0; f<noutf[vare]; f++) 00912 (void) free(outfiles[vare][f]); 00913 if (noutf[vare] > 0) 00914 (void) free(outfiles[vare]); 00915 (void) free(infile[vare]); 00916 (void) free(outfile[vare]); 00917 } 00918 (void) free(outfiles); 00919 (void) free(noutf); 00920 (void) free(found_file); 00921 (void) free(info_tmp); 00922 (void) free(buf); 00923 00924 (void) free(lat); 00925 (void) free(lon); 00926 00927 (void) free(x); 00928 (void) free(y); 00929 00930 (void) free(format); 00931 00932 (void) free(proj_tmp->name); 00933 (void) free(proj_tmp->grid_mapping_name); 00934 (void) free(proj_tmp); 00935 00936 (void) free(time_s->year); 00937 (void) free(time_s->month); 00938 (void) free(time_s->day); 00939 (void) free(time_s->hour); 00940 (void) free(time_s->minutes); 00941 (void) free(time_s->seconds); 00942 00943 (void) free(time_s); 00944 00945 if (alt != NULL) (void) free(alt); 00946 00947 (void) ut_free(dataunits); 00948 (void) ut_free_system(unitSystem); 00949 00950 return -3; 00951 } 00952 } 00953 00954 /* Free allocated memory */ 00955 if (buf[var] != NULL) (void) free(buf[var]); 00956 (void) free(info_tmp[var]->grid_mapping); 00957 (void) free(info_tmp[var]->units); 00958 (void) free(info_tmp[var]->height); 00959 (void) free(info_tmp[var]->coordinates); 00960 (void) free(info_tmp[var]->long_name); 00961 (void) free(info_tmp[var]); 00962 } 00963 00964 /* Free allocated memory */ 00965 (void) free(proj_tmp->name); 00966 (void) free(proj_tmp->grid_mapping_name); 00967 (void) free(proj_tmp); 00968 } 00969 else { 00970 //output_downscaled_analog.c: Fatal error in algorithm: analog date 3276 2000 12 31 19 not found in database!! 00971 //output_downscaled_analog.c: Writing data to /home/globc/page/downscaling_v2/data/results/scratch2010/hourly/arpege/arpege_ref/uvas_1d_19820101_19821231.nc 00972 //output_downscaled_analog.c: Fatal error in algorithm: analog date 12050 2000 12 31 19 not found in database!! 
00973 if ( !strcmp(obs_var->frequency, "hourly") ) { 00974 (void) fprintf(stderr, "%s: Fatal error in algorithm: analog date %d %d %d %d %d not found in database!!\n", __FILE__, t, 00975 analog_days.year[t],analog_days.month[t],analog_days.day[t],hour); 00976 minh = 0; 00977 maxh = 23; 00978 for (hour=minh; hour<=maxh; hour++) { 00979 found = FALSE; 00980 tl = 0; 00981 while (tl<ntime_obs && found == FALSE) { 00982 (void) printf("%d %d %d %d %d\n",tl,time_s->year[tl],time_s->month[tl],time_s->day[tl],time_s->hour[tl]); 00983 if (analog_days.year[t] == time_s->year[tl] && analog_days.month[t] == time_s->month[tl] && 00984 analog_days.day[t] == time_s->day[tl] && hour == time_s->hour[tl]) { 00985 found = TRUE; 00986 (void) printf("Found analog %d %d %d %d\n",tl,analog_days.year[t],analog_days.month[t],analog_days.day[t]); 00987 } 00988 tl++; 00989 } 00990 } 00991 } 00992 else 00993 (void) fprintf(stderr, "%s: Fatal error in algorithm: analog date %d %d %d %d not found in database!!\n", __FILE__, t, 00994 analog_days.year[t],analog_days.month[t],analog_days.day[t]); 00995 /* Fatal error */ 00996 for (var=0; var<obs_var->nobs_var; var++) { 00997 for (f=0; f<noutf[var]; f++) 00998 (void) free(outfiles[var][f]); 00999 if (noutf[var] > 0) 01000 (void) free(outfiles[var]); 01001 (void) free(infile[var]); 01002 (void) free(outfile[var]); 01003 } 01004 (void) free(outfiles); 01005 (void) free(noutf); 01006 (void) free(found_file); 01007 (void) free(info_tmp); 01008 (void) free(buf); 01009 01010 (void) free(lat); 01011 (void) free(lon); 01012 01013 (void) free(x); 01014 (void) free(y); 01015 01016 (void) free(format); 01017 01018 (void) free(proj_tmp->name); 01019 (void) free(proj_tmp->grid_mapping_name); 01020 (void) free(proj_tmp); 01021 01022 (void) free(time_s->year); 01023 (void) free(time_s->month); 01024 (void) free(time_s->day); 01025 (void) free(time_s->hour); 01026 (void) free(time_s->minutes); 01027 (void) free(time_s->seconds); 01028 01029 (void) free(time_s); 01030 01031 if (alt != NULL) (void) free(alt); 01032 01033 (void) ut_free(dataunits); 01034 (void) ut_free_system(unitSystem); 01035 01036 return -1; 01037 } 01038 } 01039 (void) free(time_s->year); 01040 (void) free(time_s->month); 01041 (void) free(time_s->day); 01042 (void) free(time_s->hour); 01043 (void) free(time_s->minutes); 01044 (void) free(time_s->seconds); 01045 01046 (void) free(time_s); 01047 } 01048 } 01049 01050 /* Free allocated memory */ 01051 for (var=0; var<obs_var->nobs_var; var++) { 01052 for (f=0; f<noutf[var]; f++) 01053 (void) free(outfiles[var][f]); 01054 if (noutf[var] > 0) 01055 (void) free(outfiles[var]); 01056 (void) free(infile[var]); 01057 (void) free(outfile[var]); 01058 } 01059 (void) free(outfiles); 01060 (void) free(noutf); 01061 (void) free(found_file); 01062 (void) free(info_tmp); 01063 (void) free(buf); 01064 01065 (void) free(x); 01066 (void) free(y); 01067 01068 (void) free(lat); 01069 (void) free(lon); 01070 01071 if (pmsl != NULL) (void) free(pmsl); 01072 if (alt != NULL) (void) free(alt); 01073 01074 (void) free(infile); 01075 (void) free(outfile); 01076 (void) free(format); 01077 01078 (void) ut_free(dataunits); 01079 (void) ut_free_system(unitSystem); 01080 01081 /* Success diagnostic */ 01082 return 0; 01083 }
void read_analog_data ( analog_day_struct *analog_days,
                        double **delta,
                        double **time_ls,
                        char *filename,
                        char *timename )
Read analog data from NetCDF input file.
[out]  analog_days   Analog days time indexes and dates, with the corresponding dates being downscaled.
[out]  delta         Temperature difference to apply to the analog day data.
[out]  time_ls       Time values in udunits.
[in]   filename      Analog days NetCDF filename to read.
[in]   timename      Time dimension name in the NetCDF file.
Definition at line 66 of file read_analog_data.c.
References alloc_error(), analog_day_struct::day, analog_day_struct::day_s, handle_netcdf_error(), analog_day_struct::month, analog_day_struct::month_s, analog_day_struct::ntime, analog_day_struct::year, and analog_day_struct::year_s.
Referenced by wt_downscaling().
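A minimal calling sketch may help here. All variable names and the literal file and dimension names below ("analog.nc", "time") are illustrative placeholders, not values taken from the real configuration; the output arrays are allocated inside the routine, as the listing below shows.

/* Sketch only: how a caller in the style of wt_downscaling() might use read_analog_data(). */
analog_day_struct analog_days;
double *delta = NULL;    /* temperature corrections, allocated by the callee */
double *time_ls = NULL;  /* time values in udunits, allocated by the callee */

read_analog_data(&analog_days, &delta, &time_ls, "analog.nc", "time");

/* analog_days.ntime entries are now available: the analog dates analog_days.year[t],
   analog_days.month[t], analog_days.day[t] and the downscaled dates analog_days.year_s[t],
   analog_days.month_s[t], analog_days.day_s[t].  Free delta, time_ls and the
   analog_days arrays when done. */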
{
  int istat;           /* Diagnostic status */

  int ncinid;          /* NetCDF input file handle ID */
  int timediminid;     /* NetCDF time dimension output ID */
  int timeinid;        /* NetCDF time variable ID */
  int atimeinid;       /* NetCDF analog dates variable ID */
  int ayearinid;       /* NetCDF analog dates variable ID */
  int amonthinid;      /* NetCDF analog dates variable ID */
  int adayinid;        /* NetCDF analog dates variable ID */
  int dyearinid;       /* NetCDF downscaled dates variable ID */
  int dmonthinid;      /* NetCDF downscaled dates variable ID */
  int ddayinid;        /* NetCDF downscaled dates variable ID */
  int deltatinid;      /* NetCDF delta T variable ID */

  size_t start[1];     /* Start element when reading */
  size_t count[1];     /* Count of elements to read */
  size_t ntime;        /* Time dimension length */

  istat = nc_open(filename, NC_NOWRITE, &ncinid);  /* open for reading */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  istat = nc_inq_dimid(ncinid, timename, &timediminid);  /* get ID for time dimension */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  istat = nc_inq_dimlen(ncinid, timediminid, &ntime);  /* get time dimension length */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  analog_days->ntime = ntime;

  istat = nc_inq_varid(ncinid, timename, &timeinid);  /* get time variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  istat = nc_inq_varid(ncinid, "downscaled_date_year", &dyearinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  istat = nc_inq_varid(ncinid, "downscaled_date_month", &dmonthinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  istat = nc_inq_varid(ncinid, "downscaled_date_day", &ddayinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  istat = nc_inq_varid(ncinid, "analog_date", &atimeinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  istat = nc_inq_varid(ncinid, "analog_date_year", &ayearinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  istat = nc_inq_varid(ncinid, "analog_date_month", &amonthinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
  istat = nc_inq_varid(ncinid, "analog_date_day", &adayinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  istat = nc_inq_varid(ncinid, "analog_delta_t", &deltatinid);  /* get variable ID */
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  /* Allocate memory and set start and count */
  start[0] = 0;
  count[0] = (size_t) analog_days->ntime;

  /* Read values from netCDF variables */

  analog_days->year_s = (int *) malloc(analog_days->ntime * sizeof(int));
  if (analog_days->year_s == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_int(ncinid, dyearinid, start, count, analog_days->year_s);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  analog_days->month_s = (int *) malloc(analog_days->ntime * sizeof(int));
  if (analog_days->month_s == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_int(ncinid, dmonthinid, start, count, analog_days->month_s);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  analog_days->day_s = (int *) malloc(analog_days->ntime * sizeof(int));
  if (analog_days->day_s == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_int(ncinid, ddayinid, start, count, analog_days->day_s);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  analog_days->year = (int *) malloc(analog_days->ntime * sizeof(int));
  if (analog_days->year == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_int(ncinid, ayearinid, start, count, analog_days->year);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  analog_days->month = (int *) malloc(analog_days->ntime * sizeof(int));
  if (analog_days->month == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_int(ncinid, amonthinid, start, count, analog_days->month);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  analog_days->day = (int *) malloc(analog_days->ntime * sizeof(int));
  if (analog_days->day == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_int(ncinid, adayinid, start, count, analog_days->day);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  (*delta) = (double *) malloc(analog_days->ntime * sizeof(double));
  if ((*delta) == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_double(ncinid, deltatinid, start, count, *delta);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  (*time_ls) = (double *) malloc(analog_days->ntime * sizeof(double));
  if ((*time_ls) == NULL) alloc_error(__FILE__, __LINE__);
  istat = nc_get_vara_double(ncinid, timeinid, start, count, *time_ls);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);

  /* Close the input netCDF file. */
  istat = ncclose(ncinid);
  if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__);
}
int read_field_subdomain_period ( double **buffer,
                                  double **lon,
                                  double **lat,
                                  double *missing_value,
                                  char *varname,
                                  int *year,
                                  int *month,
                                  int *day,
                                  double lonmin,
                                  double lonmax,
                                  double latmin,
                                  double latmax,
                                  char *coords,
                                  char *gridname,
                                  char *lonname,
                                  char *latname,
                                  char *dimxname,
                                  char *dimyname,
                                  char *timename,
                                  char *filename,
                                  int *nlon,
                                  int *nlat,
                                  int ntime )
Read NetCDF field and extract subdomain and subperiod.
[out]  buffer         Output field 3D array
[out]  lon            Longitude 2D array
[out]  lat            Latitude 2D array
[out]  missing_value  Missing value
[in]   varname        Variable name to read
[in]   year           Year vector for subperiod
[in]   month          Month vector for subperiod
[in]   day            Day vector for subperiod
[in]   lonmin         Minimum longitude for subdomain
[in]   lonmax         Maximum longitude for subdomain
[in]   latmin         Minimum latitude for subdomain
[in]   latmax         Maximum latitude for subdomain
[in]   coords         Coordinates dimensions (1D or 2D)
[in]   gridname       Projection name
[in]   lonname        Longitude field name
[in]   latname        Latitude field name
[in]   dimxname       X dimension name
[in]   dimyname       Y dimension name
[in]   timename       Time dimension name
[in]   filename       Input filename
[out]  nlon           Longitude dimension
[out]  nlat           Latitude dimension
[in]   ntime          Time dimension
Definition at line 67 of file read_field_subdomain_period.c.
References alloc_error(), compute_time_info(), info_field_struct::coordinates, time_vect_struct::day, extract_subdomain(), FALSE, info_field_struct::fillvalue, info_field_struct::grid_mapping, info_field_struct::height, time_vect_struct::hour, info_field_struct::long_name, time_vect_struct::minutes, time_vect_struct::month, read_netcdf_dims_3d(), read_netcdf_var_3d_2d(), time_vect_struct::seconds, info_struct::title, info_field_struct::units, and time_vect_struct::year.
Referenced by wt_learning().
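Because the argument list is long, a hedged calling sketch follows. Every literal string, bounding-box value and the three-day date vectors below are illustrative placeholders only; they are not taken from the real configuration or from wt_learning().

/* Sketch only: read variable "tas" over a small lon/lat box for a 3-day subperiod. */
double *buf = NULL, *lon = NULL, *lat = NULL;
double missing_value;
int nlon, nlat, istat;
int year[3]  = { 1981, 1981, 1981 };
int month[3] = { 1, 1, 1 };
int day[3]   = { 1, 2, 3 };

istat = read_field_subdomain_period(&buf, &lon, &lat, &missing_value, "tas",
                                    year, month, day,
                                    -5.0, 5.0, 40.0, 50.0,   /* lonmin, lonmax, latmin, latmax */
                                    "2D", "Lambert_Conformal", "lon", "lat",
                                    "x", "y", "time", "obs.nc",
                                    &nlon, &nlat, 3);
if (istat != 0) {
  /* A non-zero status means the read failed or no requested date was found in the file. */
}
/* On success, buf holds nlon*nlat values per matched date, restricted to the
   requested lon/lat box; lon and lat hold the subdomain coordinates. */
(void) free(buf); (void) free(lon); (void) free(lat);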
00070 { 00099 int istat; /* Diagnostic status */ 00100 info_struct *info; 00101 info_field_struct *info_field; 00102 double *buf_total = NULL; 00103 double *buf_sub = NULL; 00104 double *time_ls = NULL; /* Temporary time information buffer */ 00105 time_vect_struct *time_s = NULL; 00106 char *cal_type = NULL; /* Calendar type (udunits) */ 00107 char *time_units = NULL; /* Time units (udunits) */ 00108 double *lon_total = NULL; 00109 double *lat_total = NULL; 00110 int ntime_file; /* Number of times dimension in input file */ 00111 int nlon_file; 00112 int nlat_file; 00113 int ntime_sub; 00114 00115 int nt; 00116 int tt; 00117 int i; 00118 int j; 00119 00120 *lon = NULL; 00121 *lat = NULL; 00122 *buffer = NULL; 00123 00124 *nlon = *nlat = -1; 00125 00126 info = (info_struct *) malloc(sizeof(info_struct)); 00127 if (info == NULL) alloc_error(__FILE__, __LINE__); 00128 00129 info_field = (info_field_struct *) malloc(sizeof(info_field_struct)); 00130 if (info_field == NULL) alloc_error(__FILE__, __LINE__); 00131 00132 time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 00133 if (time_s == NULL) alloc_error(__FILE__, __LINE__); 00134 00135 /* To prevent fetching of not needed info attributes */ 00136 info->title = strdup("none"); 00137 /* Read dimensions */ 00138 istat = read_netcdf_dims_3d(&lon_total, &lat_total, &time_ls, &cal_type, &time_units, &nlon_file, &nlat_file, &ntime_file, 00139 info, coords, gridname, lonname, latname, dimxname, dimyname, timename, filename); 00140 (void) free(info->title); 00141 (void) free(info); 00142 00143 /* Compute time information */ 00144 istat = compute_time_info(time_s, time_ls, time_units, cal_type, ntime_file); 00145 00146 /* Loop over time */ 00147 ntime_sub = 0; 00148 for (nt=0; nt<ntime; nt++) { 00149 /* Search in all second time vector times for matching date */ 00150 for (tt=0; tt<ntime_file; tt++) { 00151 if (year[nt] == time_s->year[tt] && 00152 month[nt] == time_s->month[tt] && 00153 day[nt] == time_s->day[tt]) { 00154 /* Found common date, process it. 
*/ 00155 istat = read_netcdf_var_3d_2d(&buf_total, info_field, (proj_struct *) NULL, filename, varname, dimxname, dimyname, timename, 00156 tt, nlon, nlat, &ntime_file, FALSE); 00157 /* Free non-needed variables */ 00158 (void) free(info_field->coordinates); 00159 (void) free(info_field->grid_mapping); 00160 (void) free(info_field->units); 00161 (void) free(info_field->height); 00162 (void) free(info_field->long_name); 00163 if (istat != 0) { 00164 /* In case of failure */ 00165 (void) free(buf_total); 00166 (void) free(lon_total); 00167 (void) free(lat_total); 00168 (void) free(time_ls); 00169 (void) free(time_units); 00170 (void) free(cal_type); 00171 (void) free(time_s->year); 00172 (void) free(time_s->month); 00173 (void) free(time_s->day); 00174 (void) free(time_s->hour); 00175 (void) free(time_s->minutes); 00176 (void) free(time_s->seconds); 00177 (void) free(time_s); 00178 (void) free(info_field); 00179 return istat; 00180 } 00181 *missing_value = info_field->fillvalue; 00182 00183 /* Extract subdomain */ 00184 if ((*lat) != NULL) 00185 (void) free(*lat); 00186 if ((*lon) != NULL) 00187 (void) free(*lon); 00188 (void) extract_subdomain(&buf_sub, lon, lat, nlon, nlat, buf_total, lon_total, lat_total, 00189 lonmin, lonmax, latmin, latmax, nlon_file, nlat_file, 1); 00190 (void) free(buf_total); 00191 00192 /* Store into output field */ 00193 (*buffer) = realloc((*buffer), (*nlon)*(*nlat)*(ntime_sub+1) * sizeof(double)); 00194 if ((*buffer) == NULL) alloc_error(__FILE__, __LINE__); 00195 for (j=0; j<(*nlat); j++) 00196 for (i=0; i<(*nlon); i++) 00197 (*buffer)[i+j*(*nlon)+(ntime_sub)*(*nlon)*(*nlat)] = buf_sub[i+j*(*nlon)]; 00198 00199 (void) free(buf_sub); 00200 00201 ntime_sub++; 00202 00203 break; 00204 } 00205 } 00206 } 00207 00208 if (*nlat == -1 || *nlon == -1) { 00209 /* In case of failure */ 00210 (void) free(lon_total); 00211 (void) free(lat_total); 00212 (void) free(time_ls); 00213 (void) free(time_units); 00214 (void) free(cal_type); 00215 (void) free(time_s->year); 00216 (void) free(time_s->month); 00217 (void) free(time_s->day); 00218 (void) free(time_s->hour); 00219 (void) free(time_s->minutes); 00220 (void) free(time_s->seconds); 00221 (void) free(time_s); 00222 (void) free(info_field); 00223 00224 (void) fprintf(stderr, "%s: Cannot find any date!! Dates we try to find:: At index 0: %d %d %d, at last index: %d %d %d. Dates we are searching in (in the file):: At index 0: %d %d %d, at last index: %d %d %d. \n", __FILE__, year[0], month[0], day[0], year[ntime-1], month[ntime-1], day[ntime-1], time_s->year[0], time_s->month[0], time_s->day[0], time_s->year[ntime_file-1], time_s->month[ntime_file-1], time_s->day[ntime_file-1]); 00225 00226 return -1; 00227 } 00228 00229 (void) free(lon_total); 00230 (void) free(lat_total); 00231 00232 (void) free(time_s->year); 00233 (void) free(time_s->month); 00234 (void) free(time_s->day); 00235 (void) free(time_s->hour); 00236 (void) free(time_s->minutes); 00237 (void) free(time_s->seconds); 00238 (void) free(time_s); 00239 00240 (void) free(time_ls); 00241 (void) free(time_units); 00242 (void) free(cal_type); 00243 00244 (void) free(info_field); 00245 00246 /* Diagnostic status */ 00247 return 0; 00248 }
int read_large_scale_eof ( data_struct *data )
Read Large-Scale EOFs from input files.
Currently only NetCDF is implemented.
[in]  data   MASTER data structure.
Extract subdomain of the EOF fields after reading (see the extract_subdomain() sketch after the cross-references below).
Definition at line 66 of file read_large_scale_eof.c.
References data_struct::conf, proj_struct::coords, field_struct::data, conf_struct::dimxname_eof, conf_struct::dimyname_eof, eof_info_struct::eof_coords, proj_struct::eof_coords, field_data_struct::eof_data, eof_info_struct::eof_filein_ls, field_data_struct::eof_info, eof_data_struct::eof_ls, eof_data_struct::eof_nomvar_ls, eof_info_struct::eof_project, conf_struct::eofname, extract_subdomain(), data_struct::field, info_field_struct::fillvalue, proj_struct::grid_mapping_name, eof_info_struct::info, field_struct::lat_eof_ls, conf_struct::latitude_max, conf_struct::latitude_min, conf_struct::latname_eof, field_struct::lon_eof_ls, conf_struct::longitude_max, conf_struct::longitude_min, conf_struct::lonname_eof, field_struct::n_ls, proj_struct::name, eof_info_struct::neof_ls, field_struct::nlat_eof_ls, field_struct::nlon_eof_ls, field_struct::proj, read_netcdf_dims_eof(), read_netcdf_var_1d(), read_netcdf_var_3d(), eof_data_struct::sing_ls, eof_data_struct::sing_nomvar_ls, and TRUE.
Referenced by wt_downscaling().
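The listing below restricts the EOF fields to the configured bounding box with extract_subdomain(). A minimal sketch of that call, using the same argument order as in the listing; the input arrays (buf, lon, lat), their dimensions (nlon, nlat, neof) and the numeric bounds are illustrative placeholders:

/* Sketch only: restrict a full-grid field buf (nlon x nlat x neof) to a bounding box.
   The output arrays are allocated by extract_subdomain() itself. */
double *sub = NULL, *sub_lon = NULL, *sub_lat = NULL;
int sub_nlon, sub_nlat;

(void) extract_subdomain(&sub, &sub_lon, &sub_lat, &sub_nlon, &sub_nlat,
                         buf, lon, lat,
                         -15.0, 20.0, 35.0, 60.0,   /* illustrative lon/lat bounds */
                         nlon, nlat, neof);
/* sub now holds sub_nlon * sub_nlat * neof values; buf can be freed afterwards. */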
00066 { 00073 int istat; /* Diagnostic status */ 00074 int i; /* Loop counter */ 00075 int cat; /* Field category loop counter */ 00076 double *buf = NULL; /* Temporary buffer for reading */ 00077 double *lat = NULL; /* Temporary buffer for latitudes */ 00078 double *lon = NULL; /* Temporary buffer for longitudes */ 00079 int nlon; /* Longitude dimension */ 00080 int nlat; /* Latitude dimension */ 00081 int nlon_file; /* Longitude dimension for main large-scale fields in input file */ 00082 int nlat_file; /* Latitude dimension for main large-scale fields in input file */ 00083 int neof_file; /* EOF dimension for main large-scale fields in input file */ 00084 proj_struct proj_eof; /* EOF spatial projection structure */ 00085 00086 proj_eof.eof_coords = (char *) NULL; 00087 proj_eof.name = (char *) NULL; 00088 proj_eof.grid_mapping_name = (char *) NULL; 00089 00090 /* Loop over large-scale field categories (control and model run) */ 00091 for (cat=0; cat<2; cat++) { 00092 /* Loop over large-scale fields */ 00093 for (i=0; i<data->field[cat].n_ls; i++) { 00094 00095 /* Verify that we need to project field onto EOF */ 00096 if (data->field[cat].data[i].eof_info->eof_project == TRUE) { 00097 00098 if (data->field[cat].lon_eof_ls == NULL) { 00099 /* Read dimensions for EOF */ 00100 istat = read_netcdf_dims_eof(&lon, &lat, &nlon, &nlat, &neof_file, 00101 data->field[cat].data[i].eof_info->eof_coords, 00102 data->conf->lonname_eof, data->conf->latname_eof, 00103 data->conf->dimxname_eof, data->conf->dimyname_eof, 00104 data->conf->eofname, 00105 data->field[cat].data[i].eof_info->eof_filein_ls); 00106 if (istat < 0) { 00107 /* In case of failure */ 00108 (void) free(lon); 00109 (void) free(lat); 00110 return istat; 00111 } 00112 00113 /* Transfer data into proper data structure */ 00114 proj_eof.eof_coords = strdup(data->field[cat].proj[i].coords); 00115 proj_eof.name = strdup(data->field[cat].proj[i].name); 00116 proj_eof.grid_mapping_name = strdup(data->field[cat].proj[i].grid_mapping_name); 00117 } 00118 00119 /* Read EOF */ 00120 istat = read_netcdf_var_3d(&buf, data->field[cat].data[i].eof_info->info, &proj_eof, 00121 data->field[cat].data[i].eof_info->eof_filein_ls, 00122 data->field[cat].data[i].eof_data->eof_nomvar_ls, 00123 data->conf->dimxname_eof, data->conf->dimyname_eof, 00124 data->conf->eofname, &nlon_file, &nlat_file, &neof_file, TRUE); 00125 if (nlon != nlon_file || nlat != nlat_file) { 00126 (void) fprintf(stderr, "%s: Problems in dimensions! nlat=%d nlat_file=%d nlon=%d nlon_file=%d\n", 00127 __FILE__, nlat, nlat_file, nlon, nlon_file); 00128 istat = -1; 00129 } 00130 if (data->field[cat].data[i].eof_info->neof_ls != neof_file) { 00131 (void) fprintf(stderr, "%s: ERROR: Number of EOFs (%d) for %s field from large-scale fields file (%s) is not equal to number of EOFs specified in XML configuration file for large-scale fields (%d)!\n", __FILE__, neof_file, 00132 data->field[cat].data[i].eof_data->eof_nomvar_ls, 00133 data->field[cat].data[i].eof_info->eof_filein_ls, 00134 data->field[cat].data[i].eof_info->neof_ls); 00135 istat = -1; 00136 } 00137 if (istat != 0) { 00138 /* In case of failure */ 00139 (void) free(buf); 00140 (void) free(lon); 00141 (void) free(lat); 00142 if (proj_eof.eof_coords != NULL) 00143 (void) free(proj_eof.eof_coords); 00144 if (proj_eof.name != NULL) 00145 (void) free(proj_eof.name); 00146 if (proj_eof.grid_mapping_name != NULL) 00147 (void) free(proj_eof.grid_mapping_name); 00148 return istat; 00149 } 00150 00152 /* Free memory if needed because of loop. 
Set pointers to NULL for realloc. */ 00153 if (data->field[cat].lon_eof_ls != NULL) { 00154 (void) free(data->field[cat].lon_eof_ls); 00155 data->field[cat].lon_eof_ls = NULL; 00156 } 00157 if (data->field[cat].lat_eof_ls != NULL) { 00158 (void) free(data->field[cat].lat_eof_ls); 00159 data->field[cat].lat_eof_ls = NULL; 00160 } 00161 if (data->field[cat].data[i].eof_data->eof_ls != NULL) { 00162 (void) free(data->field[cat].data[i].eof_data->eof_ls); 00163 data->field[cat].data[i].eof_data->eof_ls = NULL; 00164 } 00165 /* Extraction */ 00166 (void) extract_subdomain(&(data->field[cat].data[i].eof_data->eof_ls), &(data->field[cat].lon_eof_ls), 00167 &(data->field[cat].lat_eof_ls), 00168 &(data->field[cat].nlon_eof_ls), &(data->field[cat].nlat_eof_ls), buf, lon, lat, 00169 data->conf->longitude_min, data->conf->longitude_max, data->conf->latitude_min, data->conf->latitude_max, 00170 nlon, nlat, data->field[cat].data[i].eof_info->neof_ls); 00171 (void) free(buf); 00172 00173 /* Print missing value */ 00174 printf("%s: EOF missing_value = %lf\n", __FILE__, (double) data->field[cat].data[i].eof_info->info->fillvalue); 00175 00176 /* Read Singular Values */ 00177 istat = read_netcdf_var_1d(&(data->field[cat].data[i].eof_data->sing_ls), (info_field_struct *) NULL, 00178 data->field[cat].data[i].eof_info->eof_filein_ls, data->field[cat].data[i].eof_data->sing_nomvar_ls, 00179 data->conf->eofname, &neof_file, TRUE); 00180 if (data->field[cat].data[i].eof_info->neof_ls != neof_file) { 00181 (void) fprintf(stderr, "%s: ERROR: Number of EOFs (%d) for %s singular values from large-scale fields file (%s) is not equal to number of EOFs specified in XML configuration file for large-scale fields (%d)!\n", __FILE__, neof_file, 00182 data->field[cat].data[i].eof_data->sing_nomvar_ls, data->field[cat].data[i].eof_info->eof_filein_ls, 00183 data->field[cat].data[i].eof_info->neof_ls); 00184 istat = -1; 00185 } 00186 if (istat != 0) { 00187 /* In case of failure */ 00188 (void) free(lon); 00189 (void) free(lat); 00190 if (proj_eof.eof_coords != NULL) 00191 (void) free(proj_eof.eof_coords); 00192 if (proj_eof.name != NULL) 00193 (void) free(proj_eof.name); 00194 if (proj_eof.grid_mapping_name != NULL) 00195 (void) free(proj_eof.grid_mapping_name); 00196 return istat; 00197 } 00198 } 00199 /* Free memory if needed */ 00200 if (lon != NULL) (void) free(lon); 00201 lon = NULL; 00202 if (lat != NULL) (void) free(lat); 00203 lat = NULL; 00204 00205 if (proj_eof.eof_coords != NULL) { 00206 (void) free(proj_eof.eof_coords); 00207 proj_eof.eof_coords = (char *) NULL; 00208 } 00209 if (proj_eof.name != NULL) { 00210 (void) free(proj_eof.name); 00211 proj_eof.name = (char *) NULL; 00212 } 00213 if (proj_eof.grid_mapping_name != NULL) { 00214 (void) free(proj_eof.grid_mapping_name); 00215 proj_eof.grid_mapping_name = NULL; 00216 } 00217 } 00218 } 00219 00220 /* Diagnostic status */ 00221 return 0; 00222 }
int read_large_scale_fields ( data_struct *data )
Read large-scale fields data from input files.
Currently only NetCDF is implemented.
[in]  data   MASTER data structure.
Definition at line 67 of file read_large_scale_fields.c.
References alloc_error(), conf_struct::cal_type, change_date_origin(), compute_time_info(), data_struct::conf, proj_struct::coords, field_struct::data, data_to_gregorian_cal_d(), field_data_struct::dimxname, field_data_struct::dimyname, extract_subdomain(), data_struct::field, field_data_struct::field_ls, FIELD_LS, field_data_struct::filename_ls, conf_struct::fixtime, field_data_struct::info, data_struct::info, field_struct::lat_ls, conf_struct::latitude_max, conf_struct::latitude_min, field_data_struct::latname, field_struct::lon_ls, conf_struct::longitude_max, conf_struct::longitude_min, field_data_struct::lonname, field_struct::n_ls, proj_struct::name, NCAT, field_struct::nlat_ls, field_struct::nlon_ls, field_data_struct::nomvar_ls, field_struct::ntime_ls, field_struct::proj, read_netcdf_dims_3d(), read_netcdf_var_3d(), SEC_FIELD_LS, conf_struct::secondary_latitude_max, conf_struct::secondary_latitude_min, conf_struct::secondary_longitude_max, conf_struct::secondary_longitude_min, field_struct::time_ls, field_struct::time_s, conf_struct::time_units, field_data_struct::timename, TRUE, conf_struct::year_begin_ctrl, and conf_struct::year_begin_other.
Referenced by wt_downscaling().
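As the listings show, the read_* routines documented on this page share one calling convention: they take the MASTER data structure, fill it in place, and return 0 on success or a negative diagnostic status on failure. A hedged sketch of how a caller such as wt_downscaling() might chain them; the exact call order inside wt_downscaling() is not reproduced here, and 'data' is assumed to be a valid data_struct pointer:

/* Sketch only: propagate the diagnostic status upward after each read step. */
int istat;

istat = read_large_scale_fields(data);
if (istat != 0) return istat;

istat = read_large_scale_eof(data);
if (istat != 0) return istat;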
00067 { 00074 int istat; /* Diagnostic status */ 00075 int i; /* Loop counter */ 00076 int t; /* Time loop counter */ 00077 int cat; /* Field category loop counter */ 00078 double *buf = NULL; /* Temporary data buffer */ 00079 double *time_ls = NULL; /* Temporary time information buffer */ 00080 double *lat = NULL; /* Temporary latitude buffer for main large-scale fields */ 00081 double *lon = NULL; /* Temporary longitude buffer for main large-scale fields */ 00082 char **cal_type; /* Calendar type (udunits) */ 00083 char **time_units; /* Time units (udunits) */ 00084 double longitude_min; /* Domain bounding box minimum longitude */ 00085 double longitude_max; /* Domain bounding box maximum longitude */ 00086 double latitude_min; /* Domain bounding box minimum latitude */ 00087 double latitude_max; /* Domain bounding box maximum latitude */ 00088 int ntime; /* Number of times dimension */ 00089 int nlon; /* Longitude dimension for main large-scale fields */ 00090 int nlat; /* Latitude dimension for main large-scale fields */ 00091 int ntime_file; /* Number of times dimension in input file */ 00092 int nlon_file; /* Longitude dimension for main large-scale fields in input file */ 00093 int nlat_file; /* Latitude dimension for main large-scale fields in input file */ 00094 00095 int year_begin; /* When fixing time units, year to use as start date. */ 00096 00097 cal_type = (char **) malloc(NCAT * sizeof(char *)); 00098 if (cal_type == NULL) alloc_error(__FILE__, __LINE__); 00099 time_units = (char **) malloc(NCAT * sizeof(char *)); 00100 if (time_units == NULL) alloc_error(__FILE__, __LINE__); 00101 00102 /* Loop over all large-scale field categories */ 00103 for (cat=0; cat<NCAT; cat++) { 00104 00105 cal_type[cat] = NULL; 00106 time_units[cat] = NULL; 00107 00108 /* Select proper domain given large-scale field category */ 00109 if (cat == 0 || cat == 1) { 00110 longitude_min = data->conf->longitude_min; 00111 longitude_max = data->conf->longitude_max; 00112 latitude_min = data->conf->latitude_min; 00113 latitude_max = data->conf->latitude_max; 00114 } 00115 else { 00116 longitude_min = data->conf->secondary_longitude_min; 00117 longitude_max = data->conf->secondary_longitude_max; 00118 latitude_min = data->conf->secondary_latitude_min; 00119 latitude_max = data->conf->secondary_latitude_max; 00120 } 00121 00122 /* Free memory for loop and set pointers to NULL for realloc */ 00123 if (data->field[cat].time_ls != NULL) { 00124 (void) free(data->field[cat].time_ls); 00125 data->field[cat].time_ls = NULL; 00126 } 00127 if (data->field[cat].lat_ls != NULL) { 00128 (void) free(data->field[cat].lat_ls); 00129 data->field[cat].lat_ls = NULL; 00130 } 00131 if (data->field[cat].lon_ls != NULL) { 00132 (void) free(data->field[cat].lon_ls); 00133 data->field[cat].lon_ls = NULL; 00134 } 00135 00136 /* Loop over large-scale fields */ 00137 for (i=0; i<data->field[cat].n_ls; i++) { 00138 /* Retrieve dimensions if time buffer is not already set for this field category */ 00139 if (data->field[cat].time_ls == NULL) { 00140 istat = read_netcdf_dims_3d(&lon, &lat, &time_ls, &(cal_type[cat]), &(time_units[cat]), &nlon, &nlat, &ntime, 00141 data->info, data->field[cat].proj[i].coords, data->field[cat].proj[i].name, 00142 data->field[cat].data[i].lonname, data->field[cat].data[i].latname, 00143 data->field[cat].data[i].dimxname, data->field[cat].data[i].dimyname, 00144 data->field[cat].data[i].timename, 00145 data->field[cat].data[i].filename_ls); 00146 if (istat < 0) { 00147 /* In case of failure */ 00148 
(void) free(lon); 00149 (void) free(lat); 00150 (void) free(time_ls); 00151 (void) free(time_units[cat]); 00152 (void) free(cal_type[cat]); 00153 return istat; 00154 } 00155 /* Adjust time units if we want to fix time (set in the configuration file) */ 00156 if (data->conf->fixtime == TRUE) { 00157 if (cat == FIELD_LS || cat == SEC_FIELD_LS) 00158 year_begin = data->conf->year_begin_other; 00159 else 00160 year_begin = data->conf->year_begin_ctrl; 00161 if (istat != 1) { 00162 (void) fprintf(stderr, "\n%s: IMPORTANT WARNING: Time variable values all zero!!! Fixing time variable to index value, STARTING at 0...\n\n", __FILE__); 00163 for (t=0; t<ntime; t++) 00164 time_ls[t] = (double) t; 00165 } 00166 (void) fprintf(stdout, "%s: Fixing time units using start date %d-01-01 12:00:00.\n", __FILE__, year_begin); 00167 time_units[cat] = realloc(time_units[cat], 500 * sizeof(char)); 00168 if (time_units[cat] == NULL) alloc_error(__FILE__, __LINE__); 00169 /* days since 1950-01-01 12:00:00 */ 00170 (void) sprintf(time_units[cat], "days since %d-01-01 12:00:00", year_begin); 00171 } 00172 } 00173 00174 /* For standard calendar data */ 00175 if ( !strcmp(cal_type[cat], "gregorian") || !strcmp(cal_type[cat], "standard") ) { 00176 00177 /* Read data */ 00178 istat = read_netcdf_var_3d(&buf, data->field[cat].data[i].info, &(data->field[cat].proj[i]), 00179 data->field[cat].data[i].filename_ls, 00180 data->field[cat].data[i].nomvar_ls, 00181 data->field[cat].data[i].dimxname, data->field[cat].data[i].dimyname, data->field[cat].data[i].timename, 00182 &nlon_file, &nlat_file, &ntime_file, TRUE); 00183 if (nlon != nlon_file || nlat != nlat_file || ntime != ntime_file) { 00184 (void) fprintf(stderr, "%s: Problems in dimensions! nlat=%d nlat_file=%d nlon=%d nlon_file=%d ntime=%d ntime_file=%d\n", 00185 __FILE__, nlat, nlat_file, nlon, nlon_file, ntime, ntime_file); 00186 istat = -1; 00187 } 00188 if (istat != 0) { 00189 /* In case of failure */ 00190 (void) free(buf); 00191 (void) free(lon); 00192 (void) free(lat); 00193 (void) free(time_ls); 00194 (void) free(time_units[cat]); 00195 (void) free(cal_type[cat]); 00196 return istat; 00197 } 00198 00199 /* Extract subdomain of spatial fields */ 00200 if (data->field[cat].lon_ls != NULL) { 00201 (void) free(data->field[cat].lon_ls); 00202 data->field[cat].lon_ls = NULL; 00203 } 00204 if (data->field[cat].lat_ls != NULL) { 00205 (void) free(data->field[cat].lat_ls); 00206 data->field[cat].lat_ls = NULL; 00207 } 00208 if (data->field[cat].data[i].field_ls != NULL) { 00209 (void) free(data->field[cat].data[i].field_ls); 00210 data->field[cat].data[i].field_ls = NULL; 00211 } 00212 00213 /* Extraction of subdomain */ 00214 (void) extract_subdomain(&(data->field[cat].data[i].field_ls), &(data->field[cat].lon_ls), &(data->field[cat].lat_ls), 00215 &(data->field[cat].nlon_ls), &(data->field[cat].nlat_ls), buf, lon, lat, 00216 longitude_min, longitude_max, latitude_min, latitude_max, nlon, nlat, ntime); 00217 (void) free(buf); 00218 00219 /* Save number of times dimension */ 00220 data->field[cat].ntime_ls = ntime; 00221 00222 /* If time info not already retrieved for this category, get time information and generate time structure */ 00223 if (data->field[cat].time_ls == NULL) { 00224 data->field[cat].time_ls = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00225 if (data->field[cat].time_ls == NULL) alloc_error(__FILE__, __LINE__); 00226 if ( strcmp(time_units[cat], data->conf->time_units) ) 00227 (void) change_date_origin(data->field[cat].time_ls, 
data->conf->time_units, time_ls, time_units[cat], ntime); 00228 else 00229 for (t=0; t<data->field[cat].ntime_ls; t++) 00230 data->field[cat].time_ls[t] = time_ls[t]; 00231 istat = compute_time_info(data->field[cat].time_s, data->field[cat].time_ls, data->conf->time_units, data->conf->cal_type, 00232 data->field[cat].ntime_ls); 00233 } 00234 } 00235 else { 00236 /* Non-standard calendar type */ 00237 00238 double *dummy = NULL; 00239 00240 /* Free memory if previously allocated */ 00241 if (data->field[cat].lon_ls != NULL) { 00242 (void) free(data->field[cat].lon_ls); 00243 data->field[cat].lon_ls = NULL; 00244 } 00245 if (data->field[cat].lat_ls != NULL) { 00246 (void) free(data->field[cat].lat_ls); 00247 data->field[cat].lat_ls = NULL; 00248 } 00249 if (data->field[cat].data[i].field_ls != NULL) { 00250 (void) free(data->field[cat].data[i].field_ls); 00251 data->field[cat].data[i].field_ls = NULL; 00252 } 00253 /* Read data and fix calendar */ 00254 istat = read_netcdf_var_3d(&(data->field[cat].data[i].field_ls), data->field[cat].data[i].info, 00255 &(data->field[cat].proj[i]), data->field[cat].data[i].filename_ls, 00256 data->field[cat].data[i].nomvar_ls, 00257 data->field[cat].data[i].dimxname, data->field[cat].data[i].dimyname, data->field[cat].data[i].timename, 00258 &nlon_file, &nlat_file, &ntime_file, TRUE); 00259 if (nlon != nlon_file || nlat != nlat_file || ntime != ntime_file) { 00260 (void) fprintf(stderr, "%s: Problems in dimensions! nlat=%d nlat_file=%d nlon=%d nlon_file=%d ntime=%d ntime_file=%d\n", 00261 __FILE__, nlat, nlat_file, nlon, nlon_file, ntime, ntime_file); 00262 istat = -1; 00263 } 00264 if (istat != 0) { 00265 /* In case of failure */ 00266 (void) free(lon); 00267 (void) free(lat); 00268 (void) free(time_ls); 00269 (void) free(time_units[cat]); 00270 (void) free(cal_type[cat]); 00271 return istat; 00272 } 00273 00274 /* Extract subdomain of spatial fields */ 00275 (void) extract_subdomain(&buf, &(data->field[cat].lon_ls), &(data->field[cat].lat_ls), 00276 &(data->field[cat].nlon_ls), &(data->field[cat].nlat_ls), data->field[cat].data[i].field_ls, lon, lat, 00277 longitude_min, longitude_max, latitude_min, latitude_max, nlon, nlat, ntime); 00278 (void) free(data->field[cat].data[i].field_ls); 00279 00280 /* Adjust calendar to standard calendar */ 00281 istat = data_to_gregorian_cal_d(&(data->field[cat].data[i].field_ls), &dummy, &(data->field[cat].ntime_ls), 00282 buf, time_ls, time_units[cat], data->conf->time_units, 00283 cal_type[cat], data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime); 00284 if (istat < 0) { 00285 /* In case of failure */ 00286 (void) free(lon); 00287 (void) free(lat); 00288 (void) free(time_ls); 00289 (void) free(time_units[cat]); 00290 (void) free(cal_type[cat]); 00291 (void) free(buf); 00292 (void) free(data->field[cat].lon_ls); 00293 (void) free(data->field[cat].lat_ls); 00294 (void) free(data->field[cat].data[i].field_ls); 00295 return istat; 00296 } 00297 if (data->field[cat].time_ls == NULL) { 00298 data->field[cat].time_ls = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00299 if (data->field[cat].time_ls == NULL) alloc_error(__FILE__, __LINE__); 00300 for (t=0; t<data->field[cat].ntime_ls; t++) 00301 data->field[cat].time_ls[t] = dummy[t]; 00302 istat = compute_time_info(data->field[cat].time_s, data->field[cat].time_ls, data->conf->time_units, data->conf->cal_type, 00303 data->field[cat].ntime_ls); 00304 } 00305 (void) free(dummy); 00306 (void) free(buf); 00307 } 00308 } 00309 /* Free memory */ 00310 if (lat 
!= NULL) { 00311 (void) free(lat); 00312 lat = NULL; 00313 } 00314 if (lon != NULL) { 00315 (void) free(lon); 00316 lon = NULL; 00317 } 00318 if (time_ls != NULL) { 00319 (void) free(time_ls); 00320 time_ls = NULL; 00321 } 00322 if (time_units[cat] != NULL) { 00323 (void) free(time_units[cat]); 00324 time_units[cat] = NULL; 00325 } 00326 if (cal_type[cat] != NULL) { 00327 (void) free(cal_type[cat]); 00328 cal_type[cat] = NULL; 00329 } 00330 } 00331 00332 (void) free(time_units); 00333 (void) free(cal_type); 00334 00335 /* Diagnostic status */ 00336 return 0; 00337 }
int read_learning_fields ( data_struct *data )
Read Learning data from input files.
Currently only NetCDF is implemented.
[in]  data   MASTER data structure.
The same variable is read once per season, so that the same values are available for each season.
The whole-period time information is created by merging the separate per-season time information (see the sketch after the cross-references below).
Definition at line 66 of file read_learning_fields.c.
References alloc_error(), learning_data_struct::class_clusters, conf_struct::clustname, data_struct::conf, field_struct::data, learning_struct::data, time_vect_struct::day, field_data_struct::eof_info, conf_struct::eofname, data_struct::field, learning_struct::filename_open_clust_learn, learning_struct::filename_open_learn, learning_struct::filename_open_weight, get_time_info(), time_vect_struct::hour, data_struct::learning, time_vect_struct::minutes, time_vect_struct::month, field_struct::n_ls, season_struct::nclusters, eof_info_struct::neof_ls, learning_struct::nomvar_class_clusters, learning_struct::nomvar_pc_normalized_var, learning_struct::nomvar_precip_index, learning_struct::nomvar_precip_reg, learning_struct::nomvar_precip_reg_cst, learning_struct::nomvar_precip_reg_dist, learning_struct::nomvar_precip_reg_err, learning_struct::nomvar_sup_index, learning_struct::nomvar_sup_index_mean, learning_struct::nomvar_sup_index_var, learning_struct::nomvar_sup_val, learning_struct::nomvar_time, learning_struct::nomvar_weight, reg_struct::npts, season_struct::nreg, conf_struct::nseasons, learning_struct::ntime, learning_data_struct::ntime, learning_struct::pc_normalized_var, learning_data_struct::precip_index, learning_data_struct::precip_reg, learning_data_struct::precip_reg_cst, learning_data_struct::precip_reg_dist, learning_data_struct::precip_reg_err, conf_struct::ptsname, read_netcdf_var_1d(), read_netcdf_var_2d(), read_netcdf_var_3d(), read_netcdf_var_generic_val(), data_struct::reg, conf_struct::season, season_struct::secondary_cov, time_vect_struct::seconds, learning_data_struct::sup_index, learning_data_struct::sup_index_mean, learning_data_struct::sup_index_var, learning_struct::sup_lat, learning_struct::sup_latname, learning_struct::sup_lon, learning_struct::sup_lonname, learning_struct::sup_nlat, learning_struct::sup_nlon, learning_data_struct::sup_val, learning_data_struct::time, learning_struct::time_s, learning_data_struct::time_s, TRUE, learning_data_struct::weight, and time_vect_struct::year.
Referenced by wt_learning().
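The per-season time vectors are merged into one whole-period record by ordering the seasons chronologically on their first time value with gsl_sort_index() and concatenating them in that order, as the listing below does. A condensed sketch of that idea; 'learn' (a learning_struct pointer) and 'nseasons' (the number of seasons from the configuration) are placeholder names:

/* Sketch only: merge per-season time records in chronological order of their first date. */
size_t *order;
double *first_time;
int i, s, t, total_t = 0;

order = (size_t *) malloc(nseasons * sizeof(size_t));
if (order == NULL) alloc_error(__FILE__, __LINE__);
first_time = (double *) malloc(nseasons * sizeof(double));
if (first_time == NULL) alloc_error(__FILE__, __LINE__);

for (i = 0; i < nseasons; i++)
  first_time[i] = learn->data[i].time[0];          /* first udunits time of season i */
(void) gsl_sort_index(order, first_time, 1, (size_t) nseasons);

for (i = 0; i < nseasons; i++) {                   /* seasons in chronological order */
  s = (int) order[i];
  for (t = 0; t < learn->data[s].ntime; t++)
    total_t++;                                     /* copy year/month/day/... here, as in the listing */
}
learn->ntime = total_t;                            /* total number of merged times */

(void) free(order);
(void) free(first_time);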
00066 { 00073 int istat; /* Diagnostic status */ 00074 int i; /* Loop counter */ 00075 int t; /* Loop counter */ 00076 int ii; /* Loop counter */ 00077 char *nomvar = NULL; /* Variable name in NetCDF file */ 00078 char *nomvar_time = NULL; /* Time variable name in NetCDF file */ 00079 char *nomvar_season = NULL; /* Season variable name in NetCDF file */ 00080 char *name = NULL; /* Dimension name in NetCDF file */ 00081 char *cal_type = NULL; /* Calendar type (udunits) */ 00082 char *time_units = NULL; /* Time units (udunits) */ 00083 double *bufd = NULL; /* Temporary buffer */ 00084 double *time_sort = NULL; /* Temporary time info used for time merging */ 00085 size_t *time_index = NULL; /* Temporary time index used for time merging */ 00086 int total_t; /* Total number of times used for time merging */ 00087 int neof; /* EOF dimension */ 00088 int npts; /* Points dimension */ 00089 int nclusters; /* Clusters dimension */ 00090 int neof_file; /* EOF dimension in input file */ 00091 00092 data->learning->sup_lat = data->learning->sup_lon = NULL; 00093 00094 /* Allocate memory for temporary strings */ 00095 nomvar = (char *) malloc(500 * sizeof(char)); 00096 if (nomvar == NULL) alloc_error(__FILE__, __LINE__); 00097 nomvar_time = (char *) malloc(500 * sizeof(char)); 00098 if (nomvar_time == NULL) alloc_error(__FILE__, __LINE__); 00099 nomvar_season = (char *) malloc(500 * sizeof(char)); 00100 if (nomvar_season == NULL) alloc_error(__FILE__, __LINE__); 00101 name = (char *) malloc(500 * sizeof(char)); 00102 if (name == NULL) alloc_error(__FILE__, __LINE__); 00103 00104 /* Initialize season string */ 00105 (void) strcpy(nomvar_season, "season"); 00106 00107 /* Loop over all the seasons */ 00108 for (i=0; i<data->conf->nseasons; i++) { 00109 00110 if (data->conf->season[i].secondary_cov == TRUE && data->learning->sup_lat == NULL) { 00111 /* Read lat variable */ 00112 istat = read_netcdf_var_2d(&(data->learning->sup_lat), (info_field_struct *) NULL, (proj_struct *) NULL, 00113 data->learning->filename_open_learn, 00114 data->learning->sup_latname, data->learning->sup_lonname, data->learning->sup_latname, 00115 &(data->learning->sup_nlon), &(data->learning->sup_nlat), TRUE); 00116 if (istat != 0) { 00117 /* In case of failure */ 00118 (void) free(nomvar); 00119 (void) free(nomvar_time); 00120 (void) free(nomvar_season); 00121 (void) free(name); 00122 return istat; 00123 } 00124 } 00125 if (data->conf->season[i].secondary_cov == TRUE && data->learning->sup_lon == NULL) { 00126 /* Read lon variable */ 00127 istat = read_netcdf_var_2d(&(data->learning->sup_lon), (info_field_struct *) NULL, (proj_struct *) NULL, 00128 data->learning->filename_open_learn, 00129 data->learning->sup_lonname, data->learning->sup_lonname, data->learning->sup_latname, 00130 &(data->learning->sup_nlon), &(data->learning->sup_nlat), TRUE); 00131 if (istat != 0) { 00132 /* In case of failure */ 00133 (void) free(nomvar); 00134 (void) free(nomvar_time); 00135 (void) free(nomvar_season); 00136 (void) free(name); 00137 return istat; 00138 } 00139 } 00140 00141 /* Read time data and info */ 00142 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_time, i+1); 00143 (void) sprintf(nomvar_time, "%s_%d", data->learning->nomvar_time, i+1); 00144 istat = get_time_info(data->learning->data[i].time_s, &(data->learning->data[i].time), &time_units, &cal_type, 00145 &(data->learning->data[i].ntime), 00146 data->learning->filename_open_learn, nomvar_time, TRUE); 00147 (void) free(cal_type); 00148 (void) free(time_units); 00149 if (istat 
!= 0) { 00150 /* In case of failure */ 00151 (void) free(nomvar); 00152 (void) free(nomvar_time); 00153 (void) free(nomvar_season); 00154 (void) free(name); 00155 return istat; 00156 } 00157 00158 /* Read weight data */ 00159 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_weight, i+1); 00160 (void) sprintf(name, "%s_%d", data->conf->clustname, i+1); 00161 istat = read_netcdf_var_2d(&(data->learning->data[i].weight), (info_field_struct *) NULL, (proj_struct *) NULL, 00162 data->learning->filename_open_weight, nomvar, data->conf->eofname, name, 00163 &neof_file, &nclusters, TRUE); 00164 /* Save EOF dimension for control and model large-scale fields */ 00165 if (data->field[0].n_ls > 0) { 00166 if (data->field[0].data[0].eof_info->neof_ls != neof_file) { 00167 (void) fprintf(stderr, "%s: ERROR: Number of EOFs in learning weight datafile (%d) is not equal to number of EOFs specified in XML configuration file for model large-scale fields (%d)!\n", __FILE__, neof_file, data->field[0].data[0].eof_info->neof_ls); 00168 (void) free(nomvar); 00169 (void) free(nomvar_time); 00170 (void) free(nomvar_season); 00171 (void) free(name); 00172 return -1; 00173 } 00174 } 00175 if (data->field[1].n_ls > 0) 00176 if (data->field[1].data[0].eof_info->neof_ls != neof_file) { 00177 (void) fprintf(stderr, "%s: ERROR: Number of EOFs in learning weight datafile (%d) is not equal to number of EOFs specified in XML configuration file for control large-scale fields (%d)!\n", __FILE__, neof_file, data->field[1].data[0].eof_info->neof_ls); 00178 (void) free(nomvar); 00179 (void) free(nomvar_time); 00180 (void) free(nomvar_season); 00181 (void) free(name); 00182 return -1; 00183 } 00184 00185 /* If clusters dimension is not initialized, use retrieved info from input file */ 00186 if (data->conf->season[i].nclusters == -1) 00187 data->conf->season[i].nclusters = nclusters; 00188 /* Else verify that they match */ 00189 else if (data->conf->season[i].nclusters != nclusters) { 00190 (void) fprintf(stderr, "%s: ERROR: Incorrect number of clusters in NetCDF file. 
Season %d, nclusters=%d vs configuration file %d.\n", 00191 __FILE__, i, nclusters, data->conf->season[i].nclusters); 00192 (void) free(nomvar); 00193 (void) free(nomvar_time); 00194 (void) free(nomvar_season); 00195 (void) free(name); 00196 return -1; 00197 } 00198 if (istat != 0) { 00199 /* In case of failure */ 00200 (void) free(nomvar); 00201 (void) free(nomvar_time); 00202 (void) free(nomvar_season); 00203 (void) free(name); 00204 return istat; 00205 } 00206 00207 /* Read precip_reg data (precipitation regression coefficients) */ 00208 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg, i+1); 00209 (void) sprintf(name, "%s_%d", data->conf->clustname, i+1); 00210 istat = read_netcdf_var_2d(&(data->learning->data[i].precip_reg), (info_field_struct *) NULL, (proj_struct *) NULL, 00211 data->learning->filename_open_learn, 00212 nomvar, data->conf->ptsname, name, 00213 &npts, &(data->conf->season[i].nreg), TRUE); 00214 /* Verify that points dimension match configuration value */ 00215 if (npts != data->reg->npts) { 00216 (void) fprintf(stderr, "%s: ERROR: Incorrect number of points in NetCDF file %d vs configuration file %d.\n", 00217 __FILE__, npts, data->reg->npts); 00218 (void) free(nomvar); 00219 (void) free(nomvar_time); 00220 (void) free(nomvar_season); 00221 (void) free(name); 00222 return -1; 00223 } 00224 if (istat != 0) { 00225 /* In case of failure */ 00226 (void) free(nomvar); 00227 (void) free(nomvar_time); 00228 (void) free(nomvar_season); 00229 (void) free(name); 00230 return istat; 00231 } 00232 00233 /* Read precip_reg_cst data (precipitation regression constant) */ 00234 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_cst, i+1); 00235 istat = read_netcdf_var_1d(&(data->learning->data[i].precip_reg_cst), (info_field_struct *) NULL, 00236 data->learning->filename_open_learn, nomvar, data->conf->ptsname, &npts, TRUE); 00237 /* Verify that points dimension match configuration value */ 00238 if (npts != data->reg->npts) { 00239 (void) fprintf(stderr, "%s: ERROR: Incorrect number of points in NetCDF file %d vs configuration file %d.\n", 00240 __FILE__, npts, data->reg->npts); 00241 (void) free(nomvar); 00242 (void) free(nomvar_time); 00243 (void) free(nomvar_season); 00244 (void) free(name); 00245 return -1; 00246 } 00247 if (istat != 0) { 00248 /* In case of failure */ 00249 (void) free(nomvar); 00250 (void) free(nomvar_time); 00251 (void) free(nomvar_season); 00252 (void) free(name); 00253 return istat; 00254 } 00255 00256 /* Read precip_index data (precipitation index for learning period over all regression points) */ 00257 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_index, i+1); 00258 istat = read_netcdf_var_2d(&(data->learning->data[i].precip_index), (info_field_struct *) NULL, (proj_struct *) NULL, 00259 data->learning->filename_open_learn, 00260 nomvar, data->conf->ptsname, nomvar_time, 00261 &npts, &(data->learning->data[i].ntime), TRUE); 00262 /* Verify that points dimension match configuration value */ 00263 if (npts != data->reg->npts) { 00264 (void) fprintf(stderr, "%s: ERROR: Incorrect number of points in NetCDF file %d vs configuration file %d.\n", 00265 __FILE__, npts, data->reg->npts); 00266 (void) free(nomvar); 00267 (void) free(nomvar_time); 00268 (void) free(nomvar_season); 00269 (void) free(name); 00270 return -1; 00271 } 00272 if (istat != 0) { 00273 /* In case of failure */ 00274 (void) free(nomvar); 00275 (void) free(nomvar_time); 00276 (void) free(nomvar_season); 00277 (void) free(name); 00278 return 
istat; 00279 } 00280 00281 /* Read optional precip_reg_err data (regression residuals for learning period over all regression points) */ 00282 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_err, i+1); 00283 istat = read_netcdf_var_2d(&(data->learning->data[i].precip_reg_err), (info_field_struct *) NULL, (proj_struct *) NULL, 00284 data->learning->filename_open_learn, 00285 nomvar, data->conf->ptsname, nomvar_time, 00286 &npts, &(data->learning->data[i].ntime), TRUE); 00287 if (istat != 0) { 00288 /* In case of failure */ 00289 /* Support the fact that this variable is not in pre-1.5.15 dsclim version output files, so it is optional */ 00290 data->learning->data[i].precip_reg_err = NULL; 00291 (void) fprintf(stderr, "%s: WARNING: Old learning file without precip_reg_err data.\n", __FILE__); 00292 } 00293 else { 00294 /* Verify that points dimension match configuration value */ 00295 if (npts != data->reg->npts) { 00296 (void) fprintf(stderr, "%s: ERROR: Incorrect number of points in NetCDF file %d vs configuration file %d.\n", 00297 __FILE__, npts, data->reg->npts); 00298 (void) free(nomvar); 00299 (void) free(nomvar_time); 00300 (void) free(nomvar_season); 00301 (void) free(name); 00302 return -1; 00303 } 00304 } 00305 00306 /* Read cluster distances data (normalized distances for learning period over all clusters) */ 00307 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_dist, i+1); 00308 (void) sprintf(name, "%s_%d", data->conf->clustname, i+1); 00309 istat = read_netcdf_var_2d(&(data->learning->data[i].precip_reg_dist), (info_field_struct *) NULL, (proj_struct *) NULL, 00310 data->learning->filename_open_learn, 00311 nomvar, name, nomvar_time, 00312 &nclusters, &(data->learning->data[i].ntime), TRUE); 00313 if (istat != 0) { 00314 /* In case of failure */ 00315 /* Support the fact that this variable is not in pre-1.5.15 dsclim version output files, so it is optional */ 00316 data->learning->data[i].precip_reg_dist = NULL; 00317 (void) fprintf(stderr, "%s: WARNING: Old learning file without cluster distances data.\n", __FILE__); 00318 } 00319 else { 00320 /* If clusters dimension is not initialized, use retrieved info from input file */ 00321 if (data->conf->season[i].nclusters == -1) 00322 data->conf->season[i].nclusters = nclusters; 00323 /* Else verify that they match */ 00324 else if (data->conf->season[i].nclusters != nclusters) { 00325 (void) fprintf(stderr, "%s: ERROR: Incorrect number of clusters in NetCDF file. 
Season %d, nclusters=%d vs configuration file %d.\n", 00326 __FILE__, i, nclusters, data->conf->season[i].nclusters); 00327 (void) free(nomvar); 00328 (void) free(nomvar_time); 00329 (void) free(nomvar_season); 00330 (void) free(name); 00331 return -1; 00332 } 00333 } 00334 00335 /* Read cluster allocation data */ 00336 bufd = NULL; 00337 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_class_clusters, i+1); 00338 istat = read_netcdf_var_1d(&bufd, (info_field_struct *) NULL, 00339 data->learning->filename_open_clust_learn, nomvar, nomvar_time, 00340 &(data->learning->data[i].ntime), TRUE); 00341 if (istat != 0) { 00342 /* In case of failure */ 00343 (void) free(nomvar); 00344 (void) free(nomvar_time); 00345 (void) free(nomvar_season); 00346 (void) free(name); 00347 return istat; 00348 } 00349 /* Transfer data into proper data structure */ 00350 data->learning->data[i].class_clusters = malloc(data->learning->data[i].ntime * sizeof(int)); 00351 if (data->learning->data[i].class_clusters == NULL) alloc_error(__FILE__, __LINE__); 00352 for (ii=0; ii<data->learning->data[i].ntime; ii++) 00353 data->learning->data[i].class_clusters[ii] = bufd[ii]; 00354 (void) free(bufd); 00355 00356 /* Read sup_index data (secondary large-scale field index for learning period) */ 00357 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_sup_index, i+1); 00358 istat = read_netcdf_var_1d(&(data->learning->data[i].sup_index), (info_field_struct *) NULL, 00359 data->learning->filename_open_learn, nomvar, nomvar_time, 00360 &(data->learning->data[i].ntime), TRUE); 00361 if (istat != 0) { 00362 /* In case of failure */ 00363 (void) free(nomvar); 00364 (void) free(nomvar_time); 00365 (void) free(nomvar_season); 00366 (void) free(name); 00367 return istat; 00368 } 00369 00370 if (data->conf->season[i].secondary_cov == TRUE) { 00371 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_sup_val, i+1); 00372 istat = read_netcdf_var_3d(&(data->learning->data[i].sup_val), (info_field_struct *) NULL, (proj_struct *) NULL, 00373 data->learning->filename_open_learn, 00374 nomvar, data->learning->sup_lonname, data->learning->sup_latname, nomvar_time, 00375 &(data->learning->sup_nlon), &(data->learning->sup_nlat), &(data->learning->data[i].ntime), TRUE); 00376 if (istat != 0) { 00377 /* In case of failure */ 00378 (void) free(nomvar); 00379 (void) free(nomvar_time); 00380 (void) free(nomvar_season); 00381 (void) free(name); 00382 return istat; 00383 } 00384 } 00385 else 00386 data->learning->data[i].sup_val = NULL; 00387 00388 00391 /* Read sup_index_mean data (secondary large-scale field index spatial mean for learning period) */ 00392 istat = read_netcdf_var_generic_val(&(data->learning->data[i].sup_index_mean), (info_field_struct *) NULL, 00393 data->learning->filename_open_learn, data->learning->nomvar_sup_index_mean, i); 00394 if (istat != 0) { 00395 /* In case of failure */ 00396 (void) free(nomvar); 00397 (void) free(nomvar_time); 00398 (void) free(nomvar_season); 00399 (void) free(name); 00400 return istat; 00401 } 00402 00403 /* Read sup_index_var data (secondary large-scale field index spatial variance for learning period) */ 00404 istat = read_netcdf_var_generic_val(&(data->learning->data[i].sup_index_var), (info_field_struct *) NULL, 00405 data->learning->filename_open_learn, data->learning->nomvar_sup_index_var, i); 00406 if (istat != 0) { 00407 /* In case of failure */ 00408 (void) free(nomvar); 00409 (void) free(nomvar_time); 00410 (void) free(nomvar_season); 00411 (void) free(name); 00412 return istat; 
00413 } 00414 } 00415 00418 /* Allocate memory and set pointers to NULL for realloc use */ 00419 time_index = (size_t *) malloc(data->conf->nseasons * sizeof(size_t)); 00420 if (time_index == NULL) alloc_error(__FILE__, __LINE__); 00421 data->learning->time_s->year = NULL; 00422 data->learning->time_s->month = NULL; 00423 data->learning->time_s->day = NULL; 00424 data->learning->time_s->hour = NULL; 00425 data->learning->time_s->minutes = NULL; 00426 data->learning->time_s->seconds = NULL; 00427 00428 /* Sort the vector, get the sorted vector indexes */ 00429 time_sort = (double *) malloc(data->conf->nseasons * sizeof(double)); 00430 if (time_sort == NULL) alloc_error(__FILE__, __LINE__); 00431 /* Loop over seasons */ 00432 for (i=0; i<data->conf->nseasons; i++) 00433 time_sort[i] = data->learning->data[i].time[0]; 00434 /* Sorting */ 00435 (void) gsl_sort_index(time_index, time_sort, 1, (size_t) data->conf->nseasons); 00436 (void) free(time_sort); 00437 00438 /* Merge time info */ 00439 total_t = 0; 00440 for (i=0; i<data->conf->nseasons; i++) { 00441 for (t=0; t<data->learning->data[time_index[i]].ntime; t++) { 00442 data->learning->time_s->year = (int *) realloc(data->learning->time_s->year, (total_t+1) * sizeof(int)); 00443 if (data->learning->time_s->year == NULL) alloc_error(__FILE__, __LINE__); 00444 data->learning->time_s->month = (int *) realloc(data->learning->time_s->month, (total_t+1) * sizeof(int)); 00445 if (data->learning->time_s->month == NULL) alloc_error(__FILE__, __LINE__); 00446 data->learning->time_s->day = (int *) realloc(data->learning->time_s->day, (total_t+1) * sizeof(int)); 00447 if (data->learning->time_s->day == NULL) alloc_error(__FILE__, __LINE__); 00448 data->learning->time_s->hour = (int *) realloc(data->learning->time_s->hour, (total_t+1) * sizeof(int)); 00449 if (data->learning->time_s->hour == NULL) alloc_error(__FILE__, __LINE__); 00450 data->learning->time_s->minutes = (int *) realloc(data->learning->time_s->minutes, (total_t+1) * sizeof(int)); 00451 if (data->learning->time_s->minutes == NULL) alloc_error(__FILE__, __LINE__); 00452 data->learning->time_s->seconds = (double *) realloc(data->learning->time_s->seconds, (total_t+1) * sizeof(double)); 00453 if (data->learning->time_s->seconds == NULL) alloc_error(__FILE__, __LINE__); 00454 data->learning->time_s->year[total_t] = data->learning->data[time_index[i]].time_s->year[t]; 00455 data->learning->time_s->month[total_t] = data->learning->data[time_index[i]].time_s->month[t]; 00456 data->learning->time_s->day[total_t] = data->learning->data[time_index[i]].time_s->day[t]; 00457 data->learning->time_s->hour[total_t] = data->learning->data[time_index[i]].time_s->hour[t]; 00458 data->learning->time_s->minutes[total_t] = data->learning->data[time_index[i]].time_s->minutes[t]; 00459 data->learning->time_s->seconds[total_t] = data->learning->data[time_index[i]].time_s->seconds[t]; 00460 total_t++; 00461 } 00462 } 00463 /* Save total number of times */ 00464 data->learning->ntime = total_t; 00465 00466 /* Free temporary vector */ 00467 (void) free(time_index); 00468 00469 /* Read pc_normalized_var data (normalized EOF-projected large-scale field variance for learning period) */ 00470 istat = read_netcdf_var_1d(&(data->learning->pc_normalized_var), (info_field_struct *) NULL, 00471 data->learning->filename_open_learn, data->learning->nomvar_pc_normalized_var, data->conf->eofname, 00472 &neof, TRUE); 00473 if (neof != neof_file) { 00474 /* Verify that EOF dimension match configuration value */ 00475 (void) 
fprintf(stderr, "%s: ERROR: Number of EOFs in learning weight datafile (%d) is not equal to number of EOFs in learning datafile (%d)!\n", __FILE__, neof, neof_file); 00476 (void) free(nomvar); 00477 (void) free(nomvar_time); 00478 (void) free(nomvar_season); 00479 (void) free(name); 00480 return -1; 00481 } 00482 if (istat != 0) { 00483 /* In case of failure */ 00484 (void) free(nomvar); 00485 (void) free(nomvar_time); 00486 (void) free(nomvar_season); 00487 (void) free(name); 00488 return istat; 00489 } 00490 /* The square-root of variance is stored: convert back to variance */ 00491 for (ii=0; ii<neof; ii++) 00492 data->learning->pc_normalized_var[ii] = data->learning->pc_normalized_var[ii] * data->learning->pc_normalized_var[ii]; 00493 00494 /* Free memory */ 00495 (void) free(nomvar); 00496 (void) free(nomvar_time); 00497 (void) free(nomvar_season); 00498 (void) free(name); 00499 00500 /* Success status */ 00501 return 0; 00502 }
int read_learning_obs_eof (data_struct *data)
Read observation data EOFs for learning period.
Currently only NetCDF is implemented.
[in] data: MASTER data structure.
Definition at line 67 of file read_learning_obs_eof.c.
References alloc_error(), time_vect_struct::day, learning_eof_struct::eof, learning_eof_struct::filename_eof, get_time_info(), time_vect_struct::hour, data_struct::learning, time_vect_struct::minutes, time_vect_struct::month, learning_eof_struct::nomvar_eof, learning_eof_struct::nomvar_sing, learning_struct::nomvar_time, learning_eof_struct::ntime, learning_struct::ntime, learning_struct::obs, learning_struct::obs_eofname, learning_struct::obs_neof, learning_struct::obs_timename, read_netcdf_var_1d(), read_netcdf_var_2d(), time_vect_struct::seconds, learning_eof_struct::sing, learning_struct::time_s, learning_eof_struct::time_s, TRUE, and time_vect_struct::year.
Referenced by wt_learning().
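As a hedged usage sketch (the real call sequence lives in wt_learning(); this fragment only assumes that load_conf() has already filled the learning structure, including obs->filename_eof and obs_neof):

int istat;

istat = read_learning_obs_eof(&data);
if (istat != 0) {
  (void) fprintf(stderr, "Cannot read observation EOFs for the learning period.\n");
  return istat;
}
/* On success, data.learning->obs->eof holds obs_neof * ntime values with
   time as the fastest-varying dimension, and data.learning->time_s has
   been filled from the time coordinate of the EOF file. */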
00067 { 00074 int istat; /* Diagnostic status */ 00075 int neof; /* EOF dimension for main large-scale fields in input file */ 00076 int ntime; /* Time dimension for main large-scale fields in input file */ 00077 double *time = NULL; 00078 char *cal_type = NULL; /* Calendar type (udunits) */ 00079 char *time_units = NULL; /* Time units (udunits) */ 00080 double *buf = NULL; 00081 int i; 00082 int t; 00083 int eof; 00084 00085 if (data->learning->obs_neof != 0) { 00086 /* Only read variable if considering observation EOFs */ 00087 00088 /* Read EOF */ 00089 istat = read_netcdf_var_2d(&buf, (info_field_struct *) NULL, (proj_struct *) NULL, 00090 data->learning->obs->filename_eof, data->learning->obs->nomvar_eof, 00091 data->learning->obs_eofname, data->learning->obs_timename, &neof, &ntime, TRUE); 00092 if (istat != 0) { 00093 /* In case of failure */ 00094 (void) free(buf); 00095 return istat; 00096 } 00097 if (data->learning->obs_neof != neof) { 00098 (void) fprintf(stderr, "%s: ERROR: Number of EOFs (%d) for observation %s field from EOF file (%s) is not equal to number of EOFs specified in XML configuration file for observation fields (%d)!\n", __FILE__, neof, 00099 data->learning->obs->nomvar_eof, data->learning->obs->filename_eof, data->learning->obs_neof); 00100 (void) free(buf); 00101 return -1; 00102 } 00103 /* Re-order array with time as fastest varying dimension */ 00104 data->learning->obs->eof = malloc(neof*ntime * sizeof(double)); 00105 if (data->learning->obs->eof == NULL) alloc_error(__FILE__, __LINE__); 00106 for (eof=0; eof<neof; eof++) 00107 for (t=0; t<ntime; t++) 00108 data->learning->obs->eof[t+eof*ntime] = buf[eof+t*neof]; 00109 (void) free(buf); 00110 00111 /* Read Singular Values */ 00112 istat = read_netcdf_var_1d(&(data->learning->obs->sing), (info_field_struct *) NULL, 00113 data->learning->obs->filename_eof, data->learning->obs->nomvar_sing, 00114 data->learning->obs_eofname, &neof, TRUE); 00115 if (istat != 0) { 00116 /* In case of failure */ 00117 return istat; 00118 } 00119 if (data->learning->obs_neof != neof) { 00120 (void) fprintf(stderr, "%s: ERROR: Number of EOFs (%d) for observation %s field from EOF file (%s) is not equal to number of EOFs specified in XML configuration file for observation fields (%d)!\n", __FILE__, neof, 00121 data->learning->obs->nomvar_sing, data->learning->obs->filename_eof, data->learning->obs_neof); 00122 return -1; 00123 } 00124 } 00125 00126 /* Get time information */ 00127 istat = get_time_info(data->learning->obs->time_s, &time, &time_units, &cal_type, 00128 &ntime, data->learning->obs->filename_eof, data->learning->nomvar_time, TRUE); 00129 (void) free(cal_type); 00130 (void) free(time_units); 00131 (void) free(time); 00132 if (istat < 0) { 00133 (void) free(data->learning->obs->time_s); 00134 return -1; 00135 } 00136 00137 data->learning->ntime = ntime; 00138 data->learning->obs->ntime = ntime; 00139 00140 /* Copy into main time structure */ 00141 data->learning->time_s->year = (int *) malloc(ntime * sizeof(int)); 00142 if (data->learning->time_s->year == NULL) alloc_error(__FILE__, __LINE__); 00143 data->learning->time_s->month = (int *) malloc(ntime * sizeof(int)); 00144 if (data->learning->time_s->month == NULL) alloc_error(__FILE__, __LINE__); 00145 data->learning->time_s->day = (int *) malloc(ntime * sizeof(int)); 00146 if (data->learning->time_s->day == NULL) alloc_error(__FILE__, __LINE__); 00147 data->learning->time_s->hour = (int *) malloc(ntime * sizeof(int)); 00148 if (data->learning->time_s->hour == NULL) 
alloc_error(__FILE__, __LINE__); 00149 data->learning->time_s->minutes = (int *) malloc(ntime * sizeof(int)); 00150 if (data->learning->time_s->minutes == NULL) alloc_error(__FILE__, __LINE__); 00151 data->learning->time_s->seconds = (double *) malloc(ntime * sizeof(double)); 00152 if (data->learning->time_s->seconds == NULL) alloc_error(__FILE__, __LINE__); 00153 00154 for (i=0; i<ntime; i++) { 00155 data->learning->time_s->year[i] = data->learning->obs->time_s->year[i]; 00156 data->learning->time_s->month[i] = data->learning->obs->time_s->month[i]; 00157 data->learning->time_s->day[i] = data->learning->obs->time_s->day[i]; 00158 data->learning->time_s->hour[i] = data->learning->obs->time_s->hour[i]; 00159 data->learning->time_s->minutes[i] = data->learning->obs->time_s->minutes[i]; 00160 data->learning->time_s->seconds[i] = data->learning->obs->time_s->seconds[i]; 00161 } 00162 00163 /* Diagnostic status */ 00164 return 0; 00165 }
int read_learning_rea_eof (data_struct *data)
Read reanalysis data EOFs for learning period.
Currently only NetCDF is implemented.
[in] data: MASTER data structure.
Definition at line 67 of file read_learning_rea_eof.c.
References alloc_error(), data_struct::conf, learning_eof_struct::eof, conf_struct::eofname, learning_eof_struct::filename_eof, get_time_info(), data_struct::learning, learning_eof_struct::nomvar_eof, learning_eof_struct::nomvar_sing, learning_struct::nomvar_time, learning_eof_struct::ntime, learning_struct::rea, learning_struct::rea_neof, learning_struct::rea_timename, read_netcdf_var_1d(), read_netcdf_var_2d(), learning_eof_struct::sing, learning_eof_struct::time_s, and TRUE.
Referenced by wt_learning().
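After a successful call, the reanalysis principal components are stored with time as the fastest-varying dimension, so the component of EOF e at time step t is indexed as in this hypothetical access sketch (indices are assumed to be within bounds):

/* Hypothetical access pattern after read_learning_rea_eof() succeeds */
int e = 2;                                   /* EOF index, 0 <= e < rea_neof */
int t = 100;                                 /* time index, 0 <= t < rea->ntime */
double pc   = data.learning->rea->eof[t + e * data.learning->rea->ntime];
double sing = data.learning->rea->sing[e];   /* matching singular value */
(void) printf("EOF %d, day %d: pc=%f (singular value %f)\n", e, t, pc, sing);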
00067 { 00074 int istat; /* Diagnostic status */ 00075 int neof; /* EOF dimension for main large-scale fields in input file */ 00076 int ntime; /* Time dimension for main large-scale fields in input file */ 00077 double *time; 00078 char *cal_type = NULL; /* Calendar type (udunits) */ 00079 char *time_units = NULL; /* Time units (udunits) */ 00080 double *buf = NULL; 00081 int t; 00082 int eof; 00083 00084 /* Read EOF principal components */ 00085 istat = read_netcdf_var_2d(&buf, (info_field_struct *) NULL, (proj_struct *) NULL, 00086 data->learning->rea->filename_eof, data->learning->rea->nomvar_eof, 00087 data->conf->eofname, data->learning->rea_timename, &neof, &ntime, TRUE); 00088 if (istat != 0) { 00089 /* In case of failure */ 00090 return istat; 00091 } 00092 if (data->learning->rea_neof != neof) { 00093 (void) fprintf(stderr, "%s: ERROR: Number of EOFs (%d) for reanalysis %s field from EOF file (%s) is not equal to number of EOFs specified in XML configuration file for reanalysis fields (%d)!\n", __FILE__, neof, 00094 data->learning->rea->nomvar_eof, data->learning->rea->filename_eof, data->learning->rea_neof); 00095 return -1; 00096 } 00097 /* Re-order array with time as fastest varying dimension */ 00098 data->learning->rea->eof = malloc(neof*ntime * sizeof(double)); 00099 if (data->learning->rea->eof == NULL) alloc_error(__FILE__, __LINE__); 00100 for (eof=0; eof<neof; eof++) 00101 for (t=0; t<ntime; t++) 00102 data->learning->rea->eof[t+eof*ntime] = buf[eof+t*neof]; 00103 (void) free(buf); 00104 00105 /* Get time information */ 00106 istat = get_time_info(data->learning->rea->time_s, &time, &time_units, &cal_type, 00107 &ntime, data->learning->rea->filename_eof, data->learning->nomvar_time, TRUE); 00108 (void) free(cal_type); 00109 (void) free(time_units); 00110 (void) free(time); 00111 if (istat != 0) { 00112 /* In case of failure */ 00113 (void) free(data->learning->rea->time_s); 00114 return istat; 00115 } 00116 00117 data->learning->rea->ntime = ntime; 00118 00119 /* Read Singular Values */ 00120 istat = read_netcdf_var_1d(&(data->learning->rea->sing), (info_field_struct *) NULL, 00121 data->learning->rea->filename_eof, data->learning->rea->nomvar_sing, 00122 data->conf->eofname, &neof, TRUE); 00123 if (istat != 0) { 00124 /* In case of failure */ 00125 (void) free(data->learning->rea->time_s); 00126 return istat; 00127 } 00128 if (data->learning->rea_neof != neof) { 00129 (void) fprintf(stderr, "%s: ERROR: Number of EOFs (%d) for reanalysis %s field from EOF file (%s) is not equal to number of EOFs specified in XML configuration file for reanalysis fields (%d)!\n", __FILE__, neof, 00130 data->learning->rea->nomvar_sing, data->learning->rea->filename_eof, data->learning->rea_neof); 00131 (void) free(data->learning->rea->time_s); 00132 return -1; 00133 } 00134 00135 /* Diagnostic status */ 00136 return 0; 00137 }
int read_mask (mask_struct *mask)
Read a mask file.
[in] mask: Mask structure.
Definition at line 67 of file read_mask.c.
References mask_struct::coords, mask_struct::dimcoords, mask_struct::dimxname, mask_struct::dimyname, FALSE, mask_struct::field, mask_struct::filename, mask_struct::lat, mask_struct::latname, mask_struct::lon, mask_struct::lonname, mask_struct::maskname, mask_struct::nlat, mask_struct::nlon, mask_struct::proj, read_netcdf_latlon(), and read_netcdf_var_2d().
Referenced by main().
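A hedged call-site sketch: the mask_struct is assumed to have been filled beforehand (filename, maskname, lonname, latname, dimxname, dimyname, plus the coordinate and projection settings omitted here), typically from the XML configuration read by load_conf().

/* mask: a mask_struct assumed already filled from the configuration */
if (read_mask(&mask) == 0)
  (void) printf("Mask grid is %d x %d points.\n", mask.nlon, mask.nlat);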
00067 { 00074 int istat; 00075 00076 if ( !strcmp(get_filename_ext(mask->filename), "nc")) { 00077 00078 /* Read latitudes and longitudes */ 00079 istat = read_netcdf_latlon(&(mask->lon), &(mask->lat), &(mask->nlon), &(mask->nlat), mask->dimcoords, mask->coords, mask->proj, 00080 mask->lonname, mask->latname, mask->dimxname, mask->dimyname, mask->filename); 00081 if (istat < 0) { 00082 (void) fprintf(stdout, "%s: ERROR reading mask file.\n", __FILE__); 00083 return istat; 00084 } 00085 00086 /* Read mask */ 00087 istat = read_netcdf_var_2d(&(mask->field), (info_field_struct *) NULL, (proj_struct *) NULL, mask->filename, mask->maskname, 00088 mask->dimxname, mask->dimyname, &(mask->nlon), &(mask->nlat), FALSE); 00089 if (istat < 0) { 00090 (void) fprintf(stdout, "%s: ERROR reading mask file.\n", __FILE__); 00091 return istat; 00092 } 00093 00094 } 00095 else if ( !strcmp(get_filename_ext(mask->filename), ".shp")) { 00096 00097 00098 00099 } 00100 00101 (void) fprintf(stdout, "%s: mask file read successfully.\n", __FILE__); 00102 00103 /* Return status */ 00104 return 0; 00105 }
int read_obs_period (double **buffer, double **lon, double **lat, double *missing_value, data_struct *data, char *varname, int *year, int *month, int *day, int *nlon, int *nlat, int ntime)
Read observation data for a given period.
[out] buffer: Output data array (nlon x nlat x ntime)
[out] lon: Output 2D longitude array
[out] lat: Output 2D latitude array
[out] missing_value: Missing value
[in] data: Main data structure
[in] varname: NetCDF variable name to read
[in] year: Year vector
[in] month: Month vector
[in] day: Day vector
[out] nlon: Longitude dimension
[out] nlat: Latitude dimension
[in] ntime: Time dimension
Definition at line 58 of file read_obs_period.c.
References var_struct::acronym, alloc_error(), data_struct::conf, info_field_struct::coordinates, proj_struct::coords, time_vect_struct::day, var_struct::delta, var_struct::dimcoords, var_struct::dimxname, var_struct::dimyname, var_struct::factor, FALSE, proj_struct::false_easting, proj_struct::false_northing, info_field_struct::fillvalue, find_str_value(), var_struct::frequency, get_time_info(), info_field_struct::grid_mapping, proj_struct::grid_mapping_name, info_field_struct::height, time_vect_struct::hour, proj_struct::lat0, proj_struct::latin1, proj_struct::latin2, var_struct::latname, proj_struct::lonc, info_field_struct::long_name, var_struct::lonname, MAXPATH, time_vect_struct::minutes, time_vect_struct::month, var_struct::month_begin, proj_struct::name, var_struct::netcdfname, var_struct::nobs_var, conf_struct::obs_var, var_struct::path, var_struct::proj, read_netcdf_latlon(), read_netcdf_var_3d_2d(), time_vect_struct::seconds, var_struct::template, var_struct::timename, TRUE, info_field_struct::units, time_vect_struct::year, and var_struct::year_digits.
Referenced by wt_learning().
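A hedged call-site sketch (wt_learning() provides the real arguments; "tas" is only a sample NetCDF variable name assumed to be listed under the observation variables of the configuration, and year, month and day are assumed to be pre-filled vectors of length ntime):

double *buffer = NULL, *lon = NULL, *lat = NULL;
double missing_value;
int nlon, nlat;
int istat;

istat = read_obs_period(&buffer, &lon, &lat, &missing_value, &data, "tas",
                        year, month, day, &nlon, &nlat, ntime);
if (istat == 0 && nlon > 0 && nlat > 0 && ntime > 0) {
  int i = 0, j = 0, t = 0;   /* first grid point, first day */
  /* value at grid point (i,j) on day t, already scaled by the configured factor/delta */
  double value = buffer[i + j*nlon + t*nlon*nlat];
  (void) printf("obs value at (0,0), day 0: %f (missing value %f)\n", value, missing_value);
}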
00059 { 00078 double *buf = NULL; /* Temporary buffer */ 00079 char *infile = NULL; /* Input filename */ 00080 int year1 = 0; /* First year of data input file */ 00081 int year2 = 0; /* End year of data input file */ 00082 double *timeval = NULL; /* Temporary time information buffer */ 00083 char *cal_type = NULL; /* Calendar type (udunits) */ 00084 char *time_units = NULL; /* Time units (udunits) */ 00085 int ntime_obs; /* Number of times dimension in observation database */ 00086 int found = FALSE; /* Used to tag if we found a specific date */ 00087 time_vect_struct *time_s = NULL; /* Time structure for observation database */ 00088 00089 info_field_struct *info = NULL; /* Temporary field information structure */ 00090 proj_struct *proj = NULL; /* Temporary field projection structure */ 00091 00092 int tmpi; /* Temporay integer value */ 00093 char *format = NULL; /* Temporay format string */ 00094 00095 int t; /* Time loop counter */ 00096 int tl; /* Time loop counter */ 00097 int var; /* Variable ID */ 00098 int istat; /* Diagnostic status */ 00099 int i; 00100 int j; 00101 00102 int ntime_file; 00103 00104 char *prev_infile = NULL; 00105 00106 /* Search variable */ 00107 var = find_str_value(varname, data->conf->obs_var->netcdfname, data->conf->obs_var->nobs_var); 00108 if (var == -1) return -2; 00109 00110 infile = (char *) malloc(MAXPATH * sizeof(char)); 00111 if (infile == NULL) alloc_error(__FILE__, __LINE__); 00112 prev_infile = (char *) malloc(MAXPATH * sizeof(char)); 00113 if (prev_infile == NULL) alloc_error(__FILE__, __LINE__); 00114 (void) strcpy(prev_infile, ""); 00115 format = (char *) malloc(MAXPATH * sizeof(char)); 00116 if (format == NULL) alloc_error(__FILE__, __LINE__); 00117 00118 info = (info_field_struct *) malloc(sizeof(info_field_struct)); 00119 if (info == NULL) alloc_error(__FILE__, __LINE__); 00120 proj = (proj_struct *) malloc(sizeof(proj_struct)); 00121 if (proj == NULL) alloc_error(__FILE__, __LINE__); 00122 00123 *lat = NULL; 00124 *lon = NULL; 00125 00126 if (data->conf->obs_var->proj->name != NULL) 00127 (void) free(data->conf->obs_var->proj->name); 00128 data->conf->obs_var->proj->name = NULL; 00129 proj->name = NULL; 00130 if (data->conf->obs_var->proj->grid_mapping_name != NULL) 00131 (void) free(data->conf->obs_var->proj->grid_mapping_name); 00132 data->conf->obs_var->proj->grid_mapping_name = NULL; 00133 proj->grid_mapping_name = NULL; 00134 00135 /* Loop over time */ 00136 for (t=0; t<ntime; t++) { 00137 00138 /* Create input filename for reading data */ 00139 (void) strcpy(format, "%s/%s/"); 00140 (void) strcat(format, data->conf->obs_var->template); 00141 if (data->conf->obs_var->month_begin != 1) { 00142 /* Months in observation files *does not* begin in January: must have 2 years in filename */ 00143 if (month[t] < data->conf->obs_var->month_begin) 00144 year1 = year[t] - 1; 00145 else 00146 year1 = year[t]; 00147 year2 = year1 + 1; 00148 if (data->conf->obs_var->year_digits == 4) 00149 (void) sprintf(infile, format, data->conf->obs_var->path, data->conf->obs_var->frequency, 00150 data->conf->obs_var->acronym[var], year1, year2); 00151 else { 00152 tmpi = year1 / 100; 00153 year1 = year1 - (tmpi*100); 00154 tmpi = year2 / 100; 00155 year2 = year2 - (tmpi*100); 00156 (void) sprintf(infile, format, data->conf->obs_var->path, data->conf->obs_var->frequency, 00157 data->conf->obs_var->acronym[var], year1, year2); 00158 } 00159 } 00160 else { 00161 /* Months in observation files begins in January: must have 1 year in filename */ 00162 if 
(data->conf->obs_var->year_digits == 4) { 00163 year1 = year[t]; 00164 (void) sprintf(infile, format, data->conf->obs_var->path, data->conf->obs_var->frequency, 00165 data->conf->obs_var->acronym[var], year1); 00166 } 00167 else { 00168 tmpi = year1 / 100; 00169 year1 = year1 - (tmpi*100); 00170 (void) sprintf(infile, format, data->conf->obs_var->path, data->conf->obs_var->frequency, 00171 data->conf->obs_var->acronym[var], year1); 00172 } 00173 } 00174 00175 /* Get time information for this input file if needed */ 00176 if ( strcmp(prev_infile, infile) ) { 00177 (void) printf("%s: Reading observation data %s from %s\n", __FILE__, varname, infile); 00178 if (time_s != NULL) { 00179 (void) free(time_s->year); 00180 (void) free(time_s->month); 00181 (void) free(time_s->day); 00182 (void) free(time_s->hour); 00183 (void) free(time_s->minutes); 00184 (void) free(time_s->seconds); 00185 00186 (void) free(time_s); 00187 (void) free(cal_type); 00188 (void) free(time_units); 00189 (void) free(timeval); 00190 } 00191 00192 time_s = (time_vect_struct *) malloc(sizeof(time_vect_struct)); 00193 if (time_s == NULL) alloc_error(__FILE__, __LINE__); 00194 00195 istat = get_time_info(time_s, &timeval, &time_units, &cal_type, &ntime_obs, infile, data->conf->obs_var->timename, FALSE); 00196 if (istat < 0) { 00197 (void) free(time_s); 00198 (void) free(infile); 00199 (void) free(prev_infile); 00200 (void) free(format); 00201 (void) free(info); 00202 (void) free(proj); 00203 return -1; 00204 } 00205 } 00206 00207 /* Find date in observation database */ 00208 found = FALSE; 00209 tl = 0; 00210 while (tl<ntime_obs && found == FALSE) { 00211 if (year[t] == time_s->year[tl] && month[t] == time_s->month[tl] && day[t] == time_s->day[tl]) 00212 found = TRUE; 00213 tl++; 00214 } 00215 00216 if (found == TRUE) { 00217 00218 tl--; 00219 00220 /* Read data */ 00221 istat = read_netcdf_var_3d_2d(&buf, info, proj, infile, data->conf->obs_var->acronym[var], 00222 data->conf->obs_var->dimxname, data->conf->obs_var->dimyname, data->conf->obs_var->timename, 00223 tl, nlon, nlat, &ntime_file, FALSE); 00224 *missing_value = info->fillvalue; 00225 00226 if (data->conf->obs_var->proj->name == NULL) { 00227 /* Retrieve observation grid parameters if not done already */ 00228 data->conf->obs_var->proj->name = strdup(proj->name); 00229 data->conf->obs_var->proj->grid_mapping_name = strdup(proj->grid_mapping_name); 00230 data->conf->obs_var->proj->latin1 = proj->latin1; 00231 data->conf->obs_var->proj->latin2 = proj->latin2; 00232 data->conf->obs_var->proj->lonc = proj->lonc; 00233 data->conf->obs_var->proj->lat0 = proj->lat0; 00234 data->conf->obs_var->proj->false_easting = proj->false_easting; 00235 data->conf->obs_var->proj->false_northing = proj->false_northing; 00236 } 00237 00238 if ( (*lat) == NULL && (*lon) == NULL ) { 00239 /* Get latitude and longitude coordinates information */ 00240 istat = read_netcdf_latlon(lon, lat, nlon, nlat, data->conf->obs_var->dimcoords, data->conf->obs_var->proj->coords, 00241 data->conf->obs_var->proj->name, data->conf->obs_var->lonname, 00242 data->conf->obs_var->latname, data->conf->obs_var->dimxname, 00243 data->conf->obs_var->dimyname, infile); 00244 00245 /* Allocate buffer memory given dimensions */ 00246 *buffer = (double *) malloc((*nlon)*(*nlat)*ntime * sizeof(double)); 00247 if ( (*buffer) == NULL) alloc_error(__FILE__, __LINE__); 00248 } 00249 00250 /* Transfer data */ 00251 for (j=0; j<(*nlat); j++) 00252 for (i=0; i<(*nlon); i++) 00253 if (buf[i+j*(*nlon)] != (*missing_value)) 00254 
(*buffer)[i+j*(*nlon)+t*(*nlon)*(*nlat)] = (buf[i+j*(*nlon)] * data->conf->obs_var->factor[var]) + 00255 data->conf->obs_var->delta[var]; 00256 else 00257 (*buffer)[i+j*(*nlon)+t*(*nlon)*(*nlat)] = (*missing_value); 00258 00259 /* Free allocated memory */ 00260 (void) free(proj->name); 00261 (void) free(proj->grid_mapping_name); 00262 00263 (void) free(info->grid_mapping); 00264 (void) free(info->units); 00265 (void) free(info->height); 00266 (void) free(info->coordinates); 00267 (void) free(info->long_name); 00268 00269 (void) free(buf); 00270 } 00271 else { 00272 (void) fprintf(stderr, "%s: Fatal error in algorithm: date not found: %d %d %d %d!!\n", __FILE__, t, year[t],month[t],day[t]); 00273 00274 /* Fatal error */ 00275 (void) free(infile); 00276 (void) free(format); 00277 00278 (void) free(info); 00279 (void) free(proj); 00280 00281 if (time_s != NULL) { 00282 (void) free(time_s->year); 00283 (void) free(time_s->month); 00284 (void) free(time_s->day); 00285 (void) free(time_s->hour); 00286 (void) free(time_s->minutes); 00287 (void) free(time_s->seconds); 00288 00289 (void) free(time_s); 00290 (void) free(cal_type); 00291 (void) free(time_units); 00292 (void) free(timeval); 00293 } 00294 00295 return -1; 00296 } 00297 (void) strcpy(prev_infile, infile); 00298 } 00299 00300 /* Free allocated memory */ 00301 if (time_s != NULL) { 00302 (void) free(time_s->year); 00303 (void) free(time_s->month); 00304 (void) free(time_s->day); 00305 (void) free(time_s->hour); 00306 (void) free(time_s->minutes); 00307 (void) free(time_s->seconds); 00308 00309 (void) free(time_s); 00310 (void) free(cal_type); 00311 (void) free(time_units); 00312 (void) free(timeval); 00313 } 00314 00315 (void) free(info); 00316 (void) free(proj); 00317 00318 (void) free(infile); 00319 (void) free(prev_infile); 00320 (void) free(format); 00321 00322 /* Success diagnostic */ 00323 return 0; 00324 }
int read_regression_points (reg_struct *reg)
Read regression point positions.
[in] reg: Regression structure.
Definition at line 67 of file read_regression_points.c.
References reg_struct::dimxname, reg_struct::dimyname, reg_struct::filename, reg_struct::lat, reg_struct::lon, reg_struct::npts, reg_struct::ptsname, read_netcdf_var_1d(), and TRUE.
Referenced by main().
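A hedged call-site sketch: the reg_struct is assumed to have been filled beforehand (filename, ptsname, dimxname, dimyname), typically from the XML configuration read by load_conf().

/* reg: a reg_struct assumed already filled from the configuration */
if (read_regression_points(&reg) == 0)
  (void) printf("Loaded %d regression points (first point: %f %f).\n",
                reg.npts, reg.lon[0], reg.lat[0]);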
00067 { 00074 int npts; 00075 int istat; 00076 00077 /* Read latitudes of points where regressions are calculated */ 00078 istat = read_netcdf_var_1d(&(reg->lat), (info_field_struct *) NULL, reg->filename, reg->dimyname, reg->ptsname, &npts, TRUE); 00079 if (istat != 0) { 00080 (void) free(reg->lat); 00081 return istat; 00082 } 00083 00084 /* Read longitudes of points where regressions are calculated */ 00085 istat = read_netcdf_var_1d(&(reg->lon), (info_field_struct *) NULL, reg->filename, reg->dimxname, reg->ptsname, &(reg->npts), TRUE); 00086 if (istat != 0 || npts != reg->npts) { 00087 (void) free(reg->lat); 00088 (void) free(reg->lon); 00089 return istat; 00090 } 00091 00092 (void) fprintf(stdout, "%s: %d regression point positions read successfully.\n", __FILE__, reg->npts); 00093 00094 /* Return status */ 00095 return 0; 00096 }
int remove_clim (data_struct *data)
Remove climatologies.
[in] data: MASTER data structure.
Definition at line 66 of file remove_clim.c.
References alloc_error(), conf_struct::cal_type, clim_info_struct::clim_filein_ls, clim_info_struct::clim_fileout_ls, conf_struct::clim_filter_type, conf_struct::clim_filter_width, field_data_struct::clim_info, clim_info_struct::clim_nomvar_ls, clim_info_struct::clim_provided, clim_info_struct::clim_remove, clim_info_struct::clim_save, conf_struct::compression, conf_struct::compression_level, data_struct::conf, info_struct::contact_email, info_struct::contact_name, info_field_struct::coordinates, proj_struct::coords, info_struct::country, create_netcdf(), info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, field_struct::data, field_data_struct::dimxname, field_data_struct::dimyname, info_struct::downscaling_forcing, proj_struct::false_easting, proj_struct::false_northing, data_struct::field, field_data_struct::field_ls, info_field_struct::fillvalue, conf_struct::format, get_calendar_ts(), info_field_struct::grid_mapping, proj_struct::grid_mapping_name, info_field_struct::height, data_struct::info, field_data_struct::info, info_struct::institution, info_struct::institution_model, proj_struct::lat0, field_struct::lat_ls, proj_struct::latin1, proj_struct::latin2, field_data_struct::latname, proj_struct::latpole, field_struct::lon_ls, proj_struct::lonc, info_field_struct::long_name, field_data_struct::lonname, proj_struct::lonpole, info_struct::member, info_struct::model, field_struct::n_ls, proj_struct::name, NCAT, field_struct::nlat_ls, field_struct::nlon_ls, field_struct::ntime_ls, info_struct::other_contact_email, info_struct::other_contact_name, field_struct::proj, read_netcdf_var_3d(), remove_seasonal_cycle(), info_struct::scenario, info_struct::scenario_co2, info_struct::software, field_struct::time_ls, conf_struct::time_units, field_data_struct::timename, TRUE, info_field_struct::units, info_struct::version, write_netcdf_dims_3d(), and write_netcdf_var_3d().
Referenced by wt_downscaling().
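The two category loops in the listing below pair each model-run category with its control-run counterpart: climatologies are computed (or read) for CTRL_FIELD_LS and CTRL_SEC_FIELD_LS first, then re-used as clim[cat+1] for FIELD_LS and SEC_FIELD_LS. A minimal sketch of that pairing, with hypothetical helper names standing in for the inlined read/compute/subtract logic:

/* Sketch of the category pairing used by remove_clim() */
int cat;
for (cat = 1; cat < NCAT; cat += 2)      /* CTRL_FIELD_LS (1), CTRL_SEC_FIELD_LS (3) */
  compute_or_read_climatology(cat);      /* hypothetical helper: fills clim[cat] from the control run */
for (cat = 0; cat < NCAT; cat += 2)      /* FIELD_LS (0), SEC_FIELD_LS (2) */
  subtract_climatology(cat, cat + 1);    /* hypothetical helper: removes the control-run clim[cat+1] */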
00066 { 00073 double *bufnoclim = NULL; /* Temporary buffer for field with climatology removed */ 00074 double **clim = NULL; /* Climatology buffer */ 00075 tstruct *timein_ts = NULL; /* Time info for input field */ 00076 int ntime_clim; /* Number of times for input field */ 00077 int nlon_file; /* Longitude dimension for input field */ 00078 int nlat_file; /* Latitude dimension for input field */ 00079 int ntime_file; /* Time dimension for input field */ 00080 00081 double fillvalue; /* Missing value */ 00082 00083 int istat = 0; /* Diagnostic status */ 00084 int i; /* Loop counter */ 00085 int j; /* Loop counter */ 00086 int cat; /* Loop counter for field category */ 00087 int ii; /* Loop counter */ 00088 info_field_struct clim_info_field; /* Information structure for climatology field */ 00089 double *timeclim = NULL; /* Time info for climatology field */ 00090 00091 /* Remove seasonal cycle: 00092 - Fix calendar and generate a gregorian calendar 00093 - Compute climatology including Feb 29th 00094 - Filter climatology 00095 - Optionally save climatology in file */ 00096 00097 /* Climatological year is 366 days */ 00098 ntime_clim = 366; 00099 00100 /* Time variable for climatological year: day timestep */ 00101 timeclim = (double *) malloc(ntime_clim * sizeof(double)); 00102 if (timeclim == NULL) alloc_error(__FILE__, __LINE__); 00103 for (j=0; j<ntime_clim; j++) 00104 timeclim[j] = (double) (j+1); 00105 00106 /* Climatology variable */ 00107 clim = (double **) malloc(NCAT * sizeof(double *)); 00108 if (clim == NULL) alloc_error(__FILE__, __LINE__); 00109 for (cat=0; cat<NCAT; cat++) 00110 clim[cat] = NULL; 00111 00112 /* Loop over all control-run large-scale field categories to process */ 00113 /* Always remove climatology from the control run and apply to corresponding fields for other downscaled runs */ 00114 for (cat=1; cat<NCAT; cat=cat+2) { 00115 00116 /* Loop over all large-scale fields */ 00117 for (i=0; i<data->field[cat].n_ls; i++) { 00118 00119 /* Allocate memory for field with climatology removed */ 00120 bufnoclim = (double *) malloc(data->field[cat].nlon_ls * data->field[cat].nlat_ls * data->field[cat].ntime_ls * sizeof(double)); 00121 if (bufnoclim == NULL) alloc_error(__FILE__, __LINE__); 00122 00123 /* Allocate memory for temporary time structure */ 00124 timein_ts = (tstruct *) malloc(data->field[cat].ntime_ls * sizeof(tstruct)); 00125 if (timein_ts == NULL) alloc_error(__FILE__, __LINE__); 00126 /* Get time info and calendar units */ 00127 istat = get_calendar_ts(timein_ts, data->conf->time_units, data->field[cat].time_ls, data->field[cat].ntime_ls); 00128 if (istat < 0) { 00129 (void) free(timein_ts); 00130 (void) free(bufnoclim); 00131 (void) free(timeclim); 00132 return -1; 00133 } 00134 00135 /* If we need to remove climatology for that field */ 00136 if (data->field[cat].data[i].clim_info->clim_remove == TRUE) { 00137 /* If climatology field is already provided */ 00138 if (data->field[cat].data[i].clim_info->clim_provided == TRUE) { 00139 /* Read climatology from NetCDF file */ 00140 istat = read_netcdf_var_3d(&(clim[cat]), &clim_info_field, (proj_struct *) NULL, 00141 data->field[cat].data[i].clim_info->clim_filein_ls, 00142 data->field[cat].data[i].clim_info->clim_nomvar_ls, 00143 data->field[cat].data[i].dimxname, data->field[cat].data[i].dimyname, 00144 data->field[cat].data[i].timename, 00145 &nlon_file, &nlat_file, &ntime_file, TRUE); 00146 if (data->field[cat].nlon_ls != nlon_file || data->field[cat].nlat_ls != nlat_file || ntime_clim != ntime_file) { 
00147 (void) fprintf(stderr, "%s: Problems in dimensions! nlat=%d nlat_file=%d nlon=%d nlon_file=%d ntime=%d ntime_file=%d\n", 00148 __FILE__, data->field[cat].nlat_ls, nlat_file, data->field[cat].nlon_ls, nlon_file, ntime_clim, ntime_file); 00149 istat = -1; 00150 } 00151 if (istat != 0) { 00152 /* In case of error in reading data */ 00153 (void) free(bufnoclim); 00154 (void) free(timein_ts); 00155 (void) free(timeclim); 00156 if (clim[cat] != NULL) (void) free(clim[cat]); 00157 return istat; 00158 } 00159 /* Get missing value */ 00160 fillvalue = clim_info_field.fillvalue; 00161 /* Free memory */ 00162 (void) free(clim_info_field.height); 00163 (void) free(clim_info_field.coordinates); 00164 (void) free(clim_info_field.grid_mapping); 00165 (void) free(clim_info_field.units); 00166 (void) free(clim_info_field.long_name); 00167 } 00168 else { 00169 /* Climatology is not provided: must calculate */ 00170 if (clim[cat] == NULL) { 00171 /* Allocate memory if not already */ 00172 clim[cat] = (double *) malloc(data->field[cat].nlon_ls * data->field[cat].nlat_ls * ntime_clim * sizeof(double)); 00173 if (clim[cat] == NULL) alloc_error(__FILE__, __LINE__); 00174 } 00175 /* Get missing value */ 00176 fillvalue = data->field[cat].data[i].info->fillvalue; 00177 } 00178 00179 /* Remove seasonal cycle by calculating filtered climatology and substracting from field values */ 00180 (void) remove_seasonal_cycle(bufnoclim, clim[cat], data->field[cat].data[i].field_ls, timein_ts, 00181 data->field[cat].data[i].info->fillvalue, 00182 data->conf->clim_filter_width, data->conf->clim_filter_type, 00183 data->field[cat].data[i].clim_info->clim_provided, 00184 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls); 00185 00186 /* If we want to save climatology in NetCDF output file for further use */ 00187 if (data->field[cat].data[i].clim_info->clim_save == TRUE) { 00188 istat = create_netcdf("Computed climatology", "Climatologie calculee", "Computed climatology", "Climatologie calculee", 00189 "climatologie,climatology", "C language", data->info->software, 00190 "Computed climatology", data->info->institution, 00191 data->info->creator_email, data->info->creator_url, data->info->creator_name, 00192 data->info->version, data->info->scenario, data->info->scenario_co2, data->info->model, 00193 data->info->institution_model, data->info->country, data->info->member, 00194 data->info->downscaling_forcing, data->info->contact_email, data->info->contact_name, 00195 data->info->other_contact_email, data->info->other_contact_name, 00196 data->field[cat].data[i].clim_info->clim_fileout_ls, TRUE, data->conf->format, data->conf->compression); 00197 if (istat != 0) { 00198 /* In case of failure */ 00199 (void) free(bufnoclim); 00200 (void) free(timein_ts); 00201 (void) free(timeclim); 00202 if (clim[cat] != NULL) (void) free(clim[cat]); 00203 return istat; 00204 } 00205 /* Write dimensions of climatology field in NetCDF output file */ 00206 istat = write_netcdf_dims_3d(data->field[cat].lon_ls, data->field[cat].lat_ls, (double *) NULL, (double *) NULL, 00207 (double *) NULL, timeclim, data->conf->cal_type, 00208 data->conf->time_units, data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_clim, 00209 "daily", data->field[cat].proj[i].name, data->field[cat].proj[i].coords, 00210 data->field[cat].proj[i].grid_mapping_name, data->field[cat].proj[i].latin1, 00211 data->field[cat].proj[i].latin2, data->field[cat].proj[i].lonc, data->field[cat].proj[i].lat0, 00212 data->field[cat].proj[i].false_easting, 
data->field[cat].proj[i].false_northing, 00213 data->field[cat].proj[i].lonpole, data->field[cat].proj[i].latpole, 00214 data->field[cat].data[i].lonname, data->field[cat].data[i].latname, 00215 data->field[cat].data[i].timename, 00216 data->field[cat].data[i].clim_info->clim_fileout_ls, TRUE); 00217 if (istat != 0) { 00218 /* In case of failure */ 00219 (void) free(bufnoclim); 00220 (void) free(timein_ts); 00221 (void) free(timeclim); 00222 if (clim[cat] != NULL) (void) free(clim[cat]); 00223 return istat; 00224 } 00225 00226 /* Write climatology field in NetCDF output file */ 00227 istat = write_netcdf_var_3d(clim[cat], fillvalue, data->field[cat].data[i].clim_info->clim_fileout_ls, 00228 data->field[cat].data[i].clim_info->clim_nomvar_ls, data->field[cat].proj[i].name, 00229 data->field[cat].data[i].lonname, data->field[cat].data[i].latname, 00230 data->field[cat].data[i].timename, 00231 data->conf->format, data->conf->compression_level, 00232 data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_clim, TRUE); 00233 if (istat != 0) { 00234 /* In case of failure */ 00235 (void) free(bufnoclim); 00236 (void) free(timein_ts); 00237 (void) free(timeclim); 00238 if (clim[cat] != NULL) (void) free(clim[cat]); 00239 return istat; 00240 } 00241 } 00242 00243 /* Copy field with climatology removed to proper variable in data structure */ 00244 for (ii=0; ii<(data->field[cat].nlon_ls * data->field[cat].nlat_ls * data->field[cat].ntime_ls); ii++) 00245 data->field[cat].data[i].field_ls[ii] = bufnoclim[ii]; 00246 } 00247 /* Free memory */ 00248 (void) free(bufnoclim); 00249 (void) free(timein_ts); 00250 } 00251 } 00252 00253 /* Loop over all non-control-run large-scale field categories to process */ 00254 /* Always remove climatology calculated with the control run and apply to corresponding fields for other downscaled runs */ 00255 for (cat=0; cat<NCAT; cat=cat+2) { 00256 00257 /* Loop over all large-scale fields */ 00258 for (i=0; i<data->field[cat].n_ls; i++) { 00259 00260 /* Allocate memory for field with climatology removed */ 00261 bufnoclim = (double *) malloc(data->field[cat].nlon_ls * data->field[cat].nlat_ls * data->field[cat].ntime_ls * sizeof(double)); 00262 if (bufnoclim == NULL) alloc_error(__FILE__, __LINE__); 00263 00264 /* Allocate memory for temporary time structure */ 00265 timein_ts = (tstruct *) malloc(data->field[cat].ntime_ls * sizeof(tstruct)); 00266 if (timein_ts == NULL) alloc_error(__FILE__, __LINE__); 00267 /* Get time info and calendar units */ 00268 istat = get_calendar_ts(timein_ts, data->conf->time_units, data->field[cat].time_ls, data->field[cat].ntime_ls); 00269 if (istat < 0) { 00270 (void) free(timein_ts); 00271 (void) free(bufnoclim); 00272 (void) free(timeclim); 00273 return -1; 00274 } 00275 00276 /* If we need to remove climatology for that field */ 00277 if (data->field[cat].data[i].clim_info->clim_remove == TRUE) { 00278 /* If climatology field is already provided */ 00279 if (data->field[cat].data[i].clim_info->clim_provided == TRUE) { 00280 /* Read climatology from NetCDF file */ 00281 istat = read_netcdf_var_3d(&(clim[cat]), &clim_info_field, (proj_struct *) NULL, 00282 data->field[cat].data[i].clim_info->clim_filein_ls, 00283 data->field[cat].data[i].clim_info->clim_nomvar_ls, 00284 data->field[cat].data[i].dimxname, data->field[cat].data[i].dimyname, 00285 data->field[cat].data[i].timename, 00286 &nlon_file, &nlat_file, &ntime_file, TRUE); 00287 if (data->field[cat].nlon_ls != nlon_file || data->field[cat].nlat_ls != nlat_file || ntime_clim != 
ntime_file) { 00288 (void) fprintf(stderr, "%s: Problems in dimensions! nlat=%d nlat_file=%d nlon=%d nlon_file=%d ntime=%d ntime_file=%d\n", 00289 __FILE__, data->field[cat].nlat_ls, nlat_file, data->field[cat].nlon_ls, nlon_file, ntime_clim, ntime_file); 00290 istat = -1; 00291 } 00292 if (istat != 0) { 00293 /* In case of error in reading data */ 00294 (void) free(bufnoclim); 00295 (void) free(timein_ts); 00296 (void) free(timeclim); 00297 if (clim[cat] != NULL) (void) free(clim[cat]); 00298 return istat; 00299 } 00300 /* Get missing value */ 00301 fillvalue = clim_info_field.fillvalue; 00302 /* Free memory */ 00303 (void) free(clim_info_field.height); 00304 (void) free(clim_info_field.coordinates); 00305 (void) free(clim_info_field.grid_mapping); 00306 (void) free(clim_info_field.units); 00307 (void) free(clim_info_field.long_name); 00308 } 00309 else { 00310 /* Climatology is not provided: must use the one calculated with control-run data */ 00311 /* Get missing value */ 00312 fillvalue = data->field[cat].data[i].info->fillvalue; 00313 } 00314 00315 /* Remove seasonal cycle by substracting control-run climatology from field values (not the clim[cat+1] */ 00316 (void) remove_seasonal_cycle(bufnoclim, clim[cat+1], data->field[cat].data[i].field_ls, timein_ts, 00317 data->field[cat].data[i].info->fillvalue, 00318 data->conf->clim_filter_width, data->conf->clim_filter_type, 00319 TRUE, 00320 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls); 00321 00322 /* If we want to save climatology in NetCDF output file for further use */ 00323 if (data->field[cat].data[i].clim_info->clim_save == TRUE) { 00324 istat = create_netcdf("Computed climatology", "Climatologie calculee", "Computed climatology", "Climatologie calculee", 00325 "climatologie,climatology", "C language", data->info->software, 00326 "Computed climatology", data->info->institution, 00327 data->info->creator_email, data->info->creator_url, data->info->creator_name, 00328 data->info->version, data->info->scenario, data->info->scenario_co2, data->info->model, 00329 data->info->institution_model, data->info->country, data->info->member, 00330 data->info->downscaling_forcing, data->info->contact_email, data->info->contact_name, 00331 data->info->other_contact_email, data->info->other_contact_name, 00332 data->field[cat].data[i].clim_info->clim_fileout_ls, TRUE, data->conf->format, data->conf->compression); 00333 if (istat != 0) { 00334 /* In case of failure */ 00335 (void) free(bufnoclim); 00336 (void) free(timein_ts); 00337 (void) free(timeclim); 00338 if (clim[cat+1] != NULL) (void) free(clim[cat+1]); 00339 return istat; 00340 } 00341 /* Write dimensions of climatology field in NetCDF output file */ 00342 istat = write_netcdf_dims_3d(data->field[cat].lon_ls, data->field[cat].lat_ls, (double *) NULL, (double *) NULL, 00343 (double *) NULL, timeclim, data->conf->cal_type, 00344 data->conf->time_units, data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_clim, 00345 "daily", data->field[cat].proj[i].name, data->field[cat].proj[i].coords, 00346 data->field[cat].proj[i].grid_mapping_name, data->field[cat].proj[i].latin1, 00347 data->field[cat].proj[i].latin2, data->field[cat].proj[i].lonc, data->field[cat].proj[i].lat0, 00348 data->field[cat].proj[i].false_easting, data->field[cat].proj[i].false_northing, 00349 data->field[cat].proj[i].lonpole, data->field[cat].proj[i].latpole, 00350 data->field[cat].data[i].lonname, data->field[cat].data[i].latname, 00351 data->field[cat].data[i].timename, 00352 
data->field[cat].data[i].clim_info->clim_fileout_ls, TRUE); 00353 if (istat != 0) { 00354 /* In case of failure */ 00355 (void) free(bufnoclim); 00356 (void) free(timein_ts); 00357 (void) free(timeclim); 00358 if (clim[cat+1] != NULL) (void) free(clim[cat+1]); 00359 return istat; 00360 } 00361 00362 /* Write climatology field in NetCDF output file */ 00363 istat = write_netcdf_var_3d(clim[cat+1], fillvalue, data->field[cat].data[i].clim_info->clim_fileout_ls, 00364 data->field[cat].data[i].clim_info->clim_nomvar_ls, data->field[cat].proj[i].name, 00365 data->field[cat].data[i].lonname, data->field[cat].data[i].latname, 00366 data->field[cat].data[i].timename, 00367 data->conf->format, data->conf->compression_level, 00368 data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_clim, TRUE); 00369 if (istat != 0) { 00370 /* In case of failure */ 00371 (void) free(bufnoclim); 00372 (void) free(timein_ts); 00373 (void) free(timeclim); 00374 if (clim[cat+1] != NULL) (void) free(clim[cat+1]); 00375 return istat; 00376 } 00377 } 00378 00379 /* Copy field with climatology removed to proper variable in data structure */ 00380 for (ii=0; ii<(data->field[cat].nlon_ls * data->field[cat].nlat_ls * data->field[cat].ntime_ls); ii++) 00381 data->field[cat].data[i].field_ls[ii] = bufnoclim[ii]; 00382 } 00383 /* Free memory */ 00384 (void) free(bufnoclim); 00385 (void) free(timein_ts); 00386 } 00387 } 00388 00389 /* Free memory */ 00390 (void) free(timeclim); 00391 for (cat=0; cat<NCAT; cat++) 00392 if (clim[cat] != NULL) (void) free(clim[cat]); 00393 (void) free(clim); 00394 00395 /* Success status */ 00396 return 0; 00397 }
void save_analog_data (analog_day_struct analog_days, double *delta, double **delta_dayschoice, double *dist, int *cluster, double *time_ls, char *filename, data_struct *data)
Save analog data information for further use.
[in] analog_days: Analog-day time indexes and dates, together with the corresponding dates being downscaled.
[in] delta: Temperature difference to apply to analog day data.
[in] delta_dayschoice: Temperature difference to apply to analog day data, for all ndayschoice analogs.
[in] dist: Distance to cluster associated with each downscaled/analog day.
[in] cluster: Cluster number associated with each downscaled/analog day.
[in] time_ls: Time values in udunits format.
[in] filename: Analog days output filename.
[in] data: MASTER data structure.
Definition at line 58 of file save_analog_data.c.
References alloc_error(), analog_day_struct::analog_dayschoice, data_struct::conf, info_struct::contact_email, info_struct::contact_name, info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, tstruct::day, analog_day_struct::day, analog_day_struct::day_s, handle_netcdf_error(), data_struct::info, info_struct::institution, analog_day_struct::metric_norm, tstruct::month, analog_day_struct::month, analog_day_struct::month_s, analog_day_struct::ndayschoice, analog_day_struct::ntime, info_struct::other_contact_email, info_struct::other_contact_name, info_struct::processor, info_struct::software, analog_day_struct::time, conf_struct::time_units, tstruct::year, analog_day_struct::year, and analog_day_struct::year_s.
Referenced by wt_downscaling().
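A hedged call-site sketch (wt_downscaling() provides the real arguments; the output filename here is only an example, and all vectors are assumed to cover analog_days.ntime downscaled days):

(void) save_analog_data(analog_days, delta, delta_dayschoice, dist, cluster,
                        time_ls, "analog_days.nc", &data);
/* The resulting NetCDF file holds, per downscaled day, the analog date,
   the delta of temperature, the normalized distance to the cluster and the
   cluster number, plus the ndayschoice pre-selection of candidate analogs. */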
00059 { 00071 int istat; /* Diagnostic status */ 00072 00073 int ncoutid; /* NetCDF output file handle ID */ 00074 int timedimoutid; /* NetCDF time dimension output ID */ 00075 int timeoutid; /* NetCDF time variable ID */ 00076 int ndayschoicedimoutid; /* NetCDF ndayschoice dimension output ID */ 00077 int ndayschoiceoutid; /* NetCDF ndayschoice variable ID */ 00078 int analogoutid; /* NetCDF analog dates variable ID */ 00079 int analogyearoutid; /* NetCDF analog dates variable ID */ 00080 int analogmonthoutid; /* NetCDF analog dates variable ID */ 00081 int analogdayoutid; /* NetCDF analog dates variable ID */ 00082 int analogyearndaysoutid; /* NetCDF analog ndayschoice dates variable ID */ 00083 int analogmonthndaysoutid; /* NetCDF analog ndayschoice dates variable ID */ 00084 int analogdayndaysoutid; /* NetCDF analog ndayschoice dates variable ID */ 00085 int metricoutid; /* NetCDF analog normalized metric variable ID */ 00086 int downscaledyearoutid; /* NetCDF downscaled dates variable ID */ 00087 int downscaledmonthoutid; /* NetCDF downscaled dates variable ID */ 00088 int downscaleddayoutid; /* NetCDF downscaled dates variable ID */ 00089 int distoutid; /* NetCDF cluster distance variable ID */ 00090 int clusteroutid; /* NetCDF cluster number variable ID */ 00091 int deltatoutid; /* NetCDF delta T variable ID */ 00092 int deltatndaysoutid; /* NetCDF delta T ndayschoice variable ID */ 00093 int vardimids[NC_MAX_VAR_DIMS]; /* NetCDF dimension IDs */ 00094 00095 int *buftmp = NULL; /* Temporary int buffer for writing data */ 00096 float *buftmpf = NULL; /* Temporary float buffer for writing data */ 00097 // double *buftmpd = NULL; /* Temporary double buffer for writing data */ 00098 int maxndays; /* Maximum number of days selected for any particular date */ 00099 00100 size_t start[2]; /* Start element when writing */ 00101 size_t count[2]; /* Count of elements to write */ 00102 00103 int fillvaluei; /* Missing value */ 00104 float fillvaluef; /* Missing value */ 00105 00106 char *tmpstr = NULL; /* Temporary string */ 00107 00108 int t; /* Time loop counter */ 00109 int i; /* Loop counter */ 00110 00111 tmpstr = (char *) malloc(200 * sizeof(char)); 00112 if (tmpstr == NULL) alloc_error(__FILE__, __LINE__); 00113 00114 /* Open NetCDF file for writing, overwrite and truncate existing file if any */ 00115 istat = nc_create(filename, NC_CLOBBER, &ncoutid); 00116 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00117 00118 /* Set global attributes */ 00119 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "processor", strlen(data->info->processor), data->info->processor); 00120 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "software", strlen(data->info->software), data->info->software); 00121 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "institution", strlen(data->info->institution), data->info->institution); 00122 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_email", strlen(data->info->creator_email), data->info->creator_email); 00123 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_url", strlen(data->info->creator_url), data->info->creator_url); 00124 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_name", strlen(data->info->creator_name), data->info->creator_name); 00125 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_email", strlen(data->info->contact_email), data->info->contact_email); 00126 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_name", strlen(data->info->contact_name), data->info->contact_name); 00127 istat = nc_put_att_text(ncoutid, 
NC_GLOBAL, "other_contact_email", strlen(data->info->other_contact_email), 00128 data->info->other_contact_email); 00129 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_name", strlen(data->info->other_contact_name), 00130 data->info->other_contact_name); 00131 00132 /* Set dimensions */ 00133 istat = nc_def_dim(ncoutid, "time", NC_UNLIMITED, &timedimoutid); 00134 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00135 00136 vardimids[0] = timedimoutid; 00137 istat = nc_def_var(ncoutid, "time", NC_INT, 1, vardimids, &timeoutid); 00138 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00139 00140 istat = sprintf(tmpstr, "gregorian"); 00141 istat = nc_put_att_text(ncoutid, timeoutid, "calendar", strlen(tmpstr), tmpstr); 00142 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00143 istat = sprintf(tmpstr, "%s", data->conf->time_units); 00144 istat = nc_put_att_text(ncoutid, timeoutid, "units", strlen(tmpstr), tmpstr); 00145 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00146 istat = sprintf(tmpstr, "time in %s", data->conf->time_units); 00147 istat = nc_put_att_text(ncoutid, timeoutid, "long_name", strlen(tmpstr), tmpstr); 00148 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00149 00150 /* Find maximum number of days in the first selection of analog days to have constant dimension size */ 00151 maxndays = analog_days.ndayschoice[0]; 00152 for (t=0; t<analog_days.ntime; t++) 00153 if (maxndays < analog_days.ndayschoice[t]) 00154 maxndays = analog_days.ndayschoice[t]; 00155 istat = nc_def_dim(ncoutid, "ndayschoice", maxndays, &ndayschoicedimoutid); 00156 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00157 00158 vardimids[0] = timedimoutid; 00159 istat = nc_def_var(ncoutid, "ndayschoice", NC_INT, 1, vardimids, &ndayschoiceoutid); 00160 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00161 00162 istat = sprintf(tmpstr, "Number of analog days selected"); 00163 istat = nc_put_att_text(ncoutid, ndayschoiceoutid, "long_name", strlen(tmpstr), tmpstr); 00164 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00165 00166 /* Set variables */ 00167 00168 /* Define downscaled day variable: year */ 00169 vardimids[0] = timedimoutid; 00170 istat = nc_def_var(ncoutid, "downscaled_date_year", NC_INT, 1, vardimids, &downscaledyearoutid); 00171 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00172 00173 fillvaluei = -1; 00174 istat = nc_put_att_int(ncoutid, downscaledyearoutid, "missing_value", NC_INT, 1, &fillvaluei); 00175 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00176 (void) sprintf(tmpstr, "time"); 00177 istat = nc_put_att_text(ncoutid, downscaledyearoutid, "coordinates", strlen(tmpstr), tmpstr); 00178 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00179 (void) sprintf(tmpstr, "%s", "year"); 00180 istat = nc_put_att_text(ncoutid, downscaledyearoutid, "units", strlen(tmpstr), tmpstr); 00181 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00182 (void) strcpy(tmpstr, "Downscaled date: year"); 00183 istat = nc_put_att_text(ncoutid, downscaledyearoutid, "long_name", strlen(tmpstr), tmpstr); 00184 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00185 00186 /* Define downscaled day variable: month */ 00187 vardimids[0] = timedimoutid; 00188 istat = nc_def_var(ncoutid, "downscaled_date_month", NC_INT, 1, 
vardimids, &downscaledmonthoutid); 00189 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00190 00191 fillvaluei = -1; 00192 istat = nc_put_att_int(ncoutid, downscaledmonthoutid, "missing_value", NC_INT, 1, &fillvaluei); 00193 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00194 (void) sprintf(tmpstr, "time"); 00195 istat = nc_put_att_text(ncoutid, downscaledmonthoutid, "coordinates", strlen(tmpstr), tmpstr); 00196 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00197 (void) sprintf(tmpstr, "%s", "month"); 00198 istat = nc_put_att_text(ncoutid, downscaledmonthoutid, "units", strlen(tmpstr), tmpstr); 00199 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00200 (void) strcpy(tmpstr, "Downscaled date: month"); 00201 istat = nc_put_att_text(ncoutid, downscaledmonthoutid, "long_name", strlen(tmpstr), tmpstr); 00202 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00203 00204 /* Define downscaled day variable: day */ 00205 vardimids[0] = timedimoutid; 00206 istat = nc_def_var(ncoutid, "downscaled_date_day", NC_INT, 1, vardimids, &downscaleddayoutid); 00207 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00208 00209 fillvaluei = -1; 00210 istat = nc_put_att_int(ncoutid, downscaleddayoutid, "missing_value", NC_INT, 1, &fillvaluei); 00211 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00212 (void) sprintf(tmpstr, "time"); 00213 istat = nc_put_att_text(ncoutid, downscaleddayoutid, "coordinates", strlen(tmpstr), tmpstr); 00214 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00215 (void) sprintf(tmpstr, "%s", "day"); 00216 istat = nc_put_att_text(ncoutid, downscaleddayoutid, "units", strlen(tmpstr), tmpstr); 00217 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00218 (void) strcpy(tmpstr, "Downscaled date: day"); 00219 istat = nc_put_att_text(ncoutid, downscaleddayoutid, "long_name", strlen(tmpstr), tmpstr); 00220 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00221 00222 /* Define analog day time variable */ 00223 vardimids[0] = timedimoutid; 00224 istat = nc_def_var(ncoutid, "analog_date", NC_INT, 1, vardimids, &analogoutid); 00225 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00226 00227 fillvaluei = -1; 00228 istat = nc_put_att_int(ncoutid, analogoutid, "missing_value", NC_INT, 1, &fillvaluei); 00229 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00230 (void) sprintf(tmpstr, "time"); 00231 istat = nc_put_att_text(ncoutid, analogoutid, "coordinates", strlen(tmpstr), tmpstr); 00232 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00233 (void) sprintf(tmpstr, "%s", data->conf->time_units); 00234 istat = nc_put_att_text(ncoutid, analogoutid, "units", strlen(tmpstr), tmpstr); 00235 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00236 (void) strcpy(tmpstr, "Analog date"); 00237 istat = nc_put_att_text(ncoutid, analogoutid, "long_name", strlen(tmpstr), tmpstr); 00238 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00239 00240 /* Define analog day variable: year */ 00241 vardimids[0] = timedimoutid; 00242 istat = nc_def_var(ncoutid, "analog_date_year", NC_INT, 1, vardimids, &analogyearoutid); 00243 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00244 00245 fillvaluei = -1; 00246 istat = nc_put_att_int(ncoutid, analogyearoutid, 
"missing_value", NC_INT, 1, &fillvaluei); 00247 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00248 (void) sprintf(tmpstr, "time"); 00249 istat = nc_put_att_text(ncoutid, analogyearoutid, "coordinates", strlen(tmpstr), tmpstr); 00250 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00251 (void) sprintf(tmpstr, "%s", "year"); 00252 istat = nc_put_att_text(ncoutid, analogyearoutid, "units", strlen(tmpstr), tmpstr); 00253 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00254 (void) strcpy(tmpstr, "Analog date: year"); 00255 istat = nc_put_att_text(ncoutid, analogyearoutid, "long_name", strlen(tmpstr), tmpstr); 00256 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00257 00258 /* Define analog day variable: month */ 00259 vardimids[0] = timedimoutid; 00260 istat = nc_def_var(ncoutid, "analog_date_month", NC_INT, 1, vardimids, &analogmonthoutid); 00261 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00262 00263 fillvaluei = -1; 00264 istat = nc_put_att_int(ncoutid, analogmonthoutid, "missing_value", NC_INT, 1, &fillvaluei); 00265 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00266 (void) sprintf(tmpstr, "time"); 00267 istat = nc_put_att_text(ncoutid, analogmonthoutid, "coordinates", strlen(tmpstr), tmpstr); 00268 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00269 (void) sprintf(tmpstr, "%s", "month"); 00270 istat = nc_put_att_text(ncoutid, analogmonthoutid, "units", strlen(tmpstr), tmpstr); 00271 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00272 (void) strcpy(tmpstr, "Analog date: month"); 00273 istat = nc_put_att_text(ncoutid, analogmonthoutid, "long_name", strlen(tmpstr), tmpstr); 00274 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00275 00276 /* Define analog day variable: day */ 00277 vardimids[0] = timedimoutid; 00278 istat = nc_def_var(ncoutid, "analog_date_day", NC_INT, 1, vardimids, &analogdayoutid); 00279 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00280 00281 fillvaluei = -1; 00282 istat = nc_put_att_int(ncoutid, analogdayoutid, "missing_value", NC_INT, 1, &fillvaluei); 00283 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00284 (void) sprintf(tmpstr, "time"); 00285 istat = nc_put_att_text(ncoutid, analogdayoutid, "coordinates", strlen(tmpstr), tmpstr); 00286 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00287 (void) sprintf(tmpstr, "%s", "day"); 00288 istat = nc_put_att_text(ncoutid, analogdayoutid, "units", strlen(tmpstr), tmpstr); 00289 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00290 (void) strcpy(tmpstr, "Analog date: day"); 00291 istat = nc_put_att_text(ncoutid, analogdayoutid, "long_name", strlen(tmpstr), tmpstr); 00292 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00293 00294 /* Define analog delta Temperature variable */ 00295 vardimids[0] = timedimoutid; 00296 istat = nc_def_var(ncoutid, "analog_delta_t", NC_FLOAT, 1, vardimids, &deltatoutid); 00297 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00298 00299 fillvaluef = -9999.0; 00300 istat = nc_put_att_float(ncoutid, deltatoutid, "missing_value", NC_FLOAT, 1, &fillvaluef); 00301 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00302 (void) sprintf(tmpstr, "time"); 00303 istat = nc_put_att_text(ncoutid, deltatoutid, "coordinates", 
strlen(tmpstr), tmpstr); 00304 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00305 (void) strcpy(tmpstr, "K"); 00306 istat = nc_put_att_text(ncoutid, deltatoutid, "units", strlen(tmpstr), tmpstr); 00307 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00308 (void) strcpy(tmpstr, "Delta of Temperature"); 00309 istat = nc_put_att_text(ncoutid, deltatoutid, "long_name", strlen(tmpstr), tmpstr); 00310 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00311 00312 /* Define analog delta Temperature ndayschoice variable */ 00313 vardimids[0] = timedimoutid; 00314 vardimids[1] = ndayschoicedimoutid; 00315 istat = nc_def_var(ncoutid, "analog_ndays_delta_t", NC_FLOAT, 2, vardimids, &deltatndaysoutid); 00316 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00317 00318 fillvaluef = -9999.0; 00319 istat = nc_put_att_float(ncoutid, deltatndaysoutid, "missing_value", NC_FLOAT, 1, &fillvaluef); 00320 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00321 (void) sprintf(tmpstr, "time ndayschoice"); 00322 istat = nc_put_att_text(ncoutid, deltatndaysoutid, "coordinates", strlen(tmpstr), tmpstr); 00323 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00324 (void) strcpy(tmpstr, "K"); 00325 istat = nc_put_att_text(ncoutid, deltatndaysoutid, "units", strlen(tmpstr), tmpstr); 00326 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00327 (void) strcpy(tmpstr, "Delta of Temperature ndays"); 00328 istat = nc_put_att_text(ncoutid, deltatndaysoutid, "long_name", strlen(tmpstr), tmpstr); 00329 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00330 00331 /* Define cluster distance variable */ 00332 vardimids[0] = timedimoutid; 00333 istat = nc_def_var(ncoutid, "cluster_distance", NC_FLOAT, 1, vardimids, &distoutid); 00334 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00335 00336 fillvaluef = -9999.0; 00337 istat = nc_put_att_float(ncoutid, distoutid, "missing_value", NC_FLOAT, 1, &fillvaluef); 00338 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00339 (void) sprintf(tmpstr, "time"); 00340 istat = nc_put_att_text(ncoutid, distoutid, "coordinates", strlen(tmpstr), tmpstr); 00341 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00342 (void) strcpy(tmpstr, "none"); 00343 istat = nc_put_att_text(ncoutid, distoutid, "units", strlen(tmpstr), tmpstr); 00344 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00345 (void) strcpy(tmpstr, "Normalized distance to cluster"); 00346 istat = nc_put_att_text(ncoutid, distoutid, "long_name", strlen(tmpstr), tmpstr); 00347 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00348 00349 /* Define cluster number variable */ 00350 vardimids[0] = timedimoutid; 00351 istat = nc_def_var(ncoutid, "cluster", NC_INT, 1, vardimids, &clusteroutid); 00352 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00353 00354 fillvaluei = -1; 00355 istat = nc_put_att_int(ncoutid, clusteroutid, "missing_value", NC_INT, 1, &fillvaluei); 00356 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00357 (void) sprintf(tmpstr, "time"); 00358 istat = nc_put_att_text(ncoutid, clusteroutid, "coordinates", strlen(tmpstr), tmpstr); 00359 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00360 (void) strcpy(tmpstr, "none"); 00361 istat = nc_put_att_text(ncoutid, clusteroutid, 
"units", strlen(tmpstr), tmpstr); 00362 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00363 (void) strcpy(tmpstr, "Cluster number"); 00364 istat = nc_put_att_text(ncoutid, clusteroutid, "long_name", strlen(tmpstr), tmpstr); 00365 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00366 00367 /* Define ndayschoice analog day variable: year */ 00368 vardimids[0] = timedimoutid; 00369 vardimids[1] = ndayschoicedimoutid; 00370 istat = nc_def_var(ncoutid, "analog_ndays_date_year", NC_INT, 2, vardimids, &analogyearndaysoutid); 00371 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00372 00373 fillvaluei = 0; 00374 istat = nc_put_att_int(ncoutid, analogyearndaysoutid, "missing_value", NC_INT, 1, &fillvaluei); 00375 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00376 (void) sprintf(tmpstr, "time ndayschoice"); 00377 istat = nc_put_att_text(ncoutid, analogyearndaysoutid, "coordinates", strlen(tmpstr), tmpstr); 00378 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00379 (void) sprintf(tmpstr, "%s", "year"); 00380 istat = nc_put_att_text(ncoutid, analogyearndaysoutid, "units", strlen(tmpstr), tmpstr); 00381 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00382 (void) strcpy(tmpstr, "Analog ndays date: year"); 00383 istat = nc_put_att_text(ncoutid, analogyearndaysoutid, "long_name", strlen(tmpstr), tmpstr); 00384 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00385 00386 /* Define ndayschoice analog day variable: month */ 00387 vardimids[0] = timedimoutid; 00388 vardimids[1] = ndayschoicedimoutid; 00389 istat = nc_def_var(ncoutid, "analog_ndays_date_month", NC_INT, 2, vardimids, &analogmonthndaysoutid); 00390 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00391 00392 fillvaluei = 0; 00393 istat = nc_put_att_int(ncoutid, analogmonthndaysoutid, "missing_value", NC_INT, 1, &fillvaluei); 00394 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00395 (void) sprintf(tmpstr, "time ndayschoice"); 00396 istat = nc_put_att_text(ncoutid, analogmonthndaysoutid, "coordinates", strlen(tmpstr), tmpstr); 00397 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00398 (void) sprintf(tmpstr, "%s", "month"); 00399 istat = nc_put_att_text(ncoutid, analogmonthndaysoutid, "units", strlen(tmpstr), tmpstr); 00400 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00401 (void) strcpy(tmpstr, "Analog ndays date: month"); 00402 istat = nc_put_att_text(ncoutid, analogmonthndaysoutid, "long_name", strlen(tmpstr), tmpstr); 00403 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00404 00405 /* Define ndayschoice analog day variable: day */ 00406 vardimids[0] = timedimoutid; 00407 vardimids[1] = ndayschoicedimoutid; 00408 istat = nc_def_var(ncoutid, "analog_ndays_date_day", NC_INT, 2, vardimids, &analogdayndaysoutid); 00409 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00410 00411 fillvaluei = 0; 00412 istat = nc_put_att_int(ncoutid, analogdayndaysoutid, "missing_value", NC_INT, 1, &fillvaluei); 00413 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00414 (void) sprintf(tmpstr, "time ndayschoice"); 00415 istat = nc_put_att_text(ncoutid, analogdayndaysoutid, "coordinates", strlen(tmpstr), tmpstr); 00416 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00417 (void) sprintf(tmpstr, "%s", "day"); 00418 istat 
= nc_put_att_text(ncoutid, analogdayndaysoutid, "units", strlen(tmpstr), tmpstr); 00419 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00420 (void) strcpy(tmpstr, "Analog ndays date: day"); 00421 istat = nc_put_att_text(ncoutid, analogdayndaysoutid, "long_name", strlen(tmpstr), tmpstr); 00422 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00423 00424 /* Define ndayschoice normalized metric */ 00425 vardimids[0] = timedimoutid; 00426 vardimids[1] = ndayschoicedimoutid; 00427 istat = nc_def_var(ncoutid, "analog_metric_norm", NC_FLOAT, 2, vardimids, &metricoutid); 00428 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00429 00430 fillvaluef = 0.0; 00431 istat = nc_put_att_float(ncoutid, metricoutid, "missing_value", NC_FLOAT, 1, &fillvaluef); 00432 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00433 (void) sprintf(tmpstr, "time ndayschoice"); 00434 istat = nc_put_att_text(ncoutid, metricoutid, "coordinates", strlen(tmpstr), tmpstr); 00435 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00436 (void) sprintf(tmpstr, "%s", "metric"); 00437 istat = nc_put_att_text(ncoutid, metricoutid, "units", strlen(tmpstr), tmpstr); 00438 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00439 (void) strcpy(tmpstr, "Analog normalized metric"); 00440 istat = nc_put_att_text(ncoutid, metricoutid, "long_name", strlen(tmpstr), tmpstr); 00441 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00442 00443 /* End definition mode */ 00444 istat = nc_enddef(ncoutid); 00445 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00446 00447 /* Write variables */ /* Write time */ 00448 start[0] = 0; 00449 start[1] = 0; 00450 count[0] = (size_t) analog_days.ntime; 00451 count[1] = 0; 00452 istat = nc_put_vara_double(ncoutid, timeoutid, start, count, time_ls); 00453 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00454 00455 /* Write ndayschoice */ 00456 start[0] = 0; 00457 start[1] = 0; 00458 count[0] = (size_t) analog_days.ntime; 00459 count[1] = 0; 00460 istat = nc_put_vara_int(ncoutid, ndayschoiceoutid, start, count, analog_days.ndayschoice); 00461 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00462 00463 /* Write downscaled dates */ 00464 start[0] = 0; 00465 start[1] = 0; 00466 count[0] = (size_t) analog_days.ntime; 00467 count[1] = 0; 00468 istat = nc_put_vara_int(ncoutid, downscaledyearoutid, start, count, analog_days.year_s); 00469 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00470 istat = nc_put_vara_int(ncoutid, downscaledmonthoutid, start, count, analog_days.month_s); 00471 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00472 istat = nc_put_vara_int(ncoutid, downscaleddayoutid, start, count, analog_days.day_s); 00473 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00474 00475 /* Write analog dates */ 00476 start[0] = 0; 00477 start[1] = 0; 00478 count[0] = (size_t) analog_days.ntime; 00479 count[1] = 0; 00480 istat = nc_put_vara_int(ncoutid, analogoutid, start, count, analog_days.time); 00481 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00482 istat = nc_put_vara_int(ncoutid, analogyearoutid, start, count, analog_days.year); 00483 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00484 istat = nc_put_vara_int(ncoutid, analogmonthoutid, start, count, analog_days.month); 00485 if 
(istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00486 istat = nc_put_vara_int(ncoutid, analogdayoutid, start, count, analog_days.day); 00487 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00488 00489 /* Write ndayschoice analog dates */ 00490 start[0] = 0; 00491 start[1] = 0; 00492 count[0] = (size_t) analog_days.ntime; 00493 count[1] = (size_t) maxndays; 00494 /* Build 2D array */ 00495 buftmp = (int *) calloc(analog_days.ntime * maxndays, sizeof(int)); 00496 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00497 for (t=0; t<analog_days.ntime; t++) 00498 for (i=0; i<analog_days.ndayschoice[t]; i++) 00499 buftmp[i+t*maxndays] = analog_days.analog_dayschoice[t][i].year; 00500 istat = nc_put_vara_int(ncoutid, analogyearndaysoutid, start, count, buftmp); 00501 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00502 for (t=0; t<analog_days.ntime; t++) 00503 for (i=0; i<analog_days.ndayschoice[t]; i++) 00504 buftmp[i+t*maxndays] = analog_days.analog_dayschoice[t][i].month; 00505 istat = nc_put_vara_int(ncoutid, analogmonthndaysoutid, start, count, buftmp); 00506 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00507 for (t=0; t<analog_days.ntime; t++) 00508 for (i=0; i<analog_days.ndayschoice[t]; i++) 00509 buftmp[i+t*maxndays] = analog_days.analog_dayschoice[t][i].day; 00510 istat = nc_put_vara_int(ncoutid, analogdayndaysoutid, start, count, buftmp); 00511 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00512 (void) free(buftmp); 00513 00514 /* Write analog normalized metric */ 00515 start[0] = 0; 00516 start[1] = 0; 00517 count[0] = (size_t) analog_days.ntime; 00518 count[1] = (size_t) maxndays; 00519 buftmpf = (float *) calloc(analog_days.ntime * maxndays, sizeof(float)); 00520 if (buftmpf == NULL) alloc_error(__FILE__, __LINE__); 00521 for (t=0; t<analog_days.ntime; t++) 00522 for (i=0; i<analog_days.ndayschoice[t]; i++) 00523 buftmpf[i+t*maxndays] = analog_days.metric_norm[t][i]; 00524 istat = nc_put_vara_float(ncoutid, metricoutid, start, count, buftmpf); 00525 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00526 (void) free(buftmpf); 00527 00528 /* Write delta of temperature */ 00529 start[0] = 0; 00530 start[1] = 0; 00531 count[0] = (size_t) analog_days.ntime; 00532 count[1] = 0; 00533 istat = nc_put_vara_double(ncoutid, deltatoutid, start, count, delta); 00534 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00535 00536 /* Write ndayschoice delta of temperature */ 00537 start[0] = 0; 00538 start[1] = 0; 00539 count[0] = (size_t) analog_days.ntime; 00540 count[1] = (size_t) maxndays; 00541 buftmpf = (float *) calloc(analog_days.ntime * maxndays, sizeof(float)); 00542 if (buftmpf == NULL) alloc_error(__FILE__, __LINE__); 00543 for (t=0; t<analog_days.ntime; t++) 00544 for (i=0; i<analog_days.ndayschoice[t]; i++) 00545 buftmpf[i+t*maxndays] = (float) delta_dayschoice[t][i]; 00546 istat = nc_put_vara_float(ncoutid, deltatndaysoutid, start, count, buftmpf); 00547 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00548 (void) free(buftmpf); 00549 00550 /* Write cluster distance */ 00551 start[0] = 0; 00552 start[1] = 0; 00553 count[0] = (size_t) analog_days.ntime; 00554 count[1] = 0; 00555 istat = nc_put_vara_double(ncoutid, distoutid, start, count, dist); 00556 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00557 00558 /* Write cluster number */ 00559 start[0] = 0; 00560 start[1] = 0; 
00561 count[0] = (size_t) analog_days.ntime; 00562 count[1] = 0; 00563 istat = nc_put_vara_int(ncoutid, clusteroutid, start, count, cluster); 00564 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00565 00566 /* Close the output netCDF file */ 00567 istat = ncclose(ncoutid); 00568 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00569 00570 (void) free(tmpstr); 00571 }
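The writer above packs the ragged per-day analog choices (analog_dayschoice, metric_norm, delta_dayschoice) into dense time x ndayschoice buffers before each nc_put_vara_* call. A minimal sketch of that packing pattern, using only standard netCDF calls and hypothetical arguments (no project types); unused slots keep the calloc() zero fill, matching the missing_value of 0 used for those variables:

#include <stdlib.h>
#include <netcdf.h>

/* Sketch only: flatten a ragged int array (ndays[t] valid entries per time step)
   into a dense [ntime x maxndays] buffer and write it in a single call.
   ncid/varid and the input arrays are assumed to be prepared elsewhere. */
static int write_ragged_int(int ncid, int varid, int ntime, int maxndays,
                            const int *ndays, int **values)
{
  size_t start[2] = { 0, 0 };
  size_t count[2] = { (size_t) ntime, (size_t) maxndays };
  int *buf;
  int istat;
  int t, i;

  buf = (int *) calloc((size_t) ntime * (size_t) maxndays, sizeof(int));
  if (buf == NULL) return NC_ENOMEM;

  for (t = 0; t < ntime; t++)
    for (i = 0; i < ndays[t]; i++)
      buf[i + t * maxndays] = values[t][i]; /* row-major: ndayschoice varies fastest */

  istat = nc_put_vara_int(ncid, varid, start, count, buf);
  free(buf);
  return istat;
}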
int write_learning_fields ( data_struct * data )
Write learning fields for later use.
[in]  data  MASTER data structure.
Definition at line 58 of file write_learning_fields.c.
References alloc_error(), learning_data_struct::class_clusters, conf_struct::clustname, data_struct::conf, info_struct::contact_email, info_struct::contact_name, info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, learning_struct::data, time_vect_struct::day, conf_struct::eofname, FALSE, learning_struct::filename_save_clust_learn, learning_struct::filename_save_learn, learning_struct::filename_save_weight, handle_netcdf_error(), time_vect_struct::hour, data_struct::info, info_struct::institution, data_struct::learning, time_vect_struct::minutes, time_vect_struct::month, season_struct::nclusters, learning_struct::nomvar_class_clusters, learning_struct::nomvar_pc_normalized_var, learning_struct::nomvar_precip_index, learning_struct::nomvar_precip_index_obs, learning_struct::nomvar_precip_reg, learning_struct::nomvar_precip_reg_acor, learning_struct::nomvar_precip_reg_cst, learning_struct::nomvar_precip_reg_dist, learning_struct::nomvar_precip_reg_err, learning_struct::nomvar_precip_reg_rsq, learning_struct::nomvar_precip_reg_vif, learning_struct::nomvar_sup_index, learning_struct::nomvar_sup_index_mean, learning_struct::nomvar_sup_index_var, learning_struct::nomvar_sup_val, learning_struct::nomvar_time, learning_struct::nomvar_weight, reg_struct::npts, conf_struct::nseasons, learning_data_struct::ntime, info_struct::other_contact_email, info_struct::other_contact_name, learning_struct::pc_normalized_var, learning_data_struct::precip_index, learning_data_struct::precip_index_obs, learning_data_struct::precip_reg, learning_data_struct::precip_reg_autocor, learning_data_struct::precip_reg_cst, learning_data_struct::precip_reg_dist, learning_data_struct::precip_reg_err, learning_data_struct::precip_reg_rsq, learning_data_struct::precip_reg_vif, info_struct::processor, conf_struct::ptsname, learning_struct::rea_neof, data_struct::reg, conf_struct::season, season_struct::secondary_cov, time_vect_struct::seconds, info_struct::software, learning_data_struct::sup_index, learning_data_struct::sup_index_mean, learning_data_struct::sup_index_var, learning_struct::sup_lat, learning_struct::sup_latname, learning_struct::sup_lon, learning_struct::sup_lonname, learning_struct::sup_nlat, learning_struct::sup_nlon, learning_data_struct::sup_val, learning_data_struct::time, learning_data_struct::time_s, conf_struct::time_units, TRUE, learning_data_struct::weight, and time_vect_struct::year.
Referenced by wt_learning().
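Every per-season dimension and variable in the definition below is named by suffixing the season index to a configurable base name (sprintf(nomvar, "%s_%d", ..., s+1)). A tiny sketch of that naming convention; the helper is illustrative and not part of the source:

#include <stdio.h>

/* Sketch only: build a season-suffixed NetCDF name such as "time_1" or "reg_2";
   the 200-character temporaries in the source play the role of 'out'. */
static void season_varname(char *out, size_t outlen, const char *base, int season)
{
  (void) snprintf(out, outlen, "%s_%d", base, season + 1); /* season names are 1-based */
}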
00058 { 00063 int istat; /* Diagnostic status */ 00064 00065 int ncoutid; /* NetCDF output file handle ID */ 00066 int *timedimoutid; /* NetCDF time dimension output ID */ 00067 int latdimoutid; /* NetCDF latitude dimension output ID */ 00068 int londimoutid; /* NetCDF longitude dimension output ID */ 00069 int sdimoutid; /* NetCDF season dimension output ID */ 00070 int eofdimoutid; /* NetCDF EOF dimension output ID */ 00071 int ptsdimoutid; /* NetCDF points dimension output ID */ 00072 int *clustdimoutid; /* NetCDF clusters dimension output ID */ 00073 int *weightdimoutid; /* NetCDF weight dimension output ID */ 00074 int *timeoutid; /* NetCDF time variable ID */ 00075 int latoutid; /* NetCDF latitude variable ID */ 00076 int lonoutid; /* NetCDF longitude variable ID */ 00077 int *cstoutid; /* NetCDF regression constant variable ID */ 00078 int *regoutid; /* NetCDF regression coefficients variable ID */ 00079 int *distoutid; /* NetCDF regression distances variable ID */ 00080 int *rrdoutid; /* NetCDF precipitation index variable ID */ 00081 int *rrooutid; /* NetCDF observed precipitation index variable ID */ 00082 int *taoutid; /* NetCDF secondary large-scale field index variable ID */ 00083 int *tadoutid; /* NetCDF secondary large-scale 2D field variable ID */ 00084 int *rsqoutid; /* NetCDF regression R^2 variable ID */ 00085 int *erroutid; /* NetCDF regression residuals variable ID */ 00086 int *acoroutid; /* NetCDF regression autocorrelation variable ID */ 00087 int *vifoutid; /* NetCDF regression VIF variable ID */ 00088 int pcoutid; /* NetCDF pc_normalized_var variable ID */ 00089 int tamoutid; /* NetCDF secondary large-scale field index mean variable ID */ 00090 int tavoutid; /* NetCDF secondary large-scale field index variance variable ID */ 00091 int *clustoutid; /* NetCDF clusters variable output ID */ 00092 int *weightoutid; /* NetCDF weight variable ID */ 00093 int vardimids[NC_MAX_VAR_DIMS]; /* NetCDF dimension IDs */ 00094 00095 size_t start[3]; /* Start element when writing */ 00096 size_t count[3]; /* Count of elements to write */ 00097 00098 char *tmpstr = NULL; /* Temporary string */ 00099 00100 ut_system *unitSystem = NULL; /* Unit System (udunits) */ 00101 ut_unit *dataunits = NULL; /* udunits variable */ 00102 00103 double fillvalue; 00104 float fillvaluef; 00105 char *nomvar = NULL; 00106 double *timeval = NULL; 00107 double *tancp_mean = NULL; 00108 double *tancp_var = NULL; 00109 double *bufd = NULL; 00110 00111 int s; 00112 int t; 00113 int ii; 00114 int cov_true = FALSE; /* Check if cov is TRUE in at least one season */ 00115 00116 tancp_mean = (double *) malloc(data->conf->nseasons * sizeof(double)); 00117 if (tancp_mean == NULL) alloc_error(__FILE__, __LINE__); 00118 tancp_var = (double *) malloc(data->conf->nseasons * sizeof(double)); 00119 if (tancp_var == NULL) alloc_error(__FILE__, __LINE__); 00120 00121 timedimoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00122 if (timedimoutid == NULL) alloc_error(__FILE__, __LINE__); 00123 clustdimoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00124 if (clustdimoutid == NULL) alloc_error(__FILE__, __LINE__); 00125 weightdimoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00126 if (weightdimoutid == NULL) alloc_error(__FILE__, __LINE__); 00127 00128 timeoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00129 if (timeoutid == NULL) alloc_error(__FILE__, __LINE__); 00130 cstoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00131 if (cstoutid == NULL) 
alloc_error(__FILE__, __LINE__); 00132 regoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00133 if (regoutid == NULL) alloc_error(__FILE__, __LINE__); 00134 distoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00135 if (distoutid == NULL) alloc_error(__FILE__, __LINE__); 00136 rrdoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00137 if (rrdoutid == NULL) alloc_error(__FILE__, __LINE__); 00138 rrooutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00139 if (rrooutid == NULL) alloc_error(__FILE__, __LINE__); 00140 taoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00141 if (taoutid == NULL) alloc_error(__FILE__, __LINE__); 00142 tadoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00143 if (tadoutid == NULL) alloc_error(__FILE__, __LINE__); 00144 clustoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00145 if (clustoutid == NULL) alloc_error(__FILE__, __LINE__); 00146 weightoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00147 if (weightoutid == NULL) alloc_error(__FILE__, __LINE__); 00148 rsqoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00149 if (rsqoutid == NULL) alloc_error(__FILE__, __LINE__); 00150 erroutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00151 if (erroutid == NULL) alloc_error(__FILE__, __LINE__); 00152 acoroutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00153 if (acoroutid == NULL) alloc_error(__FILE__, __LINE__); 00154 vifoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00155 if (vifoutid == NULL) alloc_error(__FILE__, __LINE__); 00156 00157 nomvar = (char *) malloc(200 * sizeof(char)); 00158 if (nomvar == NULL) alloc_error(__FILE__, __LINE__); 00159 tmpstr = (char *) malloc(200 * sizeof(char)); 00160 if (tmpstr == NULL) alloc_error(__FILE__, __LINE__); 00161 00162 istat = utInit(""); 00163 00164 /* Open NetCDF file for writing, overwrite and truncate existing file if any */ 00165 istat = nc_create(data->learning->filename_save_learn, NC_CLOBBER, &ncoutid); 00166 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00167 00168 /* Set global attributes */ 00169 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "processor", strlen(data->info->processor), data->info->processor); 00170 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "software", strlen(data->info->software), data->info->software); 00171 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "institution", strlen(data->info->institution), data->info->institution); 00172 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_email", strlen(data->info->creator_email), data->info->creator_email); 00173 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_url", strlen(data->info->creator_url), data->info->creator_url); 00174 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_name", strlen(data->info->creator_name), data->info->creator_name); 00175 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_email", strlen(data->info->contact_email), data->info->contact_email); 00176 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_name", strlen(data->info->contact_name), data->info->contact_name); 00177 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_email", strlen(data->info->other_contact_email), 00178 data->info->other_contact_email); 00179 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_name", strlen(data->info->other_contact_name), 00180 data->info->other_contact_name); 00181 00182 fillvalue = -9999.9; 00183 fillvaluef = -9999.9; 00184 00185 for (s=0; 
s<data->conf->nseasons; s++) 00186 if (data->conf->season[s].secondary_cov == TRUE) cov_true = TRUE; 00187 00188 /* Set dimensions */ 00189 istat = nc_def_dim(ncoutid, "season", data->conf->nseasons, &sdimoutid); 00190 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00191 istat = nc_def_dim(ncoutid, data->conf->eofname, data->learning->rea_neof, &eofdimoutid); 00192 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00193 istat = nc_def_dim(ncoutid, data->conf->ptsname, data->reg->npts, &ptsdimoutid); 00194 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00195 if (cov_true == TRUE) { 00196 istat = nc_def_dim(ncoutid, data->learning->sup_latname, data->learning->sup_nlat, &latdimoutid); 00197 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00198 istat = nc_def_dim(ncoutid, data->learning->sup_lonname, data->learning->sup_nlon, &londimoutid); 00199 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00200 } 00201 00202 if (cov_true == TRUE) { 00203 /* Define lat and lon variables */ 00204 vardimids[0] = latdimoutid; 00205 istat = nc_def_var(ncoutid, data->learning->sup_latname, NC_DOUBLE, 1, vardimids, &latoutid); 00206 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00207 istat = sprintf(tmpstr, "degrees_north"); 00208 istat = nc_put_att_text(ncoutid, latoutid, "units", strlen(tmpstr), tmpstr); 00209 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00210 istat = sprintf(tmpstr, "latitude coordinate"); 00211 istat = nc_put_att_text(ncoutid, latoutid, "long_name", strlen(tmpstr), tmpstr); 00212 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00213 istat = sprintf(tmpstr, "latitude"); 00214 istat = nc_put_att_text(ncoutid, latoutid, "standard_name", strlen(tmpstr), tmpstr); 00215 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00216 00217 vardimids[0] = londimoutid; 00218 istat = nc_def_var(ncoutid, data->learning->sup_lonname, NC_DOUBLE, 1, vardimids, &lonoutid); 00219 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00220 istat = sprintf(tmpstr, "degrees_east"); 00221 istat = nc_put_att_text(ncoutid, lonoutid, "units", strlen(tmpstr), tmpstr); 00222 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00223 istat = sprintf(tmpstr, "longitude coordinate"); 00224 istat = nc_put_att_text(ncoutid, lonoutid, "long_name", strlen(tmpstr), tmpstr); 00225 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00226 istat = sprintf(tmpstr, "longitude"); 00227 istat = nc_put_att_text(ncoutid, lonoutid, "standard_name", strlen(tmpstr), tmpstr); 00228 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00229 } 00230 00231 for (s=0; s<data->conf->nseasons; s++) { 00232 00233 /* Define time dimensions and variables */ 00234 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_time, s+1); 00235 istat = nc_def_dim(ncoutid, nomvar, data->learning->data[s].ntime, &(timedimoutid[s])); 00236 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00237 00238 vardimids[0] = timedimoutid[s]; 00239 istat = nc_def_var(ncoutid, nomvar, NC_INT, 1, vardimids, &(timeoutid[s])); 00240 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00241 00242 istat = sprintf(tmpstr, "gregorian"); 00243 istat = nc_put_att_text(ncoutid, timeoutid[s], "calendar", strlen(tmpstr), tmpstr); 00244 if (istat != NC_NOERR) 
handle_netcdf_error(istat, __FILE__, __LINE__); 00245 istat = sprintf(tmpstr, "%s", data->conf->time_units); 00246 istat = nc_put_att_text(ncoutid, timeoutid[s], "units", strlen(tmpstr), tmpstr); 00247 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00248 istat = sprintf(tmpstr, "time in %s", data->conf->time_units); 00249 istat = nc_put_att_text(ncoutid, timeoutid[s], "long_name", strlen(tmpstr), tmpstr); 00250 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00251 00252 /* Define cluster dimensions */ 00253 (void) sprintf(nomvar, "%s_%d", data->conf->clustname, s+1); 00254 istat = nc_def_dim(ncoutid, nomvar, data->conf->season[s].nclusters, &(clustdimoutid[s])); 00255 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00256 00257 /* Define regression constant variables */ 00258 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_cst, s+1); 00259 vardimids[0] = ptsdimoutid; 00260 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(cstoutid[s])); 00261 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00262 00263 istat = nc_put_att_double(ncoutid, cstoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00264 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00265 istat = nc_put_att_text(ncoutid, cstoutid[s], "coordinates", strlen(data->conf->ptsname), data->conf->ptsname); 00266 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00267 istat = sprintf(tmpstr, "none"); 00268 istat = nc_put_att_text(ncoutid, cstoutid[s], "units", strlen(tmpstr), tmpstr); 00269 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00270 00271 /* Define regression coefficients variables */ 00272 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg, s+1); 00273 vardimids[0] = clustdimoutid[s]; 00274 vardimids[1] = ptsdimoutid; 00275 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(regoutid[s])); 00276 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00277 00278 istat = nc_put_att_double(ncoutid, regoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00279 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00280 istat = sprintf(tmpstr, "%s %s_%d", data->conf->ptsname, data->learning->nomvar_class_clusters, s+1); 00281 istat = nc_put_att_text(ncoutid, regoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00282 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00283 istat = sprintf(tmpstr, "none"); 00284 istat = nc_put_att_text(ncoutid, regoutid[s], "units", strlen(tmpstr), tmpstr); 00285 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00286 00287 /* Define regression distances variables */ 00288 if (data->learning->data[s].precip_reg_dist != NULL) { 00289 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_dist, s+1); 00290 vardimids[0] = timedimoutid[s]; 00291 vardimids[1] = clustdimoutid[s]; 00292 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(distoutid[s])); 00293 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00294 00295 istat = nc_put_att_double(ncoutid, distoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00296 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00297 istat = sprintf(tmpstr, "%s_%d %s_%d", data->learning->nomvar_time, s+1, data->learning->nomvar_class_clusters, s+1); 00298 istat = nc_put_att_text(ncoutid, distoutid[s], "coordinates", 
strlen(tmpstr), tmpstr); 00299 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00300 istat = sprintf(tmpstr, "none"); 00301 istat = nc_put_att_text(ncoutid, distoutid[s], "units", strlen(tmpstr), tmpstr); 00302 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00303 } 00304 00305 /* Define regression R^2 diagnostic */ 00306 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_rsq, s+1); 00307 vardimids[0] = ptsdimoutid; 00308 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(rsqoutid[s])); 00309 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00310 00311 istat = nc_put_att_double(ncoutid, rsqoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00312 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00313 istat = nc_put_att_text(ncoutid, rsqoutid[s], "coordinates", strlen(data->conf->ptsname), data->conf->ptsname); 00314 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00315 istat = sprintf(tmpstr, "none"); 00316 istat = nc_put_att_text(ncoutid, rsqoutid[s], "units", strlen(tmpstr), tmpstr); 00317 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00318 00319 /* Define regression residuals diagnostic */ 00320 if (data->learning->data[s].precip_reg_err != NULL) { 00321 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_err, s+1); 00322 vardimids[0] = timedimoutid[s]; 00323 vardimids[1] = ptsdimoutid; 00324 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(erroutid[s])); 00325 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00326 00327 istat = nc_put_att_double(ncoutid, erroutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00328 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00329 istat = sprintf(tmpstr, "%s %s_%d", data->conf->ptsname, data->learning->nomvar_time, s+1); 00330 istat = nc_put_att_text(ncoutid, erroutid[s], "coordinates", strlen(tmpstr), tmpstr); 00331 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00332 istat = sprintf(tmpstr, "none"); 00333 istat = nc_put_att_text(ncoutid, erroutid[s], "units", strlen(tmpstr), tmpstr); 00334 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00335 } 00336 00337 /* Define regression autocorrelation diagnostic */ 00338 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_acor, s+1); 00339 vardimids[0] = ptsdimoutid; 00340 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(acoroutid[s])); 00341 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00342 00343 istat = nc_put_att_double(ncoutid, acoroutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00344 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00345 istat = nc_put_att_text(ncoutid, acoroutid[s], "coordinates", strlen(data->conf->ptsname), data->conf->ptsname); 00346 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00347 istat = sprintf(tmpstr, "none"); 00348 istat = nc_put_att_text(ncoutid, acoroutid[s], "units", strlen(tmpstr), tmpstr); 00349 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00350 00351 /* Define regression VIF diagnostic */ 00352 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_vif, s+1); 00353 vardimids[0] = clustdimoutid[s]; 00354 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(vifoutid[s])); 00355 if (istat != NC_NOERR) handle_netcdf_error(istat, 
__FILE__, __LINE__); 00356 00357 istat = nc_put_att_double(ncoutid, vifoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00358 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00359 istat = nc_put_att_text(ncoutid, vifoutid[s], "coordinates", strlen(data->learning->nomvar_class_clusters), 00360 data->learning->nomvar_class_clusters); 00361 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00362 istat = sprintf(tmpstr, "none"); 00363 istat = nc_put_att_text(ncoutid, vifoutid[s], "units", strlen(tmpstr), tmpstr); 00364 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00365 00366 /* Define precipitation index variables */ 00367 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_index, s+1); 00368 vardimids[0] = timedimoutid[s]; 00369 vardimids[1] = ptsdimoutid; 00370 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(rrdoutid[s])); 00371 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00372 00373 istat = nc_put_att_double(ncoutid, rrdoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00374 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00375 istat = sprintf(tmpstr, "%s %s_%d", data->conf->ptsname, data->learning->nomvar_time, s+1); 00376 istat = nc_put_att_text(ncoutid, rrdoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00377 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00378 istat = sprintf(tmpstr, "none"); 00379 istat = nc_put_att_text(ncoutid, rrdoutid[s], "units", strlen(tmpstr), tmpstr); 00380 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00381 00382 /* Define precipitation index obs variables */ 00383 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_index_obs, s+1); 00384 vardimids[0] = timedimoutid[s]; 00385 vardimids[1] = ptsdimoutid; 00386 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(rrooutid[s])); 00387 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00388 00389 istat = nc_put_att_double(ncoutid, rrooutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00390 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00391 istat = sprintf(tmpstr, "%s %s_%d", data->conf->ptsname, data->learning->nomvar_time, s+1); 00392 istat = nc_put_att_text(ncoutid, rrooutid[s], "coordinates", strlen(tmpstr), tmpstr); 00393 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00394 istat = sprintf(tmpstr, "none"); 00395 istat = nc_put_att_text(ncoutid, rrooutid[s], "units", strlen(tmpstr), tmpstr); 00396 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00397 00398 /* Define sup_index (secondary large-scale field index for learning period) */ 00399 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_sup_index, s+1); 00400 vardimids[0] = timedimoutid[s]; 00401 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(taoutid[s])); 00402 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00403 00404 istat = nc_put_att_double(ncoutid, taoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00405 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00406 istat = sprintf(tmpstr, "%s_%d", data->learning->nomvar_time, s+1); 00407 istat = nc_put_att_text(ncoutid, taoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00408 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00409 istat = sprintf(tmpstr, "none"); 00410 istat = nc_put_att_text(ncoutid, 
taoutid[s], "units", strlen(tmpstr), tmpstr); 00411 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00412 00413 /* Define sup_val (secondary large-scale 2D field for learning period) */ 00414 if (data->conf->season[s].secondary_cov == TRUE && data->learning->data[s].sup_val != NULL) { 00415 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_sup_val, s+1); 00416 vardimids[0] = timedimoutid[s]; 00417 vardimids[1] = latdimoutid; 00418 vardimids[2] = londimoutid; 00419 istat = nc_def_var(ncoutid, nomvar, NC_FLOAT, 3, vardimids, &(tadoutid[s])); 00420 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00421 00422 istat = nc_put_att_double(ncoutid, tadoutid[s], "missing_value", NC_FLOAT, 1, &fillvalue); 00423 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00424 istat = sprintf(tmpstr, "%s_%d %s %s", data->learning->nomvar_time, s+1, data->learning->sup_latname, data->learning->sup_lonname); 00425 istat = nc_put_att_text(ncoutid, tadoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00426 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00427 istat = sprintf(tmpstr, "none"); 00428 istat = nc_put_att_text(ncoutid, tadoutid[s], "units", strlen(tmpstr), tmpstr); 00429 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00430 } 00431 } 00432 00433 /* Define pc_normalized_var */ 00434 (void) strcpy(nomvar, data->learning->nomvar_pc_normalized_var); 00435 vardimids[0] = eofdimoutid; 00436 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &pcoutid); 00437 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00438 00439 istat = nc_put_att_double(ncoutid, pcoutid, "missing_value", NC_DOUBLE, 1, &fillvalue); 00440 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00441 istat = nc_put_att_text(ncoutid, pcoutid, "coordinates", strlen(data->conf->eofname), data->conf->eofname); 00442 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00443 istat = sprintf(tmpstr, "none"); 00444 istat = nc_put_att_text(ncoutid, pcoutid, "units", strlen(tmpstr), tmpstr); 00445 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00446 00447 /* Define tancp_mean */ 00448 (void) strcpy(nomvar, data->learning->nomvar_sup_index_mean); 00449 vardimids[0] = sdimoutid; 00450 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &tamoutid); 00451 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00452 00453 istat = nc_put_att_double(ncoutid, tamoutid, "missing_value", NC_DOUBLE, 1, &fillvalue); 00454 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00455 istat = sprintf(tmpstr, "season"); 00456 istat = nc_put_att_text(ncoutid, tamoutid, "coordinates", strlen(tmpstr), tmpstr); 00457 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00458 istat = sprintf(tmpstr, "none"); 00459 istat = nc_put_att_text(ncoutid, tamoutid, "units", strlen(tmpstr), tmpstr); 00460 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00461 00462 /* Define tancp_var */ 00463 (void) strcpy(nomvar, data->learning->nomvar_sup_index_var); 00464 vardimids[0] = sdimoutid; 00465 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &tavoutid); 00466 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00467 00468 istat = nc_put_att_double(ncoutid, tavoutid, "missing_value", NC_DOUBLE, 1, &fillvalue); 00469 if (istat != NC_NOERR) handle_netcdf_error(istat, 
__FILE__, __LINE__); 00470 istat = sprintf(tmpstr, "season"); 00471 istat = nc_put_att_text(ncoutid, tavoutid, "coordinates", strlen(tmpstr), tmpstr); 00472 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00473 istat = sprintf(tmpstr, "none"); 00474 istat = nc_put_att_text(ncoutid, tavoutid, "units", strlen(tmpstr), tmpstr); 00475 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00476 00477 /* End definition mode */ 00478 istat = nc_enddef(ncoutid); 00479 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00480 00481 /* Write variables */ 00482 00483 if (cov_true == TRUE) { 00484 /* Write lat and lon */ 00485 start[0] = 0; 00486 start[1] = 0; 00487 start[2] = 0; 00488 count[0] = (size_t) data->learning->sup_nlat; 00489 count[1] = 0; 00490 count[2] = 0; 00491 istat = nc_put_vara_double(ncoutid, latoutid, start, count, data->learning->sup_lat); 00492 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00493 00494 start[0] = 0; 00495 start[1] = 0; 00496 start[2] = 0; 00497 count[0] = (size_t) data->learning->sup_nlon; 00498 count[1] = 0; 00499 count[2] = 0; 00500 istat = nc_put_vara_double(ncoutid, lonoutid, start, count, data->learning->sup_lon); 00501 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00502 } 00503 00504 /* Initialize udunits */ 00505 ut_set_error_message_handler(ut_ignore); 00506 unitSystem = ut_read_xml(NULL); 00507 ut_set_error_message_handler(ut_write_to_stderr); 00508 dataunits = ut_parse(unitSystem, data->conf->time_units, UT_ASCII); 00509 00510 timeval = NULL; 00511 for (s=0; s<data->conf->nseasons; s++) { 00512 00513 timeval = (double *) realloc(timeval, data->learning->data[s].ntime * sizeof(double)); 00514 if (timeval == NULL) alloc_error(__FILE__, __LINE__); 00515 00516 /* Compute time variable using actual units */ 00517 for (t=0; t<data->learning->data[s].ntime; t++) 00518 istat = utInvCalendar2(data->learning->data[s].time_s->year[t], data->learning->data[s].time_s->month[t], 00519 data->learning->data[s].time_s->day[t], data->learning->data[s].time_s->hour[t], 00520 data->learning->data[s].time_s->minutes[t], data->learning->data[s].time_s->seconds[t], 00521 dataunits, &(timeval[t])); 00522 00523 /* Write time */ 00524 start[0] = 0; 00525 start[1] = 0; 00526 start[2] = 0; 00527 count[0] = (size_t) data->learning->data[s].ntime; 00528 count[1] = 0; 00529 count[2] = 0; 00530 istat = nc_put_vara_double(ncoutid, timeoutid[s], start, count, timeval); 00531 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00532 00533 /* Write regression constants */ 00534 start[0] = 0; 00535 start[1] = 0; 00536 start[2] = 0; 00537 count[0] = (size_t) data->reg->npts; 00538 count[1] = 0; 00539 count[2] = 0; 00540 istat = nc_put_vara_double(ncoutid, cstoutid[s], start, count, data->learning->data[s].precip_reg_cst); 00541 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00542 00543 /* Write regressions coefficients */ 00544 start[0] = 0; 00545 start[1] = 0; 00546 start[2] = 0; 00547 count[0] = (size_t) data->conf->season[s].nclusters; 00548 count[1] = (size_t) data->reg->npts; 00549 count[2] = 0; 00550 istat = nc_put_vara_double(ncoutid, regoutid[s], start, count, data->learning->data[s].precip_reg); 00551 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00552 00553 /* Write regressions distances */ 00554 if (data->learning->data[s].precip_reg_dist != NULL) { 00555 start[0] = 0; 00556 start[1] = 0; 00557 start[2] = 
0; 00558 count[0] = (size_t) data->learning->data[s].ntime; 00559 count[1] = (size_t) data->conf->season[s].nclusters; 00560 count[2] = 0; 00561 istat = nc_put_vara_double(ncoutid, distoutid[s], start, count, data->learning->data[s].precip_reg_dist); 00562 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00563 } 00564 00565 /* Write regression R^2 diagnostic */ 00566 start[0] = 0; 00567 start[1] = 0; 00568 start[2] = 0; 00569 count[0] = (size_t) data->reg->npts; 00570 count[1] = 0; 00571 count[2] = 0; 00572 istat = nc_put_vara_double(ncoutid, rsqoutid[s], start, count, data->learning->data[s].precip_reg_rsq); 00573 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00574 00575 if (data->learning->data[s].precip_reg_err != NULL) { 00576 /* Write regression residuals diagnostic */ 00577 start[0] = 0; 00578 start[1] = 0; 00579 start[2] = 0; 00580 count[0] = (size_t) data->learning->data[s].ntime; 00581 count[1] = (size_t) data->reg->npts; 00582 count[2] = 0; 00583 istat = nc_put_vara_double(ncoutid, erroutid[s], start, count, data->learning->data[s].precip_reg_err); 00584 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00585 } 00586 00587 /* Write regression autocorrelation diagnostic */ 00588 start[0] = 0; 00589 start[1] = 0; 00590 start[2] = 0; 00591 count[0] = (size_t) data->reg->npts; 00592 count[1] = 0; 00593 count[2] = 0; 00594 istat = nc_put_vara_double(ncoutid, acoroutid[s], start, count, data->learning->data[s].precip_reg_autocor); 00595 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00596 00597 /* Write regression VIF diagnostic */ 00598 start[0] = 0; 00599 start[1] = 0; 00600 start[2] = 0; 00601 count[0] = (size_t) data->conf->season[s].nclusters; 00602 count[1] = 0; 00603 count[2] = 0; 00604 istat = nc_put_vara_double(ncoutid, vifoutid[s], start, count, data->learning->data[s].precip_reg_vif); 00605 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00606 00607 /* Write precipitation index */ 00608 start[0] = 0; 00609 start[1] = 0; 00610 start[2] = 0; 00611 count[0] = (size_t) data->learning->data[s].ntime; 00612 count[1] = (size_t) data->reg->npts; 00613 count[2] = 0; 00614 istat = nc_put_vara_double(ncoutid, rrdoutid[s], start, count, data->learning->data[s].precip_index); 00615 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00616 00617 /* Write precipitation index obs */ 00618 start[0] = 0; 00619 start[1] = 0; 00620 start[2] = 0; 00621 count[0] = (size_t) data->learning->data[s].ntime; 00622 count[1] = (size_t) data->reg->npts; 00623 count[2] = 0; 00624 istat = nc_put_vara_double(ncoutid, rrooutid[s], start, count, data->learning->data[s].precip_index_obs); 00625 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00626 00627 /* Write secondary field index */ 00628 start[0] = 0; 00629 start[1] = 0; 00630 start[2] = 0; 00631 count[0] = (size_t) data->learning->data[s].ntime; 00632 count[1] = 0; 00633 count[2] = 0; 00634 istat = nc_put_vara_double(ncoutid, taoutid[s], start, count, data->learning->data[s].sup_index); 00635 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00636 00637 /* Write secondary field 2D field */ 00638 if (data->conf->season[s].secondary_cov == TRUE && data->learning->data[s].sup_val != NULL) { 00639 start[0] = 0; 00640 start[1] = 0; 00641 start[2] = 0; 00642 count[0] = (size_t) data->learning->data[s].ntime; 00643 count[1] = (size_t) data->learning->sup_nlat; 00644 count[2] = (size_t) 
data->learning->sup_nlon; 00645 istat = nc_put_vara_double(ncoutid, tadoutid[s], start, count, data->learning->data[s].sup_val); 00646 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00647 } 00648 00649 tancp_mean[s] = data->learning->data[s].sup_index_mean; 00650 tancp_var[s] = data->learning->data[s].sup_index_var; 00651 } 00652 00653 /* Write pc_normalized_var */ 00654 start[0] = 0; 00655 start[1] = 0; 00656 start[2] = 0; 00657 count[0] = (size_t) data->learning->rea_neof; 00658 count[1] = 0; 00659 count[2] = 0; 00660 bufd = (double *) malloc(data->learning->rea_neof * sizeof(double)); 00661 if (bufd == NULL) alloc_error(__FILE__, __LINE__); 00662 for (ii=0; ii<data->learning->rea_neof; ii++) 00663 bufd[ii] = sqrt(data->learning->pc_normalized_var[ii]); 00664 istat = nc_put_vara_double(ncoutid, pcoutid, start, count, bufd); 00665 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00666 (void) free(bufd); 00667 00668 /* Write tancp_mean */ 00669 start[0] = 0; 00670 start[1] = 0; 00671 start[2] = 0; 00672 count[0] = (size_t) data->conf->nseasons; 00673 count[1] = 0; 00674 count[2] = 0; 00675 istat = nc_put_vara_double(ncoutid, tamoutid, start, count, tancp_mean); 00676 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00677 00678 /* Write tancp_var */ 00679 start[0] = 0; 00680 start[1] = 0; 00681 start[2] = 0; 00682 count[0] = (size_t) data->conf->nseasons; 00683 count[1] = 0; 00684 count[2] = 0; 00685 istat = nc_put_vara_double(ncoutid, tavoutid, start, count, tancp_var); 00686 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00687 00688 00689 /* Close the output netCDF file */ 00690 istat = ncclose(ncoutid); 00691 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00692 00693 00694 00695 /* Open NetCDF file for writing, overwrite and truncate existing file if any */ 00696 istat = nc_create(data->learning->filename_save_weight, NC_CLOBBER, &ncoutid); 00697 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00698 00699 /* Set global attributes */ 00700 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "processor", strlen(data->info->processor), data->info->processor); 00701 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "institution", strlen(data->info->institution), data->info->institution); 00702 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_email", strlen(data->info->creator_email), data->info->creator_email); 00703 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_url", strlen(data->info->creator_url), data->info->creator_url); 00704 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_name", strlen(data->info->creator_name), data->info->creator_name); 00705 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_email", strlen(data->info->contact_email), data->info->contact_email); 00706 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_name", strlen(data->info->contact_name), data->info->contact_name); 00707 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_email", strlen(data->info->other_contact_email), 00708 data->info->other_contact_email); 00709 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_name", strlen(data->info->other_contact_name), 00710 data->info->other_contact_name); 00711 00712 fillvalue = -9999.9; 00713 00714 /* Set dimensions */ 00715 istat = nc_def_dim(ncoutid, "season", data->conf->nseasons, &sdimoutid); 00716 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00717 istat = nc_def_dim(ncoutid, 
data->conf->eofname, data->learning->rea_neof, &eofdimoutid); 00718 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00719 00720 for (s=0; s<data->conf->nseasons; s++) { 00721 00722 /* Define weight dimensions and variables */ 00723 (void) sprintf(nomvar, "%s_%d", data->conf->clustname, s+1); 00724 istat = nc_def_dim(ncoutid, nomvar, data->conf->season[s].nclusters, &(weightdimoutid[s])); 00725 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00726 00727 vardimids[0] = weightdimoutid[s]; 00728 vardimids[1] = eofdimoutid; 00729 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_weight, s+1); 00730 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(weightoutid[s])); 00731 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00732 00733 istat = nc_put_att_double(ncoutid, weightoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00734 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00735 istat = sprintf(tmpstr, "%s %s_%d", data->conf->eofname, data->conf->clustname, s+1); 00736 istat = nc_put_att_text(ncoutid, weightoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00737 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00738 istat = sprintf(tmpstr, "none"); 00739 istat = nc_put_att_text(ncoutid, weightoutid[s], "units", strlen(tmpstr), tmpstr); 00740 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00741 } 00742 00743 /* End definition mode */ 00744 istat = nc_enddef(ncoutid); 00745 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00746 00747 for (s=0; s<data->conf->nseasons; s++) { 00748 /* Write weights */ 00749 start[0] = 0; 00750 start[1] = 0; 00751 start[2] = 0; 00752 count[0] = (size_t) data->conf->season[s].nclusters; 00753 count[1] = (size_t) data->learning->rea_neof; 00754 count[2] = 0; 00755 istat = nc_put_vara_double(ncoutid, weightoutid[s], start, count, data->learning->data[s].weight); 00756 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00757 } 00758 00759 /* Close the output netCDF file */ 00760 istat = ncclose(ncoutid); 00761 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00762 00763 00764 /* Open NetCDF file for writing, overwrite and truncate existing file if any */ 00765 istat = nc_create(data->learning->filename_save_clust_learn, NC_CLOBBER, &ncoutid); 00766 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00767 00768 /* Set global attributes */ 00769 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "processor", strlen(data->info->processor), data->info->processor); 00770 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "institution", strlen(data->info->institution), data->info->institution); 00771 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_email", strlen(data->info->creator_email), data->info->creator_email); 00772 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_url", strlen(data->info->creator_url), data->info->creator_url); 00773 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_name", strlen(data->info->creator_name), data->info->creator_name); 00774 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_email", strlen(data->info->contact_email), data->info->contact_email); 00775 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_name", strlen(data->info->contact_name), data->info->contact_name); 00776 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_email", strlen(data->info->other_contact_email), 00777 
data->info->other_contact_email); 00778 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_name", strlen(data->info->other_contact_name), 00779 data->info->other_contact_name); 00780 00781 fillvalue = -9999.9; 00782 00783 /* Set dimensions */ 00784 istat = nc_def_dim(ncoutid, "season", data->conf->nseasons, &sdimoutid); 00785 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00786 istat = nc_def_dim(ncoutid, data->conf->eofname, data->learning->rea_neof, &eofdimoutid); 00787 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00788 00789 for (s=0; s<data->conf->nseasons; s++) { 00790 00791 /* Define time dimensions and variables */ 00792 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_time, s+1); 00793 istat = nc_def_dim(ncoutid, nomvar, data->learning->data[s].ntime, &(timedimoutid[s])); 00794 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00795 00796 vardimids[0] = timedimoutid[s]; 00797 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(timeoutid[s])); 00798 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00799 00800 istat = sprintf(tmpstr, "gregorian"); 00801 istat = nc_put_att_text(ncoutid, timeoutid[s], "calendar", strlen(tmpstr), tmpstr); 00802 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00803 istat = sprintf(tmpstr, "%s", data->conf->time_units); 00804 istat = nc_put_att_text(ncoutid, timeoutid[s], "units", strlen(tmpstr), tmpstr); 00805 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00806 istat = sprintf(tmpstr, "time in %s", data->conf->time_units); 00807 istat = nc_put_att_text(ncoutid, timeoutid[s], "long_name", strlen(tmpstr), tmpstr); 00808 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00809 00810 /* Define clust_learn variables */ 00811 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_class_clusters, s+1); 00812 vardimids[0] = timedimoutid[s]; 00813 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(clustoutid[s])); 00814 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00815 00816 istat = nc_put_att_double(ncoutid, clustoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00817 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00818 istat = sprintf(tmpstr, "%s_%d", data->learning->nomvar_time, s+1); 00819 istat = nc_put_att_text(ncoutid, clustoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00820 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00821 istat = sprintf(tmpstr, "none"); 00822 istat = nc_put_att_text(ncoutid, clustoutid[s], "units", strlen(tmpstr), tmpstr); 00823 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00824 } 00825 00826 /* End definition mode */ 00827 istat = nc_enddef(ncoutid); 00828 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00829 00830 for (s=0; s<data->conf->nseasons; s++) { 00831 00832 timeval = (double *) realloc(timeval, data->learning->data[s].ntime * sizeof(double)); 00833 if (timeval == NULL) alloc_error(__FILE__, __LINE__); 00834 00835 /* Compute time variable using actual units */ 00836 for (t=0; t<data->learning->data[s].ntime; t++) 00837 istat = utInvCalendar2(data->learning->data[s].time_s->year[t], data->learning->data[s].time_s->month[t], 00838 data->learning->data[s].time_s->day[t], data->learning->data[s].time_s->hour[t], 00839 data->learning->data[s].time_s->minutes[t], data->learning->data[s].time_s->seconds[t], 
00840 dataunits, &(timeval[t])); 00841 00842 /* Write clust_learn */ 00843 start[0] = 0; 00844 start[1] = 0; 00845 start[2] = 0; 00846 count[0] = (size_t) data->learning->data[s].ntime; 00847 count[1] = 0; 00848 count[2] = 0; 00849 istat = nc_put_vara_int(ncoutid, clustoutid[s], start, count, data->learning->data[s].class_clusters); 00850 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00851 00852 /* Write time */ 00853 start[0] = 0; 00854 start[1] = 0; 00855 start[2] = 0; 00856 count[0] = (size_t) data->learning->data[s].ntime; 00857 count[1] = 0; 00858 count[2] = 0; 00859 istat = nc_put_vara_double(ncoutid, timeoutid[s], start, count, data->learning->data[s].time); 00860 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00861 } 00862 00863 /* Close the output netCDF file */ 00864 istat = ncclose(ncoutid); 00865 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00866 00867 (void) ut_free(dataunits); 00868 (void) ut_free_system(unitSystem); 00869 00870 (void) free(nomvar); 00871 (void) free(tancp_mean); 00872 (void) free(tancp_var); 00873 (void) free(timeval); 00874 00875 (void) free(timedimoutid); 00876 (void) free(clustdimoutid); 00877 (void) free(weightdimoutid); 00878 00879 (void) free(timeoutid); 00880 (void) free(cstoutid); 00881 (void) free(regoutid); 00882 (void) free(distoutid); 00883 (void) free(rrdoutid); 00884 (void) free(rrooutid); 00885 (void) free(taoutid); 00886 (void) free(tadoutid); 00887 (void) free(clustoutid); 00888 (void) free(weightoutid); 00889 (void) free(rsqoutid); 00890 (void) free(erroutid); 00891 (void) free(acoroutid); 00892 (void) free(vifoutid); 00893 00894 (void) free(tmpstr); 00895 00896 return 0; 00897 }
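The listing above encodes the split calendar components (year, month, day, hour, minutes, seconds) into numeric time values in the file's time units before writing them to the per-season time variables. A minimal sketch of that conversion is shown here; it assumes udunits-2 for unit handling and the project's utInvCalendar2 helper exactly as it is called above (the prototype is restated only for illustration, and the unit string is a hypothetical example), so it links only against that helper:

#include <stdio.h>
#include <udunits2.h>

/* Prototype of the project's utInvCalendar2 helper, as called in the listing above (assumed). */
int utInvCalendar2(int year, int month, int day, int hour, int minute,
                   double second, ut_unit *unit, double *value);

int main(void) {
  /* Initialize the udunits-2 unit system and parse a hypothetical time unit string. */
  ut_system *unitSystem = ut_read_xml(NULL);
  ut_unit *dataunits = ut_parse(unitSystem, "days since 1900-01-01 00:00:00", UT_ASCII);
  double timeval;

  /* Encode 1961-07-15 12:00:00 as an offset in the chosen units. */
  int istat = utInvCalendar2(1961, 7, 15, 12, 0, 0.0, dataunits, &timeval);
  if (istat == 0)
    (void) printf("encoded time = %f\n", timeval);

  (void) ut_free(dataunits);
  (void) ut_free_system(unitSystem);
  return 0;
}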
int write_regression_fields ( data_struct  *data,
                              char         *filename,
                              double      **timeval,
                              int          *ntime,
                              double      **precip_index,
                              double      **distclust,
                              double      **sup_index )
Write regression-related downscaling fields for diagnostic use.
[in]   data           MASTER data structure.
[in]   filename       Output filename.
[in]   timeval        Time vectors, one per season.
[in]   ntime          Number of times in timeval, one value per season.
[in]   precip_index   Precipitation index, one array per season.
[in]   distclust      Distances of the days to the cluster centres, one array per season.
[in]   sup_index      Secondary (supplemental) large-scale field index, one vector per season.
Definition at line 59 of file write_regression_fields.c.
References alloc_error(), conf_struct::clustname, data_struct::conf, info_struct::contact_email, info_struct::contact_name, info_struct::creator_email, info_struct::creator_name, info_struct::creator_url, learning_struct::data, handle_netcdf_error(), data_struct::info, info_struct::institution, data_struct::learning, season_struct::nclusters, learning_struct::nomvar_class_clusters, learning_struct::nomvar_precip_index, learning_struct::nomvar_precip_reg, learning_struct::nomvar_precip_reg_cst, learning_struct::nomvar_sup_index, reg_struct::npts, conf_struct::nseasons, info_struct::other_contact_email, info_struct::other_contact_name, learning_data_struct::precip_reg, learning_data_struct::precip_reg_cst, info_struct::processor, conf_struct::ptsname, data_struct::reg, conf_struct::season, info_struct::software, conf_struct::time_units, and reg_struct::timename.
Referenced by wt_downscaling().
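As a usage illustration, the fragment below mirrors the way wt_downscaling() calls this routine when regression diagnostics are requested; the wrapper name is hypothetical and the per-season buffers are assumed to have been filled earlier in the downscaling loop (declarations come from the header documented on this page, whose file name is assumed here to be dsclim.h):

#include "dsclim.h"  /* header documented on this page; file name assumed */

/* Hedged sketch: save regression diagnostics for the control run when requested. */
static void save_regression_diagnostics(data_struct *data,
                                        double **time_sub, int *ntime_sub,
                                        double **precip_index, double **distclust,
                                        double **smean_norm) {
  if (data->reg->reg_save == TRUE)
    (void) write_regression_fields(data, data->reg->filename_save_ctrl_reg,
                                   time_sub, ntime_sub,
                                   precip_index, distclust, smean_norm);
}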
00060 { 00071 int istat; /* Diagnostic status */ 00072 00073 int ncoutid; /* NetCDF output file handle ID */ 00074 int *timedimoutid; /* NetCDF time dimension output ID */ 00075 int sdimoutid; /* NetCDF season dimension output ID */ 00076 int ptsdimoutid; /* NetCDF points dimension output ID */ 00077 int *clustdimoutid; /* NetCDF clusters dimension output ID */ 00078 int *timeoutid; /* NetCDF time variable ID */ 00079 int *cstoutid; /* NetCDF regression constant variable ID */ 00080 int *regoutid; /* NetCDF regression coefficients variable ID */ 00081 int *rrdoutid; /* NetCDF precipitation index variable ID */ 00082 int *distclustoutid; /* NetCDF distances to clusters variable ID */ 00083 int *taoutid; /* NetCDF secondary large-scale field index variable ID */ 00084 int vardimids[NC_MAX_VAR_DIMS]; /* NetCDF dimension IDs */ 00085 00086 size_t start[3]; /* Start element when writing */ 00087 size_t count[3]; /* Count of elements to write */ 00088 00089 char *tmpstr = NULL; /* Temporary string */ 00090 00091 double fillvalue; 00092 char *nomvar = NULL; 00093 00094 int s; 00095 00096 timedimoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00097 if (timedimoutid == NULL) alloc_error(__FILE__, __LINE__); 00098 clustdimoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00099 if (clustdimoutid == NULL) alloc_error(__FILE__, __LINE__); 00100 00101 timeoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00102 if (timeoutid == NULL) alloc_error(__FILE__, __LINE__); 00103 cstoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00104 if (cstoutid == NULL) alloc_error(__FILE__, __LINE__); 00105 regoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00106 if (regoutid == NULL) alloc_error(__FILE__, __LINE__); 00107 rrdoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00108 if (rrdoutid == NULL) alloc_error(__FILE__, __LINE__); 00109 distclustoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00110 if (distclustoutid == NULL) alloc_error(__FILE__, __LINE__); 00111 taoutid = (int *) malloc(data->conf->nseasons * sizeof(int)); 00112 if (taoutid == NULL) alloc_error(__FILE__, __LINE__); 00113 00114 nomvar = (char *) malloc(200 * sizeof(char)); 00115 if (nomvar == NULL) alloc_error(__FILE__, __LINE__); 00116 tmpstr = (char *) malloc(200 * sizeof(char)); 00117 if (tmpstr == NULL) alloc_error(__FILE__, __LINE__); 00118 00119 /* Open NetCDF file for writing, overwrite and truncate existing file if any */ 00120 istat = nc_create(filename, NC_CLOBBER, &ncoutid); 00121 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00122 00123 /* Set global attributes */ 00124 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "processor", strlen(data->info->processor), data->info->processor); 00125 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "software", strlen(data->info->software), data->info->software); 00126 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "institution", strlen(data->info->institution), data->info->institution); 00127 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_email", strlen(data->info->creator_email), data->info->creator_email); 00128 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_url", strlen(data->info->creator_url), data->info->creator_url); 00129 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "creator_name", strlen(data->info->creator_name), data->info->creator_name); 00130 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "contact_email", strlen(data->info->contact_email), data->info->contact_email); 00131 istat = 
nc_put_att_text(ncoutid, NC_GLOBAL, "contact_name", strlen(data->info->contact_name), data->info->contact_name); 00132 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_email", strlen(data->info->other_contact_email), 00133 data->info->other_contact_email); 00134 istat = nc_put_att_text(ncoutid, NC_GLOBAL, "other_contact_name", strlen(data->info->other_contact_name), 00135 data->info->other_contact_name); 00136 00137 fillvalue = -9999.9; 00138 00139 /* Set dimensions */ 00140 istat = nc_def_dim(ncoutid, "season", data->conf->nseasons, &sdimoutid); 00141 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00142 istat = nc_def_dim(ncoutid, data->conf->ptsname, data->reg->npts, &ptsdimoutid); 00143 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00144 for (s=0; s<data->conf->nseasons; s++) { 00145 00146 /* Define time dimensions and variables */ 00147 (void) sprintf(nomvar, "%s_%d", data->reg->timename, s+1); 00148 istat = nc_def_dim(ncoutid, nomvar, ntime[s], &(timedimoutid[s])); 00149 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00150 00151 vardimids[0] = timedimoutid[s]; 00152 istat = nc_def_var(ncoutid, nomvar, NC_INT, 1, vardimids, &(timeoutid[s])); 00153 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00154 00155 istat = sprintf(tmpstr, "gregorian"); 00156 istat = nc_put_att_text(ncoutid, timeoutid[s], "calendar", strlen(tmpstr), tmpstr); 00157 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00158 istat = sprintf(tmpstr, "%s", data->conf->time_units); 00159 istat = nc_put_att_text(ncoutid, timeoutid[s], "units", strlen(tmpstr), tmpstr); 00160 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00161 istat = sprintf(tmpstr, "time in %s", data->conf->time_units); 00162 istat = nc_put_att_text(ncoutid, timeoutid[s], "long_name", strlen(tmpstr), tmpstr); 00163 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00164 00165 /* Define cluster dimensions */ 00166 (void) sprintf(nomvar, "%s_%d", data->conf->clustname, s+1); 00167 istat = nc_def_dim(ncoutid, nomvar, data->conf->season[s].nclusters, &(clustdimoutid[s])); 00168 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00169 00170 /* Define regression constant variables */ 00171 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg_cst, s+1); 00172 vardimids[0] = ptsdimoutid; 00173 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(cstoutid[s])); 00174 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00175 00176 istat = nc_put_att_double(ncoutid, cstoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00177 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00178 istat = nc_put_att_text(ncoutid, cstoutid[s], "coordinates", strlen(data->conf->ptsname), data->conf->ptsname); 00179 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00180 istat = sprintf(tmpstr, "none"); 00181 istat = nc_put_att_text(ncoutid, cstoutid[s], "units", strlen(tmpstr), tmpstr); 00182 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00183 00184 /* Define regression coefficients variables */ 00185 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_reg, s+1); 00186 vardimids[0] = clustdimoutid[s]; 00187 vardimids[1] = ptsdimoutid; 00188 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(regoutid[s])); 00189 if (istat != NC_NOERR) handle_netcdf_error(istat, 
__FILE__, __LINE__); 00190 00191 istat = nc_put_att_double(ncoutid, regoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00192 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00193 istat = sprintf(tmpstr, "%s %s_%d", data->conf->ptsname, data->learning->nomvar_class_clusters, s+1); 00194 istat = nc_put_att_text(ncoutid, regoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00195 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00196 istat = sprintf(tmpstr, "none"); 00197 istat = nc_put_att_text(ncoutid, regoutid[s], "units", strlen(tmpstr), tmpstr); 00198 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00199 00200 /* Define precipitation index variables */ 00201 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_precip_index, s+1); 00202 vardimids[0] = timedimoutid[s]; 00203 vardimids[1] = ptsdimoutid; 00204 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(rrdoutid[s])); 00205 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00206 00207 istat = nc_put_att_double(ncoutid, rrdoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00208 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00209 istat = sprintf(tmpstr, "%s %s_%d", data->conf->ptsname, data->reg->timename, s+1); 00210 istat = nc_put_att_text(ncoutid, rrdoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00211 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00212 istat = sprintf(tmpstr, "none"); 00213 istat = nc_put_att_text(ncoutid, rrdoutid[s], "units", strlen(tmpstr), tmpstr); 00214 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00215 00216 /* Define sup_index (secondary large-scale field index for learning period) */ 00217 (void) sprintf(nomvar, "%s_%d", data->learning->nomvar_sup_index, s+1); 00218 vardimids[0] = timedimoutid[s]; 00219 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 1, vardimids, &(taoutid[s])); 00220 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00221 00222 istat = nc_put_att_double(ncoutid, taoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00223 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00224 istat = sprintf(tmpstr, "%s_%d", data->reg->timename, s+1); 00225 istat = nc_put_att_text(ncoutid, taoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00226 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00227 istat = sprintf(tmpstr, "none"); 00228 istat = nc_put_att_text(ncoutid, taoutid[s], "units", strlen(tmpstr), tmpstr); 00229 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00230 00231 /* Define distances to clusters variable */ 00232 (void) sprintf(nomvar, "cluster_distance_%d", s+1); 00233 vardimids[0] = clustdimoutid[s]; 00234 vardimids[1] = timedimoutid[s]; 00235 istat = nc_def_var(ncoutid, nomvar, NC_DOUBLE, 2, vardimids, &(distclustoutid[s])); 00236 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00237 00238 istat = nc_put_att_double(ncoutid, distclustoutid[s], "missing_value", NC_DOUBLE, 1, &fillvalue); 00239 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00240 istat = sprintf(tmpstr, "%s_%d %s_%d", data->reg->timename, s+1, data->conf->clustname, s+1); 00241 istat = nc_put_att_text(ncoutid, distclustoutid[s], "coordinates", strlen(tmpstr), tmpstr); 00242 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00243 istat = sprintf(tmpstr, "none"); 00244 istat = 
nc_put_att_text(ncoutid, distclustoutid[s], "units", strlen(tmpstr), tmpstr); 00245 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00246 00247 } 00248 00249 /* End definition mode */ 00250 istat = nc_enddef(ncoutid); 00251 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00252 00253 /* Write variables */ 00254 for (s=0; s<data->conf->nseasons; s++) { 00255 00256 /* Write time */ 00257 start[0] = 0; 00258 start[1] = 0; 00259 start[2] = 0; 00260 count[0] = (size_t) ntime[s]; 00261 count[1] = 0; 00262 count[2] = 0; 00263 istat = nc_put_vara_double(ncoutid, timeoutid[s], start, count, timeval[s]); 00264 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00265 00266 /* Write regression constants */ 00267 start[0] = 0; 00268 start[1] = 0; 00269 start[2] = 0; 00270 count[0] = (size_t) data->reg->npts; 00271 count[1] = 0; 00272 count[2] = 0; 00273 istat = nc_put_vara_double(ncoutid, cstoutid[s], start, count, data->learning->data[s].precip_reg_cst); 00274 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00275 00276 /* Write regressions coefficients */ 00277 start[0] = 0; 00278 start[1] = 0; 00279 start[2] = 0; 00280 count[0] = (size_t) data->conf->season[s].nclusters; 00281 count[1] = (size_t) data->reg->npts; 00282 count[2] = 0; 00283 istat = nc_put_vara_double(ncoutid, regoutid[s], start, count, data->learning->data[s].precip_reg); 00284 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00285 00286 /* Write reconstructed precipitation index */ 00287 start[0] = 0; 00288 start[1] = 0; 00289 start[2] = 0; 00290 count[0] = (size_t) ntime[s]; 00291 count[1] = (size_t) data->reg->npts; 00292 count[2] = 0; 00293 istat = nc_put_vara_double(ncoutid, rrdoutid[s], start, count, precip_index[s]); 00294 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00295 00296 /* Write secondary field index */ 00297 start[0] = 0; 00298 start[1] = 0; 00299 start[2] = 0; 00300 count[0] = (size_t) ntime[s]; 00301 count[1] = 0; 00302 count[2] = 0; 00303 istat = nc_put_vara_double(ncoutid, taoutid[s], start, count, sup_index[s]); 00304 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00305 00306 /* Write distances to clusters */ 00307 start[0] = 0; 00308 start[1] = 0; 00309 start[2] = 0; 00310 count[0] = (size_t) data->conf->season[s].nclusters; 00311 count[1] = (size_t) ntime[s]; 00312 count[2] = 0; 00313 istat = nc_put_vara_double(ncoutid, distclustoutid[s], start, count, distclust[s]); 00314 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00315 00316 } 00317 00318 /* Close the output netCDF file */ 00319 istat = ncclose(ncoutid); 00320 if (istat != NC_NOERR) handle_netcdf_error(istat, __FILE__, __LINE__); 00321 00322 (void) free(nomvar); 00323 00324 (void) free(timedimoutid); 00325 (void) free(clustdimoutid); 00326 00327 (void) free(timeoutid); 00328 (void) free(cstoutid); 00329 (void) free(regoutid); 00330 (void) free(rrdoutid); 00331 (void) free(distclustoutid); 00332 (void) free(taoutid); 00333 00334 (void) free(tmpstr); 00335 00336 return 0; 00337 }
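The function above follows the standard netCDF classic workflow: create the file, define dimensions, variables, and attributes while in define mode, call nc_enddef(), then write the data and close. A minimal self-contained sketch of that pattern (the file name, dimension name, and variable name are illustrative only):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <netcdf.h>

/* Abort on error, in the spirit of handle_netcdf_error() used above. */
static void check(int istat) {
  if (istat != NC_NOERR) {
    (void) fprintf(stderr, "netCDF error: %s\n", nc_strerror(istat));
    exit(EXIT_FAILURE);
  }
}

int main(void) {
  int ncid, timedimid, varid, dimids[1];
  double fillvalue = -9999.9;
  double values[3] = { 1.0, 2.0, 3.0 };
  size_t start[1] = { 0 };
  size_t count[1] = { 3 };

  /* Create the file, overwriting any existing one. */
  check(nc_create("example_diag.nc", NC_CLOBBER, &ncid));

  /* Define one time dimension, one variable on it, and its attributes. */
  check(nc_def_dim(ncid, "time_1", 3, &timedimid));
  dimids[0] = timedimid;
  check(nc_def_var(ncid, "cluster_distance_1", NC_DOUBLE, 1, dimids, &varid));
  check(nc_put_att_double(ncid, varid, "missing_value", NC_DOUBLE, 1, &fillvalue));
  check(nc_put_att_text(ncid, varid, "units", strlen("none"), "none"));

  /* Leave define mode, write the values, and close the file. */
  check(nc_enddef(ncid));
  check(nc_put_vara_double(ncid, varid, start, count, values));
  check(nc_close(ncid));
  return 0;
}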
int wt_downscaling ( data_struct *data )
Downscaling climate scenarios program using weather typing.
[in]   data   MASTER data structure.
Step 1: Read the large-scale fields.
Step 2: Compute the climatologies and remove them from the selected large-scale fields.
Step 3: Project the selected large-scale fields onto the EOFs.
CONTROL RUN
Step 4: Compute the distances to the clusters for the control run.
Step 5: Compute the mean and variance of the secondary large-scale fields for the control run.
MODEL RUN
Step 6: Compute the spatial mean of the secondary large-scale fields for the model run.
Step 7: Compute the distances to the clusters (model run and, optionally, control run).
Step 8: Normalize the secondary large-scale fields by the control-run mean and variance.
Step 9: Compute the precipitation index for each season using the pre-computed regressions for the model run.
Step 10: Find the analog days in the learning period (resampling).
Step 11: Compute the secondary large-scale field difference, if requested.
Step 12: Reconstruct the data from the chosen resampled days and write the output.
Determine the number of time elements given the seasons.
Optionally save the analog_days information in an output file.
A condensed code sketch of this flow follows.
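This outline uses the project routines listed in the cross-references below; only the calls that take the master data structure directly are written out, the remainder are indicated as comments. Error handling, memory management, and the category loops over model/control runs are omitted (see the full listing further down). The header file name is assumed:

#include "dsclim.h"  /* header documented on this page; file name assumed */

/* Hedged outline of the wt_downscaling() flow; not the full implementation. */
int wt_downscaling_outline(data_struct *data) {
  int istat, s;

  /* Steps 1-3: read the large-scale fields, remove climatologies, read EOFs. */
  if ((istat = read_large_scale_fields(data)) != 0) return istat;
  if ((istat = remove_clim(data)) != 0) return istat;
  if ((istat = read_large_scale_eof(data)) != 0) return istat;
  /* ... project_field_eof() for each selected large-scale field ... */

  /* Steps 4-8: control-run references, distances to clusters, normalization. */
  /* ... normalize_pc(), mean_variance_dist_clusters(), dist_clusters_normctrl(),
         class_days_pc_clusters(), normalize_field(), normalize_field_2d() ... */

  for (s = 0; s < data->conf->nseasons; s++) {
    /* Step 9: precipitation index from the pre-computed regressions. */
    /* apply_regression(...); */

    /* Step 10: resampling - find the analog days in the learning period. */
    /* find_the_days(...); */

    /* Step 11: secondary large-scale field difference, if requested. */
    /* compute_secondary_large_scale_diff(...); */
  }

  /* Step 12: merge the seasons, optionally save the analogs, write the output. */
  /* merge_seasons(...); save_analog_data(...); output_downscaled_analog(...); */
  return 0;
}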
Definition at line 67 of file wt_downscaling.c.
References alloc_error(), field_struct::analog_days, field_struct::analog_days_year, analog_day_struct::analog_dayschoice, conf_struct::analog_file_ctrl, conf_struct::analog_file_other, conf_struct::analog_save, apply_regression(), conf_struct::cal_type, learning_data_struct::class_clusters, class_days_pc_clusters(), conf_struct::classif_type, conf_struct::compression, conf_struct::compression_level, compute_secondary_large_scale_diff(), data_struct::conf, conf_struct::config, CTRL_FIELD_LS, CTRL_SEC_FIELD_LS, learning_struct::data, field_struct::data, analog_day_struct::day, time_vect_struct::day, analog_day_struct::day_s, downscale_struct::days_class_clusters, downscale_struct::days_class_clusters_all, conf_struct::debug, downscale_struct::delta, downscale_struct::delta_all, downscale_struct::delta_dayschoice, downscale_struct::delta_dayschoice_all, conf_struct::deltat, downscale_struct::dist, downscale_struct::dist_all, dist_clusters_normctrl(), field_data_struct::down, period_struct::downscale, field_data_struct::eof_data, field_data_struct::eof_info, eof_data_struct::eof_ls, eof_info_struct::eof_project, eof_info_struct::eof_scale, extract_subdomain(), extract_subperiod_months(), FALSE, data_struct::field, mask_struct::field, field_data_struct::field_eof_ls, field_data_struct::field_ls, FIELD_LS, reg_struct::filename_save_ctrl_reg, reg_struct::filename_save_other_reg, info_field_struct::fillvalue, find_the_days(), field_data_struct::first_variance, conf_struct::format, data_struct::info, eof_info_struct::info, mask_struct::lat, field_struct::lat_eof_ls, data_struct::learning, mask_struct::lon, field_struct::lon_eof_ls, downscale_struct::mean, downscale_struct::mean_dist, mean_field_spatial(), mean_variance_dist_clusters(), mean_variance_field_spatial(), merge_seasonal_data(), merge_seasonal_data_2d(), merge_seasonal_data_i(), merge_seasons(), analog_day_struct::metric_norm, analog_day_struct::month, season_struct::month, time_vect_struct::month, analog_day_struct::month_s, field_struct::n_ls, NCAT, season_struct::nclusters, season_struct::ndays, analog_day_struct::ndayschoice, season_struct::ndayschoices, eof_info_struct::neof_ls, mask_struct::nlat, field_struct::nlat_eof_ls, field_struct::nlat_ls, mask_struct::nlon, field_struct::nlon_eof_ls, field_struct::nlon_ls, season_struct::nmonths, normalize_field(), normalize_field_2d(), normalize_pc(), reg_struct::npts, season_struct::nreg, conf_struct::nseasons, analog_day_struct::ntime, learning_data_struct::ntime, learning_struct::ntime, field_struct::ntime_ls, conf_struct::obs_var, conf_struct::only_wt, conf_struct::output, output_downscaled_analog(), conf_struct::output_month_begin, conf_struct::output_only, conf_struct::output_path, learning_struct::pc_normalized_var, conf_struct::period, conf_struct::period_ctrl, learning_data_struct::precip_index, field_struct::precip_index, learning_data_struct::precip_reg, learning_data_struct::precip_reg_cst, project_field_eof(), read_analog_data(), read_large_scale_eof(), read_large_scale_fields(), data_struct::reg, reg_struct::reg_save, remove_clim(), save_analog_data(), conf_struct::season, SEC_FIELD_LS, season_struct::secondary_choice, season_struct::secondary_cov, conf_struct::secondary_latitude_max, conf_struct::secondary_latitude_min, conf_struct::secondary_longitude_max, conf_struct::secondary_longitude_min, season_struct::secondary_main_choice, data_struct::secondary_mask, season_struct::shuffle, eof_data_struct::sing_ls, downscale_struct::smean, downscale_struct::smean_2d, 
downscale_struct::smean_norm, sub_period_common(), learning_data_struct::sup_index, learning_data_struct::sup_index_var, learning_struct::sup_nlat, learning_struct::sup_nlon, learning_data_struct::sup_val, downscale_struct::sup_val_norm, downscale_struct::svar_2d, analog_day_struct::time, field_struct::time_ls, time_mean_variance_field_2d(), learning_data_struct::time_s, learning_struct::time_s, field_struct::time_s, conf_struct::time_units, var_struct::timename, analog_day_struct::tindex, analog_day_struct::tindex_all, analog_day_struct::tindex_dayschoice, analog_day_struct::tindex_s_all, TRUE, conf_struct::use_downscaled_year, mask_struct::use_mask, downscale_struct::var, downscale_struct::var_dist, downscale_struct::var_pc_norm, learning_data_struct::weight, write_regression_fields(), analog_day_struct::year, time_vect_struct::year, and analog_day_struct::year_s.
Referenced by main().
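The routine is driven from main(); a minimal sketch of that call sequence is shown here, assuming the configuration file path is passed on the command line and that the declarations come from the header documented on this page (file name assumed). The real main() performs additional setup, so this is only an illustration:

#include <stdio.h>
#include "dsclim.h"  /* header documented on this page; file name assumed */

int main(int argc, char *argv[]) {
  data_struct data;
  int istat;

  if (argc < 2) {
    (void) fprintf(stderr, "Usage: %s <configuration.xml>\n", argv[0]);
    return 1;
  }

  /* Parse the XML configuration file into the master data structure. */
  istat = load_conf(&data, argv[1]);
  if (istat != 0) return istat;

  /* Run the weather-typing downscaling. */
  istat = wt_downscaling(&data);
  return istat;
}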
00067 { 00074 double *buf_sub = NULL; /* Temporary buffer for sub-domain or sub-period */ 00075 double *buftmp = NULL; /* Temporary buffer */ 00076 double *buftmpf = NULL; /* Temporary buffer */ 00077 double *mask_subd = NULL; /* Mask covering subdomain in double format when reading */ 00078 short int *mask_sub = NULL; /* Mask covering subdomain in short int */ 00079 double *lon_mask = NULL; /* Longitudes of mask */ 00080 double *lat_mask = NULL; /* Latitudes of mask */ 00081 double *var_pc_norm_all = NULL; /* Temporary values of the norm of the principal components */ 00082 int **ntime_sub = NULL; /* Number of times for sub-periods. Dimensions number of field categories (NCAT) and number of seasons */ 00083 double **time_ls_sub = NULL; /* Time values used for regression diagnostics output */ 00084 int *merged_itimes = NULL; /* Time values in common merged time vector */ 00085 int ntimes_merged; /* Number of times in one particular season */ 00086 int curindex_merged; /* Current index in merged season vector */ 00087 short int *merged_times_flag = NULL; /* Flag variable for days in the year that are processed */ 00088 double *merged_times = NULL; /* Merge times in udunit */ 00089 int ntime_sub_tmp; /* Number of times for regression diagnostics output */ 00090 int ntime_sub_learn; /* Number of times for learning common sub-period with control run for a specific season */ 00091 int ntime_sub_learn_all; /* Number of times for learning common sub-period with control run for whole period */ 00092 int nlon_mask; /* Longitude dimension for mask subdomain */ 00093 int nlat_mask; /* Latitude dimension for mask subdomain */ 00094 00095 int istat; /* Function return diagnostic value */ 00096 int i; /* Loop counter */ 00097 int ii; /* Loop counter */ 00098 int s; /* Loop counter for seasons */ 00099 int cat; /* Loop counter for field categories */ 00100 int beg_cat; /* Beginning category to process in loop */ 00101 int maxndays; /* Maximum number of analog days choices within all seasons */ 00102 00103 char *analog_file = NULL; /* Analog data filename */ 00104 period_struct *period = NULL; /* Period structure for output */ 00105 00106 char *filename = NULL; /* Temporary filename for regression optional output */ 00107 00108 if (data->conf->output_only != TRUE) { 00109 00110 /* Allocate memory */ 00111 ntime_sub = (int **) malloc(NCAT * sizeof(int *)); 00112 if (ntime_sub == NULL) alloc_error(__FILE__, __LINE__); 00113 00114 if (data->reg->reg_save == TRUE) { 00115 time_ls_sub = (double **) malloc(data->conf->nseasons * sizeof(double *)); 00116 if (time_ls_sub == NULL) alloc_error(__FILE__, __LINE__); 00117 } 00118 00119 for (cat=0; cat<NCAT; cat++) { 00120 ntime_sub[cat] = (int *) malloc(data->conf->nseasons * sizeof(int)); 00121 if (ntime_sub[cat] == NULL) alloc_error(__FILE__, __LINE__); 00122 } 00123 00125 istat = read_large_scale_fields(data); 00126 if (istat != 0) return istat; 00127 00128 /* Prepare optional mask for secondary large-scale fields */ 00129 if (data->secondary_mask->use_mask == TRUE) { 00130 (void) extract_subdomain(&mask_subd, &lon_mask, &lat_mask, &nlon_mask, &nlat_mask, data->secondary_mask->field, 00131 data->secondary_mask->lon, data->secondary_mask->lat, 00132 data->conf->secondary_longitude_min, data->conf->secondary_longitude_max, 00133 data->conf->secondary_latitude_min, data->conf->secondary_latitude_max, 00134 data->secondary_mask->nlon, data->secondary_mask->nlat, 1); 00135 00136 if (data->conf->period_ctrl->downscale == TRUE) 00137 beg_cat = CTRL_SEC_FIELD_LS; 00138 
else 00139 beg_cat = SEC_FIELD_LS; 00140 /* Loop over secondary field categories (model run and optionally control run) */ 00141 for (cat=beg_cat; cat>=SEC_FIELD_LS; cat--) { 00142 /* Loop over secondary large-scale fields */ 00143 for (i=0; i<data->field[cat].n_ls; i++) 00144 if (data->field[cat].nlon_ls != nlon_mask || data->field[cat].nlat_ls != nlat_mask) { 00145 (void) fprintf(stderr, "%s: The mask for secondary large-scale fields after selecting subdomain has invalid dimensions: nlon=%d nlat=%d. Expected: nlon=%d nlat=%d\nReverting to no-mask processing.", __FILE__, 00146 nlon_mask, nlat_mask, data->field[cat].nlon_ls, data->field[cat].nlat_ls); 00147 mask_sub = (short int *) NULL; 00148 data->secondary_mask->use_mask = FALSE; 00149 } 00150 } 00151 /* Dimensions are ok and we are using a mask. Get values into short int buffer. */ 00152 if (data->secondary_mask->use_mask == TRUE) { 00153 mask_sub = (short int *) malloc(data->field[SEC_FIELD_LS].nlon_ls*data->field[SEC_FIELD_LS].nlat_ls * sizeof(short int)); 00154 if (mask_sub == NULL) alloc_error(__FILE__, __LINE__); 00155 for (i=0; i<data->field[SEC_FIELD_LS].nlon_ls*data->field[SEC_FIELD_LS].nlat_ls; i++) 00156 mask_sub[i] = (short int) mask_subd[i]; 00157 } 00158 (void) free(mask_subd); 00159 (void) free(lon_mask); 00160 (void) free(lat_mask); 00161 } 00162 else 00163 mask_sub = (short int *) NULL; 00164 00165 if (mask_sub != NULL) 00166 printf("%s: Using a mask for secondary large-scale fields.\n", __FILE__); 00167 00169 istat = remove_clim(data); 00170 if (istat != 0) return istat; 00171 00174 /* Read EOFs and Singular Values */ 00175 istat = read_large_scale_eof(data); 00176 if (istat != 0) return istat; 00177 00178 /* Project selected large scale fields on EOF */ 00179 /* Loop over large-scale field categories (Control run and Model run) */ 00180 for (cat=CTRL_FIELD_LS; cat>=FIELD_LS; cat--) 00181 /* Loop over large-scale fields */ 00182 for (i=0; i<data->field[cat].n_ls; i++) { 00183 /* Check if we need to project field on EOFs */ 00184 if (data->field[cat].data[i].eof_info->eof_project == TRUE) { 00185 /* Allocate memory for projected large-scale field */ 00186 data->field[cat].data[i].field_eof_ls = (double *) malloc(data->field[cat].ntime_ls * data->field[cat].data[i].eof_info->neof_ls * 00187 sizeof(double)); 00188 if (data->field[cat].data[i].field_eof_ls == NULL) alloc_error(__FILE__, __LINE__); 00189 /* Project large-scale field on EOFs */ 00190 istat = project_field_eof(data->field[cat].data[i].field_eof_ls, data->field[cat].data[i].field_ls, 00191 data->field[cat].data[i].eof_data->eof_ls, data->field[cat].data[i].eof_data->sing_ls, 00192 data->field[cat].data[i].eof_info->info->fillvalue, 00193 data->field[cat].lon_eof_ls, data->field[cat].lat_eof_ls, 00194 data->field[cat].data[i].eof_info->eof_scale, 00195 data->field[cat].nlon_eof_ls, data->field[cat].nlat_eof_ls, data->field[cat].ntime_ls, 00196 data->field[cat].data[i].eof_info->neof_ls); 00197 if (istat != 0) return istat; 00198 } 00199 } 00200 00201 00205 /* Process control run only */ 00206 cat = CTRL_FIELD_LS; 00207 /* Loop over large-scale fields */ 00208 for (i=0; i<data->field[cat].n_ls; i++) { 00209 00210 /* Allocate memory for temporary buffer */ 00211 buftmp = (double *) malloc(data->field[cat].ntime_ls*data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00212 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00213 00214 /* Normalisation of the principal component by the square root of the variance of the first one */ 00215 /* Select 
common time period between the learning period and the model period (control run) */ 00216 /* for first variance calculation */ 00217 istat = sub_period_common(&buf_sub, &ntime_sub_learn_all, data->field[cat].data[i].field_eof_ls, 00218 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00219 data->learning->time_s->year, data->learning->time_s->month, 00220 data->learning->time_s->day, 1, 00221 data->field[cat].data[i].eof_info->neof_ls, 1, data->field[cat].ntime_ls, data->learning->ntime); 00222 if (istat != 0) return istat; 00223 00224 /* Allocate memory for temporary buffer */ 00225 buftmpf = (double *) malloc(ntime_sub_learn_all*data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00226 if (buftmpf == NULL) alloc_error(__FILE__, __LINE__); 00227 00228 // for (s=0; s<data->field[cat].ntime_ls; s++) 00229 // printf("%d %lf\n",s,data->field[cat].data[i].field_eof_ls[s]); 00230 00231 /* Compute the norm and the variance of the first EOF of the control run as references */ 00232 printf("Compute the norm and the variance of the first EOF of the control run as references\n"); 00233 /* Only when first_variance is -9999.9999, the variance of the first EOF will be computed */ 00234 data->field[cat].data[i].first_variance = -9999.9999; 00235 (void) normalize_pc(data->field[cat].data[i].down->var_pc_norm, &(data->field[cat].data[i].first_variance), 00236 buftmpf, buf_sub, data->field[cat].data[i].eof_info->neof_ls, 00237 ntime_sub_learn_all); 00238 // for (ii=0; ii<9; ii++) printf("%d %lf\n",ii,sqrt(data->field[cat].data[i].down->var_pc_norm[ii])); 00239 /* Free temporary buffers */ 00240 (void) free(buf_sub); 00241 (void) free(buftmpf); 00242 00243 /* Normalize the large-scale field given the reference norm and variance */ 00244 printf("Normalize the large-scale field given the reference norm and variance.\n"); 00245 /* Allocate memory for temporary buffer */ 00246 var_pc_norm_all = (double *) malloc(data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00247 if (var_pc_norm_all == NULL) alloc_error(__FILE__, __LINE__); 00248 /* Normalize EOF-projected large-scale fields */ 00249 (void) normalize_pc(var_pc_norm_all, &(data->field[cat].data[i].first_variance), 00250 buftmp, data->field[cat].data[i].field_eof_ls, data->field[cat].data[i].eof_info->neof_ls, 00251 data->field[cat].ntime_ls); 00252 /* Free temporary buffer */ 00253 (void) free(var_pc_norm_all); 00254 00255 /* Loop over each season */ 00256 for (s=0; s<data->conf->nseasons; s++) { 00257 /* Compute mean and variance of principal components of selected large-scale fields */ 00258 00259 /* Allocate memory for season-specific mean and variance of distances to clusters */ 00260 data->field[cat].data[i].down->mean_dist[s] = (double *) malloc(data->conf->season[s].nclusters * sizeof(double)); 00261 if (data->field[cat].data[i].down->mean_dist[s] == NULL) alloc_error(__FILE__, __LINE__); 00262 data->field[cat].data[i].down->var_dist[s] = (double *) malloc(data->conf->season[s].nclusters * sizeof(double)); 00263 if (data->field[cat].data[i].down->var_dist[s] == NULL) alloc_error(__FILE__, __LINE__); 00264 00265 /* Select common time period between the learning period and the model period (control run) */ 00266 istat = sub_period_common(&buf_sub, &ntime_sub_learn, buftmp, 00267 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00268 data->learning->data[s].time_s->year, data->learning->data[s].time_s->month, 00269 
data->learning->data[s].time_s->day, 1, 00270 data->field[cat].data[i].eof_info->neof_ls, 1, data->field[cat].ntime_ls, data->learning->data[s].ntime); 00271 if (istat != 0) return istat; 00272 00273 /* Compute mean and variance of distances to clusters */ 00274 (void) mean_variance_dist_clusters(data->field[cat].data[i].down->mean_dist[s], data->field[cat].data[i].down->var_dist[s], 00275 buf_sub, data->learning->data[s].weight, 00276 data->learning->pc_normalized_var, data->field[cat].data[i].down->var_pc_norm, 00277 data->field[cat].data[i].eof_info->neof_ls, data->conf->season[s].nclusters, ntime_sub_learn); 00278 /* Diagnostic output */ 00279 printf("Season: %d\n", s); 00280 for (ii=0; ii<data->conf->season[s].nclusters; ii++) 00281 (void) printf("%s: Cluster #%d. Mean and variance of distances to clusters for control run: %lf %lf\n", __FILE__, ii, 00282 data->field[cat].data[i].down->mean_dist[s][ii], sqrt(data->field[cat].data[i].down->var_dist[s][ii])); 00283 00284 /* Free temporary buffer */ 00285 (void) free(buf_sub); 00286 } 00287 /* Free temporary buffer */ 00288 (void) free(buftmp); 00289 } 00290 00293 /* Process only secondary field of control run. */ 00294 cat = CTRL_SEC_FIELD_LS; 00295 /* Loop over secondary large-scale fields */ 00296 for (i=0; i<data->field[cat].n_ls; i++) { 00297 00298 /* Compute spatial mean of secondary large-scale fields */ 00299 data->field[cat].data[i].down->smean = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00300 if (data->field[cat].data[i].down->smean == NULL) alloc_error(__FILE__, __LINE__); 00301 00302 (void) mean_field_spatial(data->field[cat].data[i].down->smean, data->field[cat].data[i].field_ls, mask_sub, 00303 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls); 00304 00305 for (s=0; s<data->conf->nseasons; s++) { 00306 00307 /* Compute seasonal mean and variance of principal components of selected large-scale fields */ 00308 00309 /* Select common time period between the learning period and the model period (control run) */ 00310 istat = sub_period_common(&buf_sub, &ntime_sub_learn, data->field[cat].data[i].field_ls, 00311 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00312 data->learning->data[s].time_s->year, data->learning->data[s].time_s->month, 00313 data->learning->data[s].time_s->day, 3, 00314 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls, 00315 data->learning->data[s].ntime); 00316 if (istat != 0) return istat; 00317 00318 /* Compute seasonal mean and variance of spatially-averaged secondary field */ 00319 (void) mean_variance_field_spatial(&(data->field[cat].data[i].down->mean[s]), &(data->field[cat].data[i].down->var[s]), buf_sub, 00320 mask_sub, data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_sub_learn); 00321 00322 /* Compute mean and variance over time for each point of secondary field */ 00323 data->field[cat].data[i].down->smean_2d[s] = (double *) 00324 malloc(data->field[cat].nlon_ls*data->field[cat].nlat_ls*ntime_sub_learn * sizeof(double)); 00325 if (data->field[cat].data[i].down->smean_2d[s] == NULL) alloc_error(__FILE__, __LINE__); 00326 data->field[cat].data[i].down->svar_2d[s] = (double *) 00327 malloc(data->field[cat].nlon_ls*data->field[cat].nlat_ls*ntime_sub_learn * sizeof(double)); 00328 if (data->field[cat].data[i].down->svar_2d[s] == NULL) alloc_error(__FILE__, __LINE__); 00329 (void) time_mean_variance_field_2d(data->field[cat].data[i].down->smean_2d[s], 
data->field[cat].data[i].down->svar_2d[s], 00330 buf_sub, data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_sub_learn); 00331 00332 /* Diagnostic output */ 00333 (void) printf("Control run:: Season: %d TAS mean=%lf variance=%lf cat=%d field=%d\n", s, data->field[cat].data[i].down->mean[s], 00334 sqrt(data->field[cat].data[i].down->var[s]), cat, i); 00335 00336 /* Free temporary buffer */ 00337 (void) free(buf_sub); 00338 } 00339 } 00340 00341 00346 cat = SEC_FIELD_LS; 00347 /* Loop over secondary large-scale fields */ 00348 for (i=0; i<data->field[cat].n_ls; i++) { 00349 /* Compute spatial mean of secondary large-scale fields */ 00350 data->field[cat].data[i].down->smean = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00351 if (data->field[cat].data[i].down->smean == NULL) alloc_error(__FILE__, __LINE__); 00352 (void) mean_field_spatial(data->field[cat].data[i].down->smean, data->field[cat].data[i].field_ls, mask_sub, 00353 data->field[cat].nlon_ls, data->field[cat].nlat_ls, data->field[cat].ntime_ls); 00354 } 00355 00358 /* Downscale also control run if needed */ 00359 if (data->conf->period_ctrl->downscale == TRUE) 00360 beg_cat = CTRL_FIELD_LS; 00361 else 00362 beg_cat = FIELD_LS; 00363 /* Loop over larg-scale field categories (model run and optionally control run) */ 00364 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00365 /* Loop over large-scale fields */ 00366 for (i=0; i<data->field[cat].n_ls; i++) { 00367 00368 /* Allocate memory for temporary buffer */ 00369 buftmp = (double *) malloc(data->field[cat].ntime_ls*data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00370 if (buftmp == NULL) alloc_error(__FILE__, __LINE__); 00371 00372 /* Normalisation of the principal component by the square root of the variance of the control run */ 00373 var_pc_norm_all = (double *) malloc(data->field[cat].data[i].eof_info->neof_ls * sizeof(double)); 00374 if (var_pc_norm_all == NULL) alloc_error(__FILE__, __LINE__); 00375 (void) normalize_pc(var_pc_norm_all, &(data->field[CTRL_FIELD_LS].data[i].first_variance), buftmp, 00376 data->field[cat].data[i].field_eof_ls, data->field[cat].data[i].eof_info->neof_ls, 00377 data->field[cat].ntime_ls); 00378 (void) free(var_pc_norm_all); 00379 00380 /* Loop over seasons */ 00381 for (s=0; s<data->conf->nseasons; s++) { 00382 00383 /* Select season months in the whole time period and create sub-period large-scale field buffer */ 00384 (void) extract_subperiod_months(&buf_sub, &(ntime_sub[cat][s]), buftmp, 00385 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00386 data->conf->season[s].month, 00387 1, 1, data->field[cat].data[i].eof_info->neof_ls, data->field[cat].ntime_ls, 00388 data->conf->season[s].nmonths); 00389 00390 /* Compute distances to clusters using normalization and against the control reference run */ 00391 data->field[cat].data[i].down->dist[s] = (double *) 00392 malloc(data->conf->season[s].nclusters*ntime_sub[cat][s] * sizeof(double)); 00393 if (data->field[cat].data[i].down->dist[s] == NULL) alloc_error(__FILE__, __LINE__); 00394 (void) dist_clusters_normctrl(data->field[cat].data[i].down->dist[s], buf_sub, data->learning->data[s].weight, 00395 data->learning->pc_normalized_var, data->field[CTRL_FIELD_LS].data[i].down->var_pc_norm, 00396 data->field[CTRL_FIELD_LS].data[i].down->mean_dist[s], 00397 data->field[CTRL_FIELD_LS].data[i].down->var_dist[s], 00398 data->field[cat].data[i].eof_info->neof_ls, data->conf->season[s].nclusters, 00399 ntime_sub[cat][s]); 00400 /* 
Classify each day in the current clusters */ 00401 data->field[cat].data[i].down->days_class_clusters[s] = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00402 if (data->field[cat].data[i].down->days_class_clusters[s] == NULL) alloc_error(__FILE__, __LINE__); 00403 (void) class_days_pc_clusters(data->field[cat].data[i].down->days_class_clusters[s], buf_sub, 00404 data->learning->data[s].weight, data->conf->classif_type, 00405 data->field[cat].data[i].eof_info->neof_ls, data->conf->season[s].nclusters, 00406 ntime_sub[cat][s]); 00407 /* Free temporary buffer */ 00408 (void) free(buf_sub); 00409 } 00410 /* Free temporary buffer */ 00411 (void) free(buftmp); 00412 } 00413 } 00414 00417 /* Downscale also control run if needed */ 00418 if (data->conf->period_ctrl->downscale == TRUE) 00419 beg_cat = CTRL_SEC_FIELD_LS; 00420 else 00421 beg_cat = SEC_FIELD_LS; 00422 00423 /* Loop over secondary field categories (model run and optionally control run) */ 00424 for (cat=beg_cat; cat>=SEC_FIELD_LS; cat--) { 00425 /* Loop over secondary large-scale fields */ 00426 for (i=0; i<data->field[cat].n_ls; i++) 00427 /* Loop over each season */ 00428 for (s=0; s<data->conf->nseasons; s++) { 00429 /* Select season months in the whole time period to create a sub-period buffer */ 00430 (void) extract_subperiod_months(&buf_sub, &(ntime_sub[cat][s]), data->field[cat].data[i].down->smean, 00431 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00432 data->conf->season[s].month, 3, 1, 1, data->field[cat].ntime_ls, data->conf->season[s].nmonths); 00433 /* Normalize the spatial mean of secondary large-scale fields */ 00434 data->field[cat].data[i].down->smean_norm[s] = (double *) malloc(data->field[cat].ntime_ls * sizeof(double)); 00435 if (data->field[cat].data[i].down->smean_norm[s] == NULL) alloc_error(__FILE__, __LINE__); 00436 (void) normalize_field(data->field[cat].data[i].down->smean_norm[s], buf_sub, 00437 data->field[CTRL_SEC_FIELD_LS].data[i].down->mean[s], data->field[CTRL_SEC_FIELD_LS].data[i].down->var[s], 00438 1, 1, ntime_sub[cat][s]); 00439 /* Free temporary buffer */ 00440 (void) free(buf_sub); 00441 00442 /* Select season months in the whole time period to create a 2D sub-period buffer */ 00443 (void) extract_subperiod_months(&buf_sub, &(ntime_sub[cat][s]), data->field[cat].data[i].field_ls, 00444 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00445 data->conf->season[s].month, 3, data->field[cat].nlon_ls, data->field[cat].nlat_ls, 00446 data->field[cat].ntime_ls, data->conf->season[s].nmonths); 00447 /* Normalize the secondary large-scale fields */ 00448 data->field[cat].data[i].down->sup_val_norm[s] = 00449 (double *) malloc(data->field[cat].nlon_ls*data->field[cat].nlat_ls*data->field[cat].ntime_ls * sizeof(double)); 00450 if (data->field[cat].data[i].down->sup_val_norm[s] == NULL) alloc_error(__FILE__, __LINE__); 00451 (void) normalize_field_2d(data->field[cat].data[i].down->sup_val_norm[s], buf_sub, 00452 data->field[CTRL_SEC_FIELD_LS].data[i].down->smean_2d[s], 00453 data->field[CTRL_SEC_FIELD_LS].data[i].down->svar_2d[s], 00454 data->field[cat].nlon_ls, data->field[cat].nlat_ls, ntime_sub[cat][s]); 00455 /* Free temporary buffer */ 00456 (void) free(buf_sub); 00457 } 00458 } 00459 00462 /* Select the first large-scale field which must contain the cluster distances */ 00463 /* and the first secondary large-scale fields which must contains its spatial mean */ 00464 i = 0; 00465 00466 /* Downscale also 
control run if needed */ 00467 if (data->conf->period_ctrl->downscale == TRUE) 00468 beg_cat = CTRL_FIELD_LS; 00469 else 00470 beg_cat = FIELD_LS; 00471 00472 /* Loop over large-scale field categories (model run and optionally control run) */ 00473 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00474 /* Process only if, for this category, at least one large-scale field is available */ 00475 for (i=0; i<data->field[cat].n_ls; i++) { 00476 /* Loop over each season */ 00477 for (s=0; s<data->conf->nseasons; s++) { 00478 /* Apply the regression coefficients to calculate precipitation using the cluster distances */ 00479 /* and the normalized spatial mean of the corresponding secondary large-scale field */ 00480 data->field[cat].precip_index[s] = (double *) malloc(data->reg->npts*ntime_sub[cat+2][s] * sizeof(double)); 00481 if (data->field[cat].precip_index[s] == NULL) alloc_error(__FILE__, __LINE__); 00482 (void) apply_regression(data->field[cat].precip_index[s], data->learning->data[s].precip_reg, 00483 data->learning->data[s].precip_reg_cst, 00484 data->field[cat].data[i].down->dist[s], data->field[cat+2].data[i].down->smean_norm[s], 00485 data->reg->npts, ntime_sub[cat+2][s], data->conf->season[s].nclusters, data->conf->season[s].nreg); 00486 if (data->reg->reg_save == TRUE) 00487 /* Select season months in the whole time period and create sub-period time vector */ 00488 (void) extract_subperiod_months(&(time_ls_sub[s]), &ntime_sub_tmp, data->field[cat].time_ls, 00489 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00490 data->conf->season[s].month, 00491 1, 1, 1, data->field[cat].ntime_ls, 00492 data->conf->season[s].nmonths); 00493 } 00494 if (data->reg->reg_save == TRUE) { 00495 (void) printf("Writing downscaling regression diagnostic fields.\n"); 00496 if (cat == CTRL_FIELD_LS) 00497 filename = data->reg->filename_save_ctrl_reg; 00498 else 00499 filename = data->reg->filename_save_other_reg; 00500 (void) write_regression_fields(data, filename, time_ls_sub, ntime_sub[cat+2], 00501 data->field[cat].precip_index, 00502 data->field[cat].data[i].down->dist, 00503 data->field[cat+2].data[i].down->smean_norm); 00504 } 00505 } 00506 } 00507 if (data->reg->reg_save == TRUE) { 00508 for (s=0; s<data->conf->nseasons; s++) 00509 (void) free(time_ls_sub[s]); 00510 (void) free(time_ls_sub); 00511 } 00512 00515 /* Select the first large-scale field which must contain the cluster distances */ 00516 /* and the first secondary large-scale fields which must contains its spatial mean */ 00517 i = 0; 00518 00519 /* Downscale also control run if needed */ 00520 if (data->conf->period_ctrl->downscale == TRUE) 00521 beg_cat = CTRL_FIELD_LS; 00522 else 00523 beg_cat = FIELD_LS; 00524 00525 /* Loop over large-scale field categories (model run and optionally control run) */ 00526 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00527 /* Process only if, for this category, at least one large-scale field is available */ 00528 if (data->field[cat].n_ls > 0) 00529 /* Loop over each season */ 00530 for (s=0; s<data->conf->nseasons; s++) { 00531 /* Find the analog days in the learning period given the precipitation index, */ 00532 /* the spatial mean of the secondary large-scale fields and its index, and the cluster classification of the days */ 00533 data->field[cat].analog_days[s].ntime = ntime_sub[cat][s]; 00534 data->field[cat].analog_days[s].time = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00535 if (data->field[cat].analog_days[s].time == NULL) alloc_error(__FILE__, 
__LINE__); 00536 data->field[cat].analog_days[s].tindex = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00537 if (data->field[cat].analog_days[s].tindex == NULL) alloc_error(__FILE__, __LINE__); 00538 data->field[cat].analog_days[s].tindex_all = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00539 if (data->field[cat].analog_days[s].tindex_all == NULL) alloc_error(__FILE__, __LINE__); 00540 data->field[cat].analog_days[s].year = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00541 if (data->field[cat].analog_days[s].year == NULL) alloc_error(__FILE__, __LINE__); 00542 data->field[cat].analog_days[s].month = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00543 if (data->field[cat].analog_days[s].month == NULL) alloc_error(__FILE__, __LINE__); 00544 data->field[cat].analog_days[s].day = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00545 if (data->field[cat].analog_days[s].day == NULL) alloc_error(__FILE__, __LINE__); 00546 data->field[cat].analog_days[s].tindex_s_all = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00547 if (data->field[cat].analog_days[s].tindex_s_all == NULL) alloc_error(__FILE__, __LINE__); 00548 data->field[cat].analog_days[s].year_s = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00549 if (data->field[cat].analog_days[s].year_s == NULL) alloc_error(__FILE__, __LINE__); 00550 data->field[cat].analog_days[s].month_s = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00551 if (data->field[cat].analog_days[s].month_s == NULL) alloc_error(__FILE__, __LINE__); 00552 data->field[cat].analog_days[s].day_s = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00553 if (data->field[cat].analog_days[s].day_s == NULL) alloc_error(__FILE__, __LINE__); 00554 data->field[cat].analog_days[s].ndayschoice = (int *) malloc(ntime_sub[cat][s] * sizeof(int)); 00555 if (data->field[cat].analog_days[s].ndayschoice == NULL) alloc_error(__FILE__, __LINE__); 00556 data->field[cat].analog_days[s].analog_dayschoice = (tstruct **) malloc(ntime_sub[cat][s] * sizeof(tstruct *)); 00557 if (data->field[cat].analog_days[s].analog_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00558 data->field[cat].analog_days[s].metric_norm = (float **) malloc(ntime_sub[cat][s] * sizeof(float *)); 00559 if (data->field[cat].analog_days[s].metric_norm == NULL) alloc_error(__FILE__, __LINE__); 00560 data->field[cat].analog_days[s].tindex_dayschoice = (int **) malloc(ntime_sub[cat][s] * sizeof(int *)); 00561 if (data->field[cat].analog_days[s].tindex_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00562 for (ii=0; ii<ntime_sub[cat][s]; ii++) { 00563 data->field[cat].analog_days[s].ndayschoice[ii] = data->conf->season[s].ndayschoices; 00564 data->field[cat].analog_days[s].analog_dayschoice[ii] = (tstruct *) NULL; 00565 data->field[cat].analog_days[s].metric_norm[ii] = (float *) NULL; 00566 data->field[cat].analog_days[s].tindex_dayschoice[ii] = (int *) NULL; 00567 } 00568 (void) printf("%s: Searching analog days for season #%d\n", __FILE__, s); 00569 istat = find_the_days(data->field[cat].analog_days[s], data->field[cat].precip_index[s], data->learning->data[s].precip_index, 00570 data->field[cat+2].data[i].down->smean_norm[s], data->learning->data[s].sup_index, 00571 data->field[cat+2].data[i].down->sup_val_norm[s], data->learning->data[s].sup_val, mask_sub, 00572 data->field[cat].data[i].down->days_class_clusters[s], data->learning->data[s].class_clusters, 00573 data->field[cat].time_s->year, data->field[cat].time_s->month, data->field[cat].time_s->day, 00574 data->learning->data[s].time_s->year, 
data->learning->data[s].time_s->month, 00575 data->learning->data[s].time_s->day, data->conf->time_units, 00576 data->field[cat].ntime_ls, data->learning->data[s].ntime, 00577 data->conf->season[s].month, data->conf->season[s].nmonths, 00578 data->conf->season[s].ndays, data->conf->season[s].ndayschoices, data->reg->npts, 00579 data->conf->season[s].shuffle, data->conf->season[s].secondary_choice, 00580 data->conf->season[s].secondary_main_choice, data->conf->season[s].secondary_cov, 00581 data->conf->use_downscaled_year, data->conf->only_wt, 00582 data->field[cat+2].nlon_ls, data->field[cat+2].nlat_ls, 00583 data->learning->sup_nlon, data->learning->sup_nlat); 00584 if (istat != 0) return istat; 00585 } 00586 } 00587 00590 /* Downscale also control run if needed */ 00591 if (data->conf->period_ctrl->downscale == TRUE) 00592 beg_cat = CTRL_SEC_FIELD_LS; 00593 else 00594 beg_cat = SEC_FIELD_LS; 00595 00596 /* Loop over secondary field categories (model run and optionally control run) */ 00597 for (cat=beg_cat; cat>=SEC_FIELD_LS; cat--) { 00598 /* Process only if, for this category, at least one secondary large-scale field is available */ 00599 for (i=0; i<data->field[cat].n_ls; i++) 00600 /* Loop over each season */ 00601 for (s=0; s<data->conf->nseasons; s++) { 00602 data->field[cat].data[i].down->delta[s] = (double *) malloc(ntime_sub[cat][s] * sizeof(double)); 00603 if (data->field[cat].data[i].down->delta[s] == NULL) alloc_error(__FILE__, __LINE__); 00604 data->field[cat].data[i].down->delta_dayschoice[s] = (double **) malloc(ntime_sub[cat][s] * sizeof(double *)); 00605 if (data->field[cat].data[i].down->delta_dayschoice[s] == NULL) alloc_error(__FILE__, __LINE__); 00606 for (ii=0; ii<ntime_sub[cat][s]; ii++) { 00607 data->field[cat].data[i].down->delta_dayschoice[s][ii] = (double *) calloc(data->conf->season[s].ndayschoices, sizeof(double)); 00608 if (data->field[cat].data[i].down->delta_dayschoice[s][ii] == NULL) alloc_error(__FILE__, __LINE__); 00609 } 00610 (void) compute_secondary_large_scale_diff(data->field[cat].data[i].down->delta[s], 00611 data->field[cat].data[i].down->delta_dayschoice[s], 00612 data->field[cat-2].analog_days[s], 00613 data->field[cat].data[i].down->smean_norm[s], data->learning->data[s].sup_index, 00614 data->field[CTRL_SEC_FIELD_LS].data[i].down->var[s], 00615 data->learning->data[s].sup_index_var, ntime_sub[cat][s]); 00616 } 00617 } 00618 00619 } 00620 00623 /* Downscale also control run if needed */ 00624 if (data->conf->period_ctrl->downscale == TRUE) 00625 beg_cat = CTRL_FIELD_LS; 00626 else 00627 beg_cat = FIELD_LS; 00628 00629 /* Loop over large-scale field categories (model run and optionally control run) */ 00630 for (cat=beg_cat; cat>=FIELD_LS; cat--) { 00631 /* Process only if, for this category, at least one large-scale field is available */ 00632 if (data->field[cat].n_ls > 0) { 00633 /* Process only first large-scale field. Limitation of the current implementation. 
*/ 00634 i = 0; 00635 00636 if (data->conf->output_only != TRUE) { 00637 00639 /* Take into account the fact that it may be possible that the seasons does not span the whole year */ 00640 ntimes_merged = 0; 00641 merged_times_flag = (short int *) malloc(data->field[cat].ntime_ls * sizeof(short int)); 00642 if (merged_times_flag == NULL) alloc_error(__FILE__, __LINE__); 00643 for (ii=0; ii<data->field[cat].ntime_ls; ii++) merged_times_flag[ii] = 0; 00644 /* Flag all times within the processed seasons, and count number of timestep */ 00645 for (s=0; s<data->conf->nseasons; s++) 00646 for (ii=0; ii<ntime_sub[cat][s]; ii++) { 00647 /* Retrieve current index */ 00648 curindex_merged = data->field[cat].analog_days[s].tindex_s_all[ii]; 00649 /* Check for bounds */ 00650 if (curindex_merged < 0 || curindex_merged >= data->field[cat].ntime_ls) { 00651 (void) fprintf(stderr, "%s: Fatal error: index in merged season vector outside bounds! curindex_merged=%d max=%d\n", 00652 __FILE__, curindex_merged, data->field[cat].ntime_ls-1); 00653 return -1; 00654 } 00655 merged_times_flag[curindex_merged] = 1; 00656 ntimes_merged++; 00657 } 00658 /* Save time index given flag */ 00659 merged_itimes = (int *) malloc(data->field[cat].ntime_ls * sizeof(int)); 00660 if (merged_itimes == NULL) alloc_error(__FILE__, __LINE__); 00661 merged_times = (double *) malloc(ntimes_merged * sizeof(double)); 00662 if (merged_times == NULL) alloc_error(__FILE__, __LINE__); 00663 curindex_merged = 0; 00664 for (ii=0; ii<data->field[cat].ntime_ls; ii++) { 00665 if (merged_times_flag[ii] == 1) { 00666 merged_times[curindex_merged] = data->field[cat].time_ls[ii]; 00667 merged_itimes[ii] = curindex_merged++; 00668 } 00669 else 00670 merged_itimes[ii] = -1; 00671 } 00672 (void) free(merged_times_flag); 00673 00674 data->field[cat].analog_days_year.time = (int *) malloc(ntimes_merged * sizeof(int)); 00675 if (data->field[cat].analog_days_year.time == NULL) alloc_error(__FILE__, __LINE__); 00676 data->field[cat].analog_days_year.tindex = (int *) malloc(ntimes_merged * sizeof(int)); 00677 if (data->field[cat].analog_days_year.tindex == NULL) alloc_error(__FILE__, __LINE__); 00678 data->field[cat].analog_days_year.tindex_all = (int *) malloc(ntimes_merged * sizeof(int)); 00679 if (data->field[cat].analog_days_year.tindex_all == NULL) alloc_error(__FILE__, __LINE__); 00680 data->field[cat].analog_days_year.year = (int *) malloc(ntimes_merged * sizeof(int)); 00681 if (data->field[cat].analog_days_year.year == NULL) alloc_error(__FILE__, __LINE__); 00682 data->field[cat].analog_days_year.month = (int *) malloc(ntimes_merged * sizeof(int)); 00683 if (data->field[cat].analog_days_year.month == NULL) alloc_error(__FILE__, __LINE__); 00684 data->field[cat].analog_days_year.day = (int *) malloc(ntimes_merged * sizeof(int)); 00685 if (data->field[cat].analog_days_year.day == NULL) alloc_error(__FILE__, __LINE__); 00686 data->field[cat].analog_days_year.tindex_s_all = (int *) malloc(ntimes_merged * sizeof(int)); 00687 if (data->field[cat].analog_days_year.tindex_s_all == NULL) alloc_error(__FILE__, __LINE__); 00688 data->field[cat].analog_days_year.year_s = (int *) malloc(ntimes_merged * sizeof(int)); 00689 if (data->field[cat].analog_days_year.year_s == NULL) alloc_error(__FILE__, __LINE__); 00690 data->field[cat].analog_days_year.month_s = (int *) malloc(ntimes_merged * sizeof(int)); 00691 if (data->field[cat].analog_days_year.month_s == NULL) alloc_error(__FILE__, __LINE__); 00692 data->field[cat].analog_days_year.day_s = (int *) 
malloc(ntimes_merged * sizeof(int)); 00693 if (data->field[cat].analog_days_year.day_s == NULL) alloc_error(__FILE__, __LINE__); 00694 data->field[cat].analog_days_year.analog_dayschoice = (tstruct **) malloc(ntimes_merged * sizeof(tstruct *)); 00695 if (data->field[cat].analog_days_year.analog_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00696 data->field[cat].analog_days_year.metric_norm = (float **) malloc(ntimes_merged * sizeof(float *)); 00697 if (data->field[cat].analog_days_year.metric_norm == NULL) alloc_error(__FILE__, __LINE__); 00698 data->field[cat].analog_days_year.tindex_dayschoice = (int **) malloc(ntimes_merged * sizeof(int *)); 00699 if (data->field[cat].analog_days_year.tindex_dayschoice == NULL) alloc_error(__FILE__, __LINE__); 00700 for (ii=0; ii<ntimes_merged; ii++) { 00701 data->field[cat].analog_days_year.analog_dayschoice[ii] = (tstruct *) NULL; 00702 data->field[cat].analog_days_year.metric_norm[ii] = (float *) NULL; 00703 data->field[cat].analog_days_year.tindex_dayschoice[ii] = (int *) NULL; 00704 } 00705 data->field[cat].analog_days_year.ndayschoice = (int *) malloc(ntimes_merged * sizeof(int)); 00706 if (data->field[cat].analog_days_year.ndayschoice == NULL) alloc_error(__FILE__, __LINE__); 00707 data->field[cat+2].data[i].down->delta_all = (double *) malloc(ntimes_merged * sizeof(double)); 00708 if (data->field[cat+2].data[i].down->delta_all == NULL) alloc_error(__FILE__, __LINE__); 00709 data->field[cat+2].data[i].down->delta_dayschoice_all = (double **) malloc(ntimes_merged * sizeof(double *)); 00710 if (data->field[cat+2].data[i].down->delta_dayschoice_all == NULL) alloc_error(__FILE__, __LINE__); 00711 data->field[cat].data[i].down->dist_all = (double *) malloc(ntimes_merged * sizeof(double)); 00712 if (data->field[cat].data[i].down->dist_all == NULL) alloc_error(__FILE__, __LINE__); 00713 data->field[cat].data[i].down->days_class_clusters_all = (int *) malloc(ntimes_merged * sizeof(int)); 00714 if (data->field[cat].data[i].down->days_class_clusters_all == NULL) alloc_error(__FILE__, __LINE__); 00715 00716 /* Find maximum number of days choices within all seasons */ 00717 maxndays = data->conf->season[0].ndayschoices; 00718 for (s=0; s<data->conf->nseasons; s++) 00719 if (maxndays < data->conf->season[s].ndayschoices) 00720 maxndays = data->conf->season[s].ndayschoices; 00721 /* Allocate memory for special 2D delta t vector. Initialize to zero because dimensions can vary for each season. 
*/ 00722 for (ii=0; ii<ntimes_merged; ii++) { 00723 data->field[cat+2].data[i].down->delta_dayschoice_all[ii] = (double *) calloc(maxndays, sizeof(double)); 00724 if (data->field[cat+2].data[i].down->delta_dayschoice_all[ii] == NULL) alloc_error(__FILE__, __LINE__); 00725 } 00726 00727 /* Loop over each season */ 00728 data->field[cat].analog_days_year.ntime = 0; 00729 for (s=0; s<data->conf->nseasons; s++) { 00730 /* Merge all seasons of analog_day data, supplemental field index, and cluster info */ 00731 printf("Season: %d\n",s); 00732 istat = merge_seasons(data->field[cat].analog_days_year, data->field[cat].analog_days[s], 00733 merged_itimes, ntimes_merged, ntime_sub[cat][s]); 00734 istat = merge_seasonal_data(data->field[cat+2].data[i].down->delta_all, 00735 data->field[cat+2].data[i].down->delta[s], 00736 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00737 ntimes_merged, ntime_sub[cat][s]); 00738 istat = merge_seasonal_data_2d(data->field[cat+2].data[i].down->delta_dayschoice_all, 00739 data->field[cat+2].data[i].down->delta_dayschoice[s], 00740 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00741 data->conf->season[s].ndayschoices,ntimes_merged, ntime_sub[cat][s]); 00742 istat = merge_seasonal_data(data->field[cat].data[i].down->dist_all, 00743 data->field[cat].data[i].down->dist[s], 00744 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00745 ntimes_merged, ntime_sub[cat][s]); 00746 istat = merge_seasonal_data_i(data->field[cat].data[i].down->days_class_clusters_all, 00747 data->field[cat].data[i].down->days_class_clusters[s], 00748 data->field[cat].analog_days[s], merged_itimes, 1, 1, 00749 ntimes_merged, ntime_sub[cat][s]); 00750 if (istat != 0) { 00751 (void) free(merged_times); 00752 (void) free(merged_itimes); 00753 return istat; 00754 } 00755 data->field[cat].analog_days_year.ntime += ntime_sub[cat][s]; 00756 } 00757 00759 if (data->conf->analog_save == TRUE) { 00760 if (cat == FIELD_LS) 00761 analog_file = data->conf->analog_file_other; 00762 else 00763 analog_file = data->conf->analog_file_ctrl; 00764 (void) save_analog_data(data->field[cat].analog_days_year, data->field[cat+2].data[i].down->delta_all, 00765 data->field[cat+2].data[i].down->delta_dayschoice_all, 00766 data->field[cat].data[i].down->dist_all, data->field[cat].data[i].down->days_class_clusters_all, 00767 merged_times, analog_file, data); 00768 } 00769 } 00770 else { 00771 if (cat == FIELD_LS) 00772 analog_file = data->conf->analog_file_other; 00773 else 00774 analog_file = data->conf->analog_file_ctrl; 00775 (void) printf("%s: Reading analog data from file %s\n", __FILE__, analog_file); 00776 (void) read_analog_data(&(data->field[cat].analog_days_year), &(data->field[cat+2].data[i].down->delta_all), 00777 &merged_times, analog_file, data->conf->obs_var->timename); 00778 ntimes_merged = data->field[cat].analog_days_year.ntime; 00779 } 00780 00781 /* Process all data */ 00782 if (data->conf->output == TRUE) { 00783 if (cat == FIELD_LS) { 00784 period = data->conf->period; 00785 } 00786 else { 00787 period = data->conf->period_ctrl; 00788 } 00789 istat = output_downscaled_analog(data->field[cat].analog_days_year, data->field[cat+2].data[i].down->delta_all, 00790 data->conf->output_month_begin, data->conf->output_path, data->conf->config, 00791 data->conf->time_units, data->conf->cal_type, data->conf->deltat, 00792 data->conf->format, data->conf->compression, data->conf->compression_level, 00793 data->conf->debug, 00794 data->info, data->conf->obs_var, period, merged_times, ntimes_merged); 
00795 if (istat != 0) { 00796 (void) free(merged_times); 00797 (void) free(merged_itimes); 00798 return istat; 00799 } 00800 } 00801 (void) free(merged_times); 00802 (void) free(merged_itimes); 00803 } 00804 } 00805 00806 /* Free memory for specific downscaling buffers */ 00807 if (data->conf->output_only != TRUE) { 00808 for (cat=0; cat<NCAT; cat++) 00809 (void) free(ntime_sub[cat]); 00810 (void) free(ntime_sub); 00811 00812 /* Free mask memory if needed */ 00813 if (mask_sub != NULL) 00814 (void) free(mask_sub); 00815 } 00816 00817 /* Success return */ 00818 return 0; 00819 }
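The end of the wt_downscaling listing above rebuilds a single chronological time axis from the per-season results: every timestep belonging to a processed season is first flagged, then assigned a compact merged index (with -1 for timesteps outside all seasons), and that index map is used to scatter the seasonal analog data back into one vector. What follows is a minimal sketch of that two-pass index-mapping pattern only; the month-based season test and the array names (flag, itimes) are illustrative placeholders, not the dsclim structures.

    /* Sketch of the merged-time index mapping: flag timesteps covered by the
       processed seasons, then number them consecutively; -1 marks timesteps
       outside every processed season. Placeholder names and season test. */
    #include <stdio.h>
    #include <stdlib.h>

    int main(void) {
      int ntime = 10;
      /* Hypothetical month of each timestep */
      int month[10] = {1, 2, 3, 6, 7, 8, 9, 12, 1, 2};
      short int *flag = calloc(ntime, sizeof(short int));
      int *itimes = malloc(ntime * sizeof(int));
      int ii, nmerged = 0, cur = 0;

      if (flag == NULL || itimes == NULL) return 1;

      /* Pass 1: flag timesteps inside a processed season (here DJF and JJA) */
      for (ii = 0; ii < ntime; ii++)
        if (month[ii] == 12 || month[ii] <= 2 || (month[ii] >= 6 && month[ii] <= 8)) {
          flag[ii] = 1;
          nmerged++;
        }

      /* Pass 2: assign compact merged indices in chronological order */
      for (ii = 0; ii < ntime; ii++)
        itimes[ii] = (flag[ii] == 1) ? cur++ : -1;

      printf("merged timesteps: %d of %d\n", nmerged, ntime);
      for (ii = 0; ii < ntime; ii++)
        printf("t=%d month=%d merged index=%d\n", ii, month[ii], itimes[ii]);

      free(flag);
      free(itimes);
      return 0;
    }

The per-season merge routines (merge_seasons, merge_seasonal_data, and variants) then use this index map to place each seasonal value at its merged chronological position.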
int wt_learning ( data_struct * data )
Compute or read learning data needed for downscaling climate scenarios using weather typing.
Parameters:
    [in] data   MASTER data structure.
Return status.
A separate return status is kept for reading the solid precipitation observations.
Read learning data
Compute learning data
Assume EOFs are already pre-computed
Normalize secondary large-scale fields for re-analysis learning data
Construct time vectors
Merge observation and reanalysis principal components for the clustering algorithm and normalize using the first singular value (a minimal sketch of this normalization follows the cross-references below).
Definition at line 71 of file wt_learning.c.
References alloc_error(), best_clusters(), learning_data_struct::class_clusters, class_days_pc_clusters(), conf_struct::classif_type, data_struct::conf, learning_struct::data, time_vect_struct::day, reg_struct::dist, dist_clusters_normctrl(), distance_point(), learning_eof_struct::eof, extract_subdomain(), extract_subperiod_months(), FALSE, mask_struct::field, learning_struct::filename_rea_sup, time_vect_struct::hour, mask_struct::lat, reg_struct::lat, learning_struct::lat, data_struct::learning, conf_struct::learning_mask_latitude_max, conf_struct::learning_mask_latitude_min, conf_struct::learning_mask_longitude_max, conf_struct::learning_mask_longitude_min, conf_struct::learning_maskfile, learning_struct::learning_provided, learning_struct::learning_save, mask_struct::lon, reg_struct::lon, learning_struct::lon, mask_points(), mask_region(), mean_field_spatial(), time_vect_struct::minutes, season_struct::month, time_vect_struct::month, conf_struct::nclassifications, season_struct::nclusters, mask_struct::nlat, learning_struct::nlat, mask_struct::nlon, learning_struct::nlon, season_struct::nmonths, learning_struct::nomvar_rea_sup, normalize_field(), normalize_field_2d(), conf_struct::npartitions, reg_struct::npts, season_struct::nreg, conf_struct::nseasons, learning_data_struct::ntime, learning_eof_struct::ntime, learning_struct::obs, learning_struct::obs_neof, learning_struct::pc_normalized_var, learning_data_struct::precip_index, learning_data_struct::precip_index_obs, learning_data_struct::precip_reg, learning_data_struct::precip_reg_autocor, learning_data_struct::precip_reg_cst, learning_data_struct::precip_reg_dist, learning_data_struct::precip_reg_err, learning_data_struct::precip_reg_rsq, learning_data_struct::precip_reg_vif, learning_struct::rea, learning_struct::rea_coords, learning_struct::rea_dimxname, learning_struct::rea_dimyname, learning_struct::rea_gridname, learning_struct::rea_latname, learning_struct::rea_lonname, learning_struct::rea_neof, learning_struct::rea_timename, read_field_subdomain_period(), read_learning_fields(), read_learning_obs_eof(), read_learning_rea_eof(), read_obs_period(), data_struct::reg, regress(), conf_struct::season, conf_struct::secondary_latitude_max, conf_struct::secondary_latitude_min, conf_struct::secondary_longitude_max, conf_struct::secondary_longitude_min, data_struct::secondary_mask, time_vect_struct::seconds, learning_eof_struct::sing, sub_period_common(), learning_data_struct::sup_index, learning_data_struct::sup_index_mean, learning_data_struct::sup_index_var, learning_struct::sup_lat, learning_struct::sup_lon, learning_struct::sup_nlat, learning_struct::sup_nlon, learning_data_struct::sup_val, learning_data_struct::time, time_mean_variance_field_2d(), learning_data_struct::time_s, learning_struct::time_s, learning_eof_struct::time_s, conf_struct::time_units, TRUE, mask_struct::use_mask, learning_data_struct::weight, write_learning_fields(), and time_vect_struct::year.
Referenced by main().
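When the learning data are computed rather than read, the listing below normalizes the reanalysis principal components in two places: over the whole learning period, each component is multiplied by its singular value and then divided by the standard deviation of the first scaled component (pc_normalized_var stores the resulting variances), and, within each season, the reanalysis and observation components are each rescaled by the ratio of their singular value to their first singular value before being concatenated for the clustering step. The fragment below is a minimal sketch of the whole-period normalization only, assuming the same time-fastest layout pc[nt + eof*ntime] as the listing; the function and buffer names are illustrative, not part of dsclim.

    /* Sketch: scale each principal component by its singular value, then
       renormalize all components with the standard deviation of the first
       scaled component. Returns -1 if a component has zero variance.
       Illustrative code; link with -lgsl -lgslcblas -lm. */
    #include <math.h>
    #include <gsl/gsl_statistics_double.h>

    int normalize_pc(double *pc, const double *sing, int neof, int ntime) {
      int eof, nt;
      double first_var = 0.0;

      for (eof = 0; eof < neof; eof++) {
        /* Scale the EOF projection by its singular value */
        for (nt = 0; nt < ntime; nt++)
          pc[nt + eof * ntime] *= sing[eof];

        double var = gsl_stats_variance(&pc[eof * ntime], 1, ntime);
        if (var == 0.0) return -1;      /* degenerate component: too many EOFs */
        if (eof == 0) first_var = var;  /* variance of the first scaled component */

        /* Renormalize with the first component's standard deviation */
        for (nt = 0; nt < ntime; nt++)
          pc[nt + eof * ntime] /= sqrt(first_var);
      }
      return 0;
    }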
00071 { 00078 double *buf_learn = NULL; 00079 double *buf_weight = NULL; 00080 double *buf_learn_obs = NULL; 00081 double *buf_learn_rea = NULL; 00082 double *buf_learn_obs_sub = NULL; 00083 double *buf_learn_rea_sub = NULL; 00084 double *buf_learn_pc = NULL; 00085 double *buf_learn_pc_sub = NULL; 00086 00087 double *precip_liquid_obs = NULL; 00088 double *precip_solid_obs = NULL; 00089 double *precip_obs = NULL; 00090 double *mean_precip = NULL; 00091 double *mean_precip_sub = NULL; 00092 00093 double *precip_reg = NULL; 00094 double *precip_err = NULL; 00095 double *precip_index = NULL; 00096 double *dist_reg = NULL; 00097 double *vif = NULL; 00098 double chisq; 00099 double rsq; 00100 double autocor; 00101 00102 double obs_first_sing; 00103 double rea_sing; 00104 double obs_sing; 00105 double *rea_var = NULL; 00106 double rea_first_sing; 00107 00108 double *tas_rea = NULL; 00109 double *tas_rea_sub = NULL; 00110 double *tas_rea_mean = NULL; 00111 double *tas_rea_mean_sub = NULL; 00112 00113 double missing_value; 00114 double missing_value_precip; 00115 00116 double *mean_dist = NULL; 00117 double *var_dist = NULL; 00118 double *dist = NULL; 00119 double dist_pt; 00120 00121 double *mask_subd = NULL; 00122 short int *mask_sub = NULL; 00123 int nlon_mask; 00124 int nlat_mask; 00125 double *lon_mask = NULL; 00126 double *lat_mask = NULL; 00127 00128 int ntime_learn_all; 00129 int *ntime_sub = NULL; 00130 00131 double *sup_mean = NULL; 00132 double *sup_var = NULL; 00133 00134 double meanvif = 0.0; 00135 00136 int eof; 00137 int clust; 00138 int nt; 00139 int ntt; 00140 int t; 00141 int s; 00142 int i; 00143 int j; 00144 int pt; 00145 int term; 00146 int *npt = NULL; 00147 short int allpt; 00148 00149 /* udunits variables */ 00150 ut_system *unitSystem = NULL; /* Unit System (udunits) */ 00151 ut_unit *dataunits = NULL; /* Data units (udunits) */ 00152 00153 int niter = 2; 00154 00155 int istat; 00156 int istat_solid; 00158 if (data->learning->learning_provided == TRUE) { 00160 istat = read_learning_fields(data); 00161 if (istat != 0) return istat; 00162 } 00163 else { 00167 /* Read re-analysis pre-computed EOF and Singular Values */ 00168 istat = read_learning_rea_eof(data); 00169 if (istat != 0) return istat; 00170 00171 /* Read observations pre-computed EOF and Singular Values */ 00172 istat = read_learning_obs_eof(data); 00173 if (istat != 0) return istat; 00174 00175 /* Select common time period between the re-analysis and the observation data periods */ 00176 if (data->learning->obs_neof != 0) { 00177 istat = sub_period_common(&buf_learn_obs, &ntime_learn_all, data->learning->obs->eof, 00178 data->learning->obs->time_s->year, data->learning->obs->time_s->month, data->learning->obs->time_s->day, 00179 data->learning->rea->time_s->year, data->learning->rea->time_s->month, data->learning->rea->time_s->day, 00180 1, data->learning->obs_neof, 1, data->learning->obs->ntime, data->learning->rea->ntime); 00181 if (istat != 0) return istat; 00182 } 00183 istat = sub_period_common(&buf_learn_rea, &ntime_learn_all, data->learning->rea->eof, 00184 data->learning->rea->time_s->year, data->learning->rea->time_s->month, data->learning->rea->time_s->day, 00185 data->learning->obs->time_s->year, data->learning->obs->time_s->month, data->learning->obs->time_s->day, 00186 1, data->learning->rea_neof, 1, data->learning->rea->ntime, data->learning->obs->ntime); 00187 if (istat != 0) return istat; 00188 00189 rea_var = (double *) malloc(data->learning->rea_neof * sizeof(double)); 00190 if (rea_var == NULL) 
alloc_error(__FILE__, __LINE__); 00191 00192 /* Compute normalisation factor of EOF of large-scale field for the whole period */ 00193 00194 data->learning->pc_normalized_var = (double *) malloc(data->learning->rea_neof * sizeof(double)); 00195 if (data->learning->pc_normalized_var == NULL) alloc_error(__FILE__, __LINE__); 00196 buf_learn_pc = (double *) malloc(data->learning->rea_neof * ntime_learn_all * sizeof(double)); 00197 if (buf_learn_pc == NULL) alloc_error(__FILE__, __LINE__); 00198 00199 for (eof=0; eof<data->learning->rea_neof; eof++) { 00200 00201 for (nt=0; nt<ntime_learn_all; nt++) 00202 buf_learn_pc[nt+eof*ntime_learn_all] = buf_learn_rea[nt+eof*ntime_learn_all] * data->learning->rea->sing[eof]; 00203 00204 rea_var[eof] = gsl_stats_variance(&(buf_learn_pc[eof*ntime_learn_all]), 1, ntime_learn_all); 00205 if (rea_var[eof] == 0.0) { 00206 (void) fprintf(stderr, "%s: ERROR: Variance of the projection of the large-scale field onto EOF is 0.0. You probably have too many EOFs for your field. EOF number=%d. Variance=%f. Must abort...\n", 00207 __FILE__, eof, rea_var[eof]); 00208 return -1; 00209 } 00210 00211 /* Renormalize EOF of large-scale field for the whole period using the first EOF norm and the Singular Value */ 00212 for (nt=0; nt<ntime_learn_all; nt++) 00213 buf_learn_pc[nt+eof*ntime_learn_all] = buf_learn_pc[nt+eof*ntime_learn_all] / sqrt(rea_var[0]); 00214 00215 /* Recompute normalization factor using normalized field */ 00216 data->learning->pc_normalized_var[eof] = gsl_stats_variance(&(buf_learn_pc[eof*ntime_learn_all]), 1, ntime_learn_all); 00217 if (data->learning->pc_normalized_var[eof] == 0.0) { 00218 (void) fprintf(stderr, "%s: ERROR: Normalized variance of the projection of the large-scale field onto EOF is 0.0. You probably have too many EOFs for your field. EOF number=%d. Variance=%f. Must abort...\n", 00219 __FILE__, eof, data->learning->pc_normalized_var[eof]); 00220 return -1; 00221 } 00222 } 00223 00224 ntime_sub = (int *) malloc(data->conf->nseasons * sizeof(int)); 00225 if (ntime_sub == NULL) alloc_error(__FILE__, __LINE__); 00226 00227 /* Read observed precipitation (liquid and solid) */ 00228 istat_solid = read_obs_period(&precip_solid_obs, &(data->learning->lon), &(data->learning->lat), &missing_value_precip, 00229 data, "prsn", data->learning->obs->time_s->year, data->learning->obs->time_s->month, 00230 data->learning->obs->time_s->day, &(data->learning->nlon), &(data->learning->nlat), 00231 data->learning->obs->ntime); 00232 if (istat_solid == -1) return -1; 00233 if (istat_solid >= 0) { 00234 (void) free(data->learning->lon); 00235 (void) free(data->learning->lat); 00236 } 00237 istat = read_obs_period(&precip_liquid_obs, &(data->learning->lon), &(data->learning->lat), &missing_value_precip, data, "prr", 00238 data->learning->obs->time_s->year, data->learning->obs->time_s->month, data->learning->obs->time_s->day, 00239 &(data->learning->nlon), &(data->learning->nlat), data->learning->obs->ntime); 00240 if (istat == -1) return -1; 00241 00242 /* Calculate total precipitation */ 00243 precip_obs = (double *) malloc(data->learning->nlon*data->learning->nlat*data->learning->obs->ntime * sizeof(double)); 00244 if (precip_obs == NULL) alloc_error(__FILE__, __LINE__); 00245 00246 if (istat_solid == -2) { 00247 fprintf(stderr, "%s: WARNING: Snow observation variable not found in dsclim XML config file. 
Will assume that you don't have snow observations, and set it to zero.\n", __FILE__); 00248 for (t=0; t<data->learning->obs->ntime; t++) 00249 for (j=0; j<data->learning->nlat; j++) 00250 for (i=0; i<data->learning->nlon; i++) 00251 if (precip_liquid_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] != missing_value_precip) 00252 precip_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] = 00253 precip_liquid_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] * 86400; 00254 else 00255 precip_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] = missing_value_precip; 00256 (void) free(precip_liquid_obs); 00257 } 00258 else { 00259 (void) printf("%s: Calculating total precipitation from solid and liquid.\n", __FILE__); 00260 for (t=0; t<data->learning->obs->ntime; t++) 00261 for (j=0; j<data->learning->nlat; j++) 00262 for (i=0; i<data->learning->nlon; i++) 00263 if (precip_liquid_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] != missing_value_precip) 00264 precip_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] = 00265 (precip_liquid_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] + 00266 precip_solid_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat]) * 86400.0; 00267 else 00268 precip_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] = missing_value_precip; 00269 (void) free(precip_liquid_obs); 00270 (void) free(precip_solid_obs); 00271 } 00272 00273 /* Apply mask for learning data */ 00274 if (data->conf->learning_maskfile->use_mask == TRUE) { 00275 /* Allocate memory */ 00276 mask_sub = (short int *) malloc(data->learning->nlat*data->learning->nlon * sizeof(short int)); 00277 if (mask_sub == NULL) alloc_error(__FILE__, __LINE__); 00278 for (i=0; i<data->learning->nlat*data->learning->nlon; i++) 00279 mask_sub[i] = (short int) data->conf->learning_maskfile->field[i]; 00280 /* Apply mask */ 00281 (void) printf("%s: Masking points using mask file for regression analysis.\n", __FILE__); 00282 (void) mask_points(precip_obs, missing_value_precip, mask_sub, 00283 data->learning->nlon, data->learning->nlat, data->learning->obs->ntime); 00284 /* Free memory of mask_sub */ 00285 (void) free(mask_sub); 00286 mask_sub = NULL; 00287 } 00288 00289 /* Mask region if needed using domain bounding box */ 00290 if (data->conf->learning_mask_longitude_min != -999.0 && 00291 data->conf->learning_mask_longitude_max != -999.0 && 00292 data->conf->learning_mask_latitude_min != -999.0 && 00293 data->conf->learning_mask_latitude_max != -999.0) { 00294 (void) printf("%s: Masking region for regression analysis.\n", __FILE__); 00295 (void) mask_region(precip_obs, missing_value_precip, data->learning->lon, data->learning->lat, 00296 data->conf->learning_mask_longitude_min, data->conf->learning_mask_longitude_max, 00297 data->conf->learning_mask_latitude_min, data->conf->learning_mask_latitude_max, 00298 data->learning->nlon, data->learning->nlat, data->learning->obs->ntime); 00299 } 00300 00301 /* Perform spatial mean of observed precipitation around regression points, normalize precip */ 00302 (void) printf("%s: Perform spatial mean of observed precipitation around regression points.\n", __FILE__); 00303 mean_precip = (double *) malloc(data->reg->npts * data->learning->obs->ntime * sizeof(double)); 00304 if (mean_precip == NULL) alloc_error(__FILE__, __LINE__); 00305 npt = (int *) 
malloc(data->learning->obs->ntime * sizeof(int)); 00306 if (npt == NULL) alloc_error(__FILE__, __LINE__); 00307 for (pt=0; pt<data->reg->npts; pt++) { 00308 for (t=0; t<data->learning->obs->ntime; t++) { 00309 mean_precip[t+pt*data->learning->obs->ntime] = 0.0; 00310 npt[t] = 0; 00311 } 00312 for (j=0; j<data->learning->nlat; j++) 00313 for (i=0; i<data->learning->nlon; i++) { 00314 dist_pt = distance_point(data->reg->lon[pt], data->reg->lat[pt], 00315 data->learning->lon[i+j*data->learning->nlon], data->learning->lat[i+j*data->learning->nlon]); 00316 if (dist_pt <= data->reg->dist) 00317 for (t=0; t<data->learning->obs->ntime; t++) 00318 if (precip_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat] != missing_value_precip) { 00319 mean_precip[t+pt*data->learning->obs->ntime] += 00320 precip_obs[i+j*data->learning->nlon+t*data->learning->nlon*data->learning->nlat]; 00321 npt[t]++; 00322 } 00323 } 00324 allpt = FALSE; 00325 for (t=0; t<data->learning->obs->ntime; t++) 00326 if (npt[t] == 0) allpt = TRUE; 00327 if (allpt == TRUE) { 00328 (void) fprintf(stderr, "%s: WARNING: There are no point of observation in the vicinity of the regression point #%d at a minimum distance of at least %f meters! Verify your regression points, or the configuration of your coordinate variable names in your configuration file, or that you don't have all missing values in your observations in the vicinity of the regression point. Time=%d. lon=%lf lat=%lf. WARNING: Will desactivate this regression point.\n", 00329 __FILE__, pt, data->reg->dist, t, data->reg->lon[pt], data->reg->lat[pt]); 00330 for (t=0; t<data->learning->obs->ntime; t++) 00331 mean_precip[t+pt*data->learning->obs->ntime] = missing_value_precip; 00332 } 00333 else 00334 for (t=0; t<data->learning->obs->ntime; t++) 00335 mean_precip[t+pt*data->learning->obs->ntime] = sqrt(mean_precip[t+pt*data->learning->obs->ntime] / (double) npt[t]); 00336 } 00337 (void) free(npt); 00338 (void) free(precip_obs); 00339 00340 /* Select common time period between the re-analysis and the observation data periods for */ 00341 /* secondary large-scale field and extract subdomain */ 00342 istat = read_field_subdomain_period(&tas_rea, &(data->learning->sup_lon), &(data->learning->sup_lat), 00343 &missing_value, data->learning->nomvar_rea_sup, 00344 data->learning->obs->time_s->year, data->learning->obs->time_s->month, 00345 data->learning->obs->time_s->day, 00346 data->conf->secondary_longitude_min, data->conf->secondary_longitude_max, 00347 data->conf->secondary_latitude_min, data->conf->secondary_latitude_max, 00348 data->learning->rea_coords, data->learning->rea_gridname, 00349 data->learning->rea_lonname, data->learning->rea_latname, 00350 data->learning->rea_dimxname, data->learning->rea_dimyname, 00351 data->learning->rea_timename, data->learning->filename_rea_sup, 00352 &(data->learning->sup_nlon), &(data->learning->sup_nlat), data->learning->obs->ntime); 00353 00354 /* Perform spatial mean of secondary large-scale fields */ 00355 tas_rea_mean = (double *) malloc(data->learning->obs->ntime * sizeof(double)); 00356 if (tas_rea_mean == NULL) alloc_error(__FILE__, __LINE__); 00357 /* Prepare mask */ 00358 if (data->secondary_mask->use_mask == TRUE) { 00359 (void) extract_subdomain(&mask_subd, &lon_mask, &lat_mask, &nlon_mask, &nlat_mask, data->secondary_mask->field, 00360 data->secondary_mask->lon, data->secondary_mask->lat, 00361 data->conf->secondary_longitude_min, data->conf->secondary_longitude_max, 00362 data->conf->secondary_latitude_min, 
data->conf->secondary_latitude_max, 00363 data->secondary_mask->nlon, data->secondary_mask->nlat, 1); 00364 if (data->learning->sup_nlon != nlon_mask || data->learning->sup_nlat != nlat_mask) { 00365 (void) fprintf(stderr, "%s: IMPORTANT WARNING: The mask for secondary large-scale fields after selecting subdomain has invalid dimensions: nlon=%d nlat=%d. Expected: nlon=%d nlat=%d\nReverting to no-mask processing.", __FILE__, nlon_mask, nlat_mask, 00366 data->learning->sup_nlon, data->learning->sup_nlat); 00367 mask_sub = (short int *) NULL; 00368 } 00369 else { 00370 mask_sub = (short int *) malloc(data->learning->sup_nlat*data->learning->sup_nlon * sizeof(short int)); 00371 if (mask_sub == NULL) alloc_error(__FILE__, __LINE__); 00372 for (i=0; i<data->learning->sup_nlat*data->learning->sup_nlon; i++) 00373 mask_sub[i] = (short int) mask_subd[i]; 00374 } 00375 (void) free(mask_subd); 00376 (void) free(lon_mask); 00377 (void) free(lat_mask); 00378 } 00379 else 00380 mask_sub = (short int *) NULL; 00381 00382 if (mask_sub != NULL) 00383 printf("%s: Using a mask for secondary large-scale fields.\n", __FILE__); 00384 00385 (void) mean_field_spatial(tas_rea_mean, tas_rea, mask_sub, data->learning->sup_nlon, data->learning->sup_nlat, 00386 data->learning->obs->ntime); 00387 if (mask_sub != NULL) 00388 (void) free(mask_sub); 00389 00390 /* Loop over each season */ 00391 (void) printf("Extract data for each season separately and process each season.\n"); 00392 00393 for (s=0; s<data->conf->nseasons; s++) { 00394 /* Process separately each season */ 00395 00396 /* Select season months in the whole time period and create sub-period fields */ 00397 if (data->learning->obs_neof != 0) { 00398 (void) extract_subperiod_months(&buf_learn_obs_sub, &(ntime_sub[s]), buf_learn_obs, 00399 data->learning->time_s->year, data->learning->time_s->month, data->learning->time_s->day, 00400 data->conf->season[s].month, 00401 1, 1, data->learning->obs_neof, ntime_learn_all, 00402 data->conf->season[s].nmonths); 00403 } 00404 (void) extract_subperiod_months(&buf_learn_rea_sub, &(ntime_sub[s]), buf_learn_rea, 00405 data->learning->time_s->year, data->learning->time_s->month, data->learning->time_s->day, 00406 data->conf->season[s].month, 00407 1, 1, data->learning->rea_neof, ntime_learn_all, 00408 data->conf->season[s].nmonths); 00409 (void) extract_subperiod_months(&buf_learn_pc_sub, &(ntime_sub[s]), buf_learn_pc, 00410 data->learning->time_s->year, data->learning->time_s->month, data->learning->time_s->day, 00411 data->conf->season[s].month, 00412 1, 1, data->learning->rea_neof, ntime_learn_all, 00413 data->conf->season[s].nmonths); 00414 (void) extract_subperiod_months(&tas_rea_mean_sub, &(ntime_sub[s]), tas_rea_mean, 00415 data->learning->time_s->year, data->learning->time_s->month, data->learning->time_s->day, 00416 data->conf->season[s].month, 00417 1, 1, 1, ntime_learn_all, 00418 data->conf->season[s].nmonths); 00419 (void) extract_subperiod_months(&tas_rea_sub, &(ntime_sub[s]), tas_rea, 00420 data->learning->time_s->year, data->learning->time_s->month, data->learning->time_s->day, 00421 data->conf->season[s].month, 00422 1, data->learning->sup_nlon, data->learning->sup_nlat, ntime_learn_all, 00423 data->conf->season[s].nmonths); 00424 (void) extract_subperiod_months(&mean_precip_sub, &(ntime_sub[s]), mean_precip, 00425 data->learning->time_s->year, data->learning->time_s->month, data->learning->time_s->day, 00426 data->conf->season[s].month, 00427 1, 1, data->reg->npts, ntime_learn_all, 00428 
data->conf->season[s].nmonths); 00429 00431 data->learning->data[s].sup_index = (double *) malloc(ntime_sub[s] * sizeof(double)); 00432 if (data->learning->data[s].sup_index == NULL) alloc_error(__FILE__, __LINE__); 00433 data->learning->data[s].sup_val = (double *) malloc(data->learning->sup_nlon*data->learning->sup_nlat*ntime_sub[s] * sizeof(double)); 00434 if (data->learning->data[s].sup_val == NULL) alloc_error(__FILE__, __LINE__); 00435 00436 /* Compute mean and variance over time */ 00437 data->learning->data[s].sup_index_mean = gsl_stats_mean(tas_rea_mean_sub, 1, ntime_sub[s]); 00438 data->learning->data[s].sup_index_var = gsl_stats_variance(tas_rea_mean_sub, 1, ntime_sub[s]); 00439 00440 /* Normalize using mean and variance */ 00441 (void) normalize_field(data->learning->data[s].sup_index, tas_rea_mean_sub, data->learning->data[s].sup_index_mean, 00442 data->learning->data[s].sup_index_var, 1, 1, ntime_sub[s]); 00443 00444 /* Compute mean and variance over time for each point */ 00445 sup_mean = (double *) malloc(data->learning->sup_nlon*data->learning->sup_nlat*ntime_sub[s] * sizeof(double)); 00446 if (sup_mean == NULL) alloc_error(__FILE__, __LINE__); 00447 sup_var = (double *) malloc(data->learning->sup_nlon*data->learning->sup_nlat*ntime_sub[s] * sizeof(double)); 00448 if (sup_var == NULL) alloc_error(__FILE__, __LINE__); 00449 (void) time_mean_variance_field_2d(sup_mean, sup_var, tas_rea_sub, data->learning->sup_nlon, data->learning->sup_nlat, ntime_sub[s]); 00450 00451 /* Normalize whole secondary 2D field using mean and variance at each point */ 00452 (void) normalize_field_2d(data->learning->data[s].sup_val, tas_rea_sub, sup_mean, 00453 sup_var, data->learning->sup_nlon, data->learning->sup_nlat, ntime_sub[s]); 00454 00455 (void) free(sup_mean); 00456 sup_mean = NULL; 00457 (void) free(sup_var); 00458 sup_var = NULL; 00459 00461 data->learning->data[s].ntime = ntime_sub[s]; 00462 data->learning->data[s].time = (double *) malloc(ntime_sub[s] * sizeof(double)); 00463 if (data->learning->data[s].time == NULL) alloc_error(__FILE__, __LINE__); 00464 data->learning->data[s].time_s->year = (int *) malloc(ntime_sub[s] * sizeof(int)); 00465 if (data->learning->data[s].time_s->year == NULL) alloc_error(__FILE__, __LINE__); 00466 data->learning->data[s].time_s->month = (int *) malloc(ntime_sub[s] * sizeof(int)); 00467 if (data->learning->data[s].time_s->month == NULL) alloc_error(__FILE__, __LINE__); 00468 data->learning->data[s].time_s->day = (int *) malloc(ntime_sub[s] * sizeof(int)); 00469 if (data->learning->data[s].time_s->day == NULL) alloc_error(__FILE__, __LINE__); 00470 data->learning->data[s].time_s->hour = (int *) malloc(ntime_sub[s] * sizeof(int)); 00471 if (data->learning->data[s].time_s->hour == NULL) alloc_error(__FILE__, __LINE__); 00472 data->learning->data[s].time_s->minutes = (int *) malloc(ntime_sub[s] * sizeof(int)); 00473 if (data->learning->data[s].time_s->minutes == NULL) alloc_error(__FILE__, __LINE__); 00474 data->learning->data[s].time_s->seconds = (double *) malloc(ntime_sub[s] * sizeof(double)); 00475 if (data->learning->data[s].time_s->seconds == NULL) alloc_error(__FILE__, __LINE__); 00476 00477 /* Retrieve time index spanning selected months and assign time structure values */ 00478 t = 0; 00479 00480 /* Initialize udunits */ 00481 ut_set_error_message_handler(ut_ignore); 00482 unitSystem = ut_read_xml(NULL); 00483 ut_set_error_message_handler(ut_write_to_stderr); 00484 dataunits = ut_parse(unitSystem, data->conf->time_units, UT_ASCII); 00485 00486 for 
(nt=0; nt<ntime_learn_all; nt++) 00487 for (ntt=0; ntt<data->conf->season[s].nmonths; ntt++) 00488 if (data->learning->time_s->month[nt] == data->conf->season[s].month[ntt]) { 00489 data->learning->data[s].time_s->year[t] = data->learning->time_s->year[nt]; 00490 data->learning->data[s].time_s->month[t] = data->learning->time_s->month[nt]; 00491 data->learning->data[s].time_s->day[t] = data->learning->time_s->day[nt]; 00492 data->learning->data[s].time_s->hour[t] = data->learning->time_s->hour[nt]; 00493 data->learning->data[s].time_s->minutes[t] = data->learning->time_s->minutes[nt]; 00494 data->learning->data[s].time_s->seconds[t] = data->learning->time_s->seconds[nt]; 00495 istat = utInvCalendar2(data->learning->data[s].time_s->year[t], data->learning->data[s].time_s->month[t], 00496 data->learning->data[s].time_s->day[t], data->learning->data[s].time_s->hour[t], 00497 data->learning->data[s].time_s->minutes[t], data->learning->data[s].time_s->seconds[t], 00498 dataunits, &(data->learning->data[s].time[t])); 00499 t++; 00500 } 00501 00502 (void) ut_free(dataunits); 00503 (void) ut_free_system(unitSystem); 00504 00507 buf_learn = (double *) realloc(buf_learn, ntime_sub[s] * (data->learning->rea_neof + data->learning->obs_neof) * sizeof(double)); 00508 if (buf_learn == NULL) alloc_error(__FILE__, __LINE__); 00509 00510 /* Normalisation by the first Singular Value */ 00511 rea_first_sing = data->learning->rea->sing[0]; 00512 for (eof=0; eof<data->learning->rea_neof; eof++) { 00513 rea_sing = data->learning->rea->sing[eof]; 00514 for (nt=0; nt<ntime_sub[s]; nt++) { 00515 buf_learn_rea_sub[nt+eof*ntime_sub[s]] = buf_learn_rea_sub[nt+eof*ntime_sub[s]] * rea_sing / rea_first_sing; 00516 buf_learn[nt+eof*ntime_sub[s]] = buf_learn_rea_sub[nt+eof*ntime_sub[s]]; 00517 } 00518 } 00519 if (data->learning->obs_neof != 0) { 00520 obs_first_sing = data->learning->obs->sing[0]; 00521 for (eof=0; eof<data->learning->obs_neof; eof++) { 00522 obs_sing = data->learning->obs->sing[eof]; 00523 for (nt=0; nt<ntime_sub[s]; nt++) { 00524 buf_learn_obs_sub[nt+eof*ntime_sub[s]] = buf_learn_obs_sub[nt+eof*ntime_sub[s]] * obs_sing / obs_first_sing; 00525 buf_learn[nt+(eof+data->learning->rea_neof)*ntime_sub[s]] = buf_learn_obs_sub[nt+eof*ntime_sub[s]]; 00526 } 00527 } 00528 } 00529 00530 /* Compute best clusters */ 00531 buf_weight = (double *) realloc(buf_weight, data->conf->season[s].nclusters * (data->learning->rea_neof + data->learning->obs_neof) * 00532 sizeof(double)); 00533 if (buf_weight == NULL) alloc_error(__FILE__, __LINE__); 00534 niter = best_clusters(buf_weight, buf_learn, data->conf->classif_type, data->conf->npartitions, 00535 data->conf->nclassifications, data->learning->rea_neof + data->learning->obs_neof, 00536 data->conf->season[s].nclusters, ntime_sub[s]); 00537 00538 /* Keep only first data->learning->rea_neof EOFs */ 00539 data->learning->data[s].weight = (double *) 00540 malloc(data->conf->season[s].nclusters*data->learning->rea_neof * sizeof(double)); 00541 if (data->learning->data[s].weight == NULL) alloc_error(__FILE__, __LINE__); 00542 for (clust=0; clust<data->conf->season[s].nclusters; clust++) 00543 for (eof=0; eof<data->learning->rea_neof; eof++) 00544 data->learning->data[s].weight[eof+clust*data->learning->rea_neof] = 00545 buf_weight[eof+clust*(data->learning->rea_neof+data->learning->obs_neof)]; 00546 00547 /* Classify each day in the current clusters */ 00548 data->learning->data[s].class_clusters = (int *) malloc(ntime_sub[s] * sizeof(int)); 00549 if 
(data->learning->data[s].class_clusters == NULL) alloc_error(__FILE__, __LINE__); 00550 (void) class_days_pc_clusters(data->learning->data[s].class_clusters, buf_learn, 00551 data->learning->data[s].weight, data->conf->classif_type, 00552 data->learning->rea_neof, data->conf->season[s].nclusters, 00553 ntime_sub[s]); 00554 00555 /* Set mean and variance of distances to clusters to 1.0 because we first need to compute distances and */ 00556 /* we don't have a control run in learning mode */ 00557 mean_dist = (double *) realloc(mean_dist, data->conf->season[s].nclusters * sizeof(double)); 00558 if (mean_dist == NULL) alloc_error(__FILE__, __LINE__); 00559 var_dist = (double *) realloc(var_dist, data->conf->season[s].nclusters * sizeof(double)); 00560 if (var_dist == NULL) alloc_error(__FILE__, __LINE__); 00561 for (clust=0; clust<data->conf->season[s].nclusters; clust++) { 00562 mean_dist[clust] = 1.0; 00563 var_dist[clust] = 1.0; 00564 } 00565 00566 /* Compute distances to clusters using normalization */ 00567 dist = (double *) realloc(dist, data->conf->season[s].nclusters*ntime_sub[s] * sizeof(double)); 00568 if (dist == NULL) alloc_error(__FILE__, __LINE__); 00569 (void) dist_clusters_normctrl(dist, buf_learn_pc_sub, data->learning->data[s].weight, 00570 data->learning->pc_normalized_var, data->learning->pc_normalized_var, mean_dist, var_dist, 00571 data->learning->rea_neof, data->conf->season[s].nclusters, ntime_sub[s]); 00572 /* Normalize */ 00573 for (clust=0; clust<data->conf->season[s].nclusters; clust++) { 00574 /* Calculate mean over time */ 00575 mean_dist[clust] = gsl_stats_mean(&(dist[clust*ntime_sub[s]]), 1, ntime_sub[s]); 00576 /* Calculate variance over time */ 00577 var_dist[clust] = gsl_stats_variance(&(dist[clust*ntime_sub[s]]), 1, ntime_sub[s]); 00578 /* Normalize */ 00579 for (nt=0; nt<ntime_sub[s]; nt++) 00580 dist[nt+clust*ntime_sub[s]] = ( dist[nt+clust*ntime_sub[s]] - mean_dist[clust] ) / sqrt(var_dist[clust]); 00581 } 00582 00583 /* Classify each day in the current clusters */ 00584 /* data->learning->data[s].class_clusters */ 00585 /* data->learning->data[s].class_clusters = (int *) malloc(ntime_sub[s] * sizeof(int)); 00586 if (data->learning->data[s].class_clusters == NULL) alloc_error(__FILE__, __LINE__); 00587 (void) class_days_pc_clusters(data->learning->data[s].class_clusters, buf_learn, 00588 data->learning->data[s].weight, data->conf->classif_type, 00589 data->learning->rea_neof, data->conf->season[s].nclusters, ntime_sub[s]);*/ 00590 00591 /* Allocate memory for regression */ 00592 precip_reg = (double *) malloc(data->conf->season[s].nreg * sizeof(double)); 00593 if (precip_reg == NULL) alloc_error(__FILE__, __LINE__); 00594 precip_index = (double *) malloc(ntime_sub[s] * sizeof(double)); 00595 if (precip_index == NULL) alloc_error(__FILE__, __LINE__); 00596 precip_err = (double *) malloc(ntime_sub[s] * sizeof(double)); 00597 if (precip_err == NULL) alloc_error(__FILE__, __LINE__); 00598 dist_reg = (double *) malloc(data->conf->season[s].nreg*ntime_sub[s] * sizeof(double)); 00599 if (dist_reg == NULL) alloc_error(__FILE__, __LINE__); 00600 vif = (double *) malloc(data->conf->season[s].nreg * sizeof(double)); 00601 if (vif == NULL) alloc_error(__FILE__, __LINE__); 00602 00603 /* Create variable to hold values of x vector for regression */ 00604 /* Begin with distances to clusters */ 00605 for (clust=0; clust<data->conf->season[s].nclusters; clust++) 00606 for (t=0; t<ntime_sub[s]; t++) 00607 dist_reg[t+clust*ntime_sub[s]] = dist[t+clust*ntime_sub[s]]; 00608 
00609 /* For special seasons using secondary field in the regression, append values to x vector for regression */ 00610 if (data->conf->season[s].nreg == (data->conf->season[s].nclusters+1)) { 00611 clust = data->conf->season[s].nclusters; 00612 for (t=0; t<ntime_sub[s]; t++) 00613 dist_reg[t+clust*ntime_sub[s]] = data->learning->data[s].sup_index[t]; 00614 } 00615 00616 data->learning->data[s].precip_reg_cst = (double *) malloc(data->reg->npts * sizeof(double)); 00617 if (data->learning->data[s].precip_reg_cst == NULL) alloc_error(__FILE__, __LINE__); 00618 data->learning->data[s].precip_reg = (double *) malloc(data->reg->npts*data->conf->season[s].nreg * sizeof(double)); 00619 if (data->learning->data[s].precip_reg == NULL) alloc_error(__FILE__, __LINE__); 00620 data->learning->data[s].precip_reg_dist = (double *) 00621 malloc(data->conf->season[s].nclusters*ntime_sub[s] * sizeof(double)); 00622 if (data->learning->data[s].precip_reg_dist == NULL) alloc_error(__FILE__, __LINE__); 00623 data->learning->data[s].precip_index = (double *) malloc(data->reg->npts*ntime_sub[s] * sizeof(double)); 00624 if (data->learning->data[s].precip_index == NULL) alloc_error(__FILE__, __LINE__); 00625 data->learning->data[s].precip_index_obs = (double *) malloc(data->reg->npts*ntime_sub[s] * sizeof(double)); 00626 if (data->learning->data[s].precip_index_obs == NULL) alloc_error(__FILE__, __LINE__); 00627 data->learning->data[s].precip_reg_err = (double *) malloc(data->reg->npts*ntime_sub[s] * sizeof(double)); 00628 if (data->learning->data[s].precip_reg_err == NULL) alloc_error(__FILE__, __LINE__); 00629 data->learning->data[s].precip_reg_rsq = (double *) malloc(data->reg->npts * sizeof(double)); 00630 if (data->learning->data[s].precip_reg_rsq == NULL) alloc_error(__FILE__, __LINE__); 00631 data->learning->data[s].precip_reg_vif = (double *) malloc(data->conf->season[s].nreg * sizeof(double)); 00632 if (data->learning->data[s].precip_reg_vif == NULL) alloc_error(__FILE__, __LINE__); 00633 data->learning->data[s].precip_reg_autocor = (double *) malloc(data->reg->npts * sizeof(double)); 00634 if (data->learning->data[s].precip_reg_autocor == NULL) alloc_error(__FILE__, __LINE__); 00635 00636 /* Save distances */ 00637 for (t=0; t<ntime_sub[s]; t++) 00638 for (clust=0; clust<data->conf->season[s].nclusters; clust++) 00639 data->learning->data[s].precip_reg_dist[clust+t*data->conf->season[s].nclusters] = dist[t+clust*ntime_sub[s]]; 00640 00641 for (pt=0; pt<data->reg->npts; pt++) { 00642 /* Compute regression and save regression constant */ 00643 istat = regress(precip_reg, dist_reg, &(mean_precip_sub[pt*ntime_sub[s]]), &(data->learning->data[s].precip_reg_cst[pt]), 00644 precip_index, precip_err, &chisq, &rsq, vif, &autocor, data->conf->season[s].nreg, ntime_sub[s]); 00645 /* Save R^2 */ 00646 data->learning->data[s].precip_reg_rsq[pt] = rsq; 00647 /* Save residuals */ 00648 for (t=0; t<ntime_sub[s]; t++) 00649 data->learning->data[s].precip_reg_err[pt+t*data->reg->npts] = precip_err[t]; 00650 /* Save autocorrelation of residuals */ 00651 data->learning->data[s].precip_reg_autocor[pt] = autocor; 00652 /* Save Variance Inflation Factor VIF, and compute mean VIF */ 00653 if (pt == 0) { 00654 meanvif = 0.0; 00655 for (term=0; term<data->conf->season[s].nreg; term++) { 00656 data->learning->data[s].precip_reg_vif[term] = vif[term]; 00657 meanvif += vif[term]; 00658 } 00659 meanvif = meanvif / (double) data->conf->season[s].nreg; 00660 } 00661 00662 // (void) fprintf(stdout, "%s: pt=%d R^2=%lf CHI^2=%lf 
ACOR=%lf\n", __FILE__, pt, rsq, chisq, autocor); 00663 00664 /* Save regression coefficients */ 00665 for (clust=0; clust<data->conf->season[s].nreg; clust++) 00666 data->learning->data[s].precip_reg[pt+clust*data->reg->npts] = precip_reg[clust]; 00667 00668 /* Save precipitation index */ 00669 for (t=0; t<ntime_sub[s]; t++) 00670 data->learning->data[s].precip_index[pt+t*data->reg->npts] = precip_index[t]; 00671 00672 /* Save observed precipitation index */ 00673 for (t=0; t<ntime_sub[s]; t++) 00674 data->learning->data[s].precip_index_obs[pt+t*data->reg->npts] = mean_precip_sub[t+pt*ntime_sub[s]]; 00675 } 00676 00677 (void) fprintf(stdout, "%s: MeanVIF=%lf\n", __FILE__, meanvif); 00678 00679 (void) free(precip_reg); 00680 (void) free(precip_index); 00681 (void) free(precip_err); 00682 (void) free(dist_reg); 00683 (void) free(vif); 00684 00685 (void) free(buf_learn_rea_sub); 00686 buf_learn_rea_sub = NULL; 00687 if (data->learning->obs_neof != 0) { 00688 (void) free(buf_learn_obs_sub); 00689 buf_learn_obs_sub = NULL; 00690 } 00691 (void) free(buf_learn_pc_sub); 00692 buf_learn_pc_sub = NULL; 00693 (void) free(tas_rea_mean_sub); 00694 tas_rea_mean_sub = NULL; 00695 (void) free(tas_rea_sub); 00696 tas_rea_sub = NULL; 00697 (void) free(mean_precip_sub); 00698 mean_precip_sub = NULL; 00699 } 00700 00701 (void) free(tas_rea); 00702 (void) free(tas_rea_mean); 00703 (void) free(mean_precip); 00704 (void) free(buf_weight); 00705 (void) free(buf_learn); 00706 (void) free(buf_learn_rea); 00707 if (data->learning->obs_neof != 0) (void) free(buf_learn_obs); 00708 (void) free(buf_learn_pc); 00709 (void) free(ntime_sub); 00710 (void) free(rea_var); 00711 (void) free(mean_dist); 00712 (void) free(var_dist); 00713 (void) free(dist); 00714 00715 /* If wanted, write learning data to files for later use */ 00716 if (data->learning->learning_save == TRUE) { 00717 (void) printf("Writing learning fields.\n"); 00718 istat = write_learning_fields(data); 00719 } 00720 if (niter == 1) { 00721 (void) fprintf(stderr, "%s: ERROR: In one classification, only 1 iteration was needed! Probably an error in your EOF data or configuration. Must abort...\n", 00722 __FILE__); 00723 return -1; 00724 } 00725 } 00726 00727 /* Success status */ 00728 return 0; 00729 }
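As a complement to the per-point regression loop near the end of the listing, the sketch below shows how the x-matrix is assembled for one regression point: one column per weather-type cluster holding the normalized distances, plus the normalized secondary index as an extra column for the seasons where nreg equals nclusters + 1. It fits the spatially averaged precipitation with GSL's gsl_multifit_linear and an explicit intercept column instead of dsclim's own regress() routine, so the coefficient, constant, and R^2 outputs are only analogous to, not identical with, precip_reg, precip_reg_cst, and precip_reg_rsq; all names below are illustrative.

    /* Sketch: ordinary least-squares fit of a precipitation index against
       cluster distances (and optionally a secondary index) at one point.
       Illustrative code using GSL multifit; link with -lgsl -lgslcblas -lm. */
    #include <gsl/gsl_matrix.h>
    #include <gsl/gsl_vector.h>
    #include <gsl/gsl_multifit.h>
    #include <gsl/gsl_statistics_double.h>

    int fit_point(double *coef, double *cst, double *rsq,
                  const double *dist, const double *sup_index, int use_sup,
                  const double *y, int nclusters, int ntime) {
      int nreg = nclusters + (use_sup ? 1 : 0);
      int p = nreg + 1;                 /* regressors + intercept */
      int t, k;
      double chisq;

      gsl_matrix *X = gsl_matrix_alloc(ntime, p);
      gsl_vector *yv = gsl_vector_alloc(ntime);
      gsl_vector *c = gsl_vector_alloc(p);
      gsl_matrix *cov = gsl_matrix_alloc(p, p);
      gsl_multifit_linear_workspace *w = gsl_multifit_linear_alloc(ntime, p);

      for (t = 0; t < ntime; t++) {
        gsl_matrix_set(X, t, 0, 1.0);                      /* intercept column */
        for (k = 0; k < nclusters; k++)                    /* cluster distances */
          gsl_matrix_set(X, t, k + 1, dist[t + k * ntime]);
        if (use_sup)                                       /* secondary index */
          gsl_matrix_set(X, t, nclusters + 1, sup_index[t]);
        gsl_vector_set(yv, t, y[t]);
      }

      gsl_multifit_linear(X, yv, c, cov, &chisq, w);

      *cst = gsl_vector_get(c, 0);
      for (k = 0; k < nreg; k++)
        coef[k] = gsl_vector_get(c, k + 1);

      /* R^2 from the residual chi-square and the total sum of squares of y */
      double tss = gsl_stats_tss(y, 1, ntime);
      *rsq = (tss > 0.0) ? 1.0 - chisq / tss : 0.0;

      gsl_multifit_linear_free(w);
      gsl_matrix_free(cov);
      gsl_vector_free(c);
      gsl_vector_free(yv);
      gsl_matrix_free(X);
      return 0;
    }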