// bes: Updated for version 3.20.8
// build_sidecar.cc
1 /*********************************************************************
2  * build_sidecar.cc *
3  * *
4  * Created on: Mar 7, 2019 *
5  * *
6  * Purpose: Index the STARE value that corresponds to a given *
7  * iterator from an array, as well as its lat/lon value. *
8  * *
9  * Author: Kodi Neumiller, kneumiller@opendap.org *
10  * *
11  ********************************************************************/
12 
13 // This is a simple application that additionally can read data from
14 // OPeNDAP servers (Hyrax, TDS, ...). It does not use the BES framework.
15 // jhrg 6/17/20
16 
17 #include <unistd.h>
18 #include <string>
19 #include <memory>
20 #include <algorithm>
21 #include <cassert>
22 #include <chrono>
23 
24 #include <hdf5.h>
25 
26 #include <STARE.h>
27 
28 #include <D4Connect.h>
29 #include <Connect.h>
30 #include <Array.h>
31 #include <Error.h>
32 
33 using namespace std;
34 using namespace libdap;
35 
36 static bool verbose = false;
37 #define VERBOSE(x) do { if (verbose) x; } while(false)
38 static bool very_verbose = false;
39 #define VERY_VERBOSE(x) do { if (very_verbose) x; } while(false)
40 
// One point of a 2D lat/lon grid: its array position, its location in
// degrees, and its STARE spatial index. Used by (deprecated) algorithm 1
// (see build_coords() and the vector<coord> overload of writeHDF5()).
struct coord {
    // Position of the point in the 2D data array: x is the column, y the row.
    unsigned long x;
    unsigned long y;

    // Geographic location of the point, in degrees.
    float64 lon;
    float64 lat;

    // The STARE library's name for the type is STARE_ArrayIndexSpatialValue
    uint64 s_index;
};
55 
67 struct coordinates {
68  vector<hsize_t> dims;
69 
70  vector<unsigned long> x;
71  vector<unsigned long> y;
72 
73  vector<float64> lon;
74  vector<float64> lat;
75 
76  STARE_ArrayIndexSpatialValues s_index;
77 
78  coordinates() {}
79 
85  coordinates(vector<float64>latitude, vector<float64>longitude) {
86  assert(latitude.size() == longitude.size());
87  set_size(latitude.size());
88 
89  copy(latitude.begin(), latitude.end(), lat.begin());
90  copy(longitude.begin(), longitude.end(), lon.begin());
91  }
92 
97  void set_size(size_t size) {
98  x.resize(size);
99  y.resize(size);
100  lon.resize(size);
101  lat.resize(size);
102  s_index.resize(size);
103  }
104 
111  hsize_t *get_dims() { return &dims[0]; }
112 
113  unsigned long *get_x() { return &x[0]; }
114  unsigned long *get_y() { return &y[0]; }
115 
116  float64 *get_lon() { return &lon[0]; }
117  float64 *get_lat() { return &lat[0]; }
118 
119  STARE_ArrayIndexSpatialValue *get_s_index() { return &s_index[0]; }
121 };
122 
130 void read_lat_lon_url(const string &data_url, const string &lat_name, const string &lon_name, coordinates *c) {
131 
132  unique_ptr<libdap::Connect> url(new libdap::Connect(data_url));
133 
134  string latlon_ce = lat_name + "," + lon_name;
135 
136  std::vector<float> lat;
137  std::vector<float> lon;
138 
139  vector<coord> indexArray;
140 
141  try {
142  libdap::BaseTypeFactory factory;
143  libdap::DataDDS dds(&factory);
144 
145  VERBOSE(cerr << "\n\n\tRequesting data from " << data_url << endl);
146 
147  //Makes sure only the Latitude and Longitude variables are requested
148  url->request_data(dds, latlon_ce);
149 
150  //Create separate libdap arrays to store the lat and lon arrays individually
151  libdap::Array *url_lat = dynamic_cast<libdap::Array *>(dds.var(lat_name));
152  libdap::Array *url_lon = dynamic_cast<libdap::Array *>(dds.var(lon_name));
153 
154  // ----Error checking---- //
155  if (url_lat == 0 || url_lon == 0) {
156  throw libdap::Error("Expected both lat and lon arrays");
157  }
158 
159  if (url_lat->dimensions() != 2) {
160  throw libdap::Error("Incorrect latitude dimensions");
161  }
162 
163  if (url_lon->dimensions() != 2) {
164  throw libdap::Error("Incorrect longitude dimensions");
165  }
166 
167  int size_y = url_lat->dimension_size(url_lat->dim_begin());
168  int size_x = url_lat->dimension_size(url_lat->dim_begin() + 1);
169 
170  if (size_y != url_lon->dimension_size(url_lon->dim_begin())
171  || size_x != url_lon->dimension_size(url_lon->dim_begin() + 1)) {
172  throw libdap::Error("The size of the latitude and longitude arrays are not the same");
173  }
174 
175  // Set the dimension sizes
176  c->dims.resize(2);
177  c->dims[0] = size_y;
178  c->dims[1] = size_x;
179 
180  // Set the sizes and transfer the values from the 'url_lat/lon' to the
181  // lat/lon vectors in 'c'
182  c->set_size(url_lat->length()); // This sets the sizes for all the vectors
183  url_lat->value(c->get_lat());
184  url_lon->value(c->get_lon());
185  }
186  catch (libdap::Error &e) {
187  cerr << "ERROR: " << e.get_error_message() << endl;
188  exit(EXIT_FAILURE);
189  }
190 
191  VERBOSE(cerr << "\tsize of lat array: " << c->lat.size() << endl);
192  VERBOSE(cerr << "\tsize of lon array: " << c->lon.size() << endl);
193 }
194 
206 vector<hsize_t> read_lat_lon(const string &filename, const string &lat_name, const string &lon_name,
207  vector<float64> &lat, vector<float64> &lon) {
208 
209  //Read the file and store the datasets
210  hid_t file = H5Fopen(filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
211  if (file < 0)
212  throw Error(string("Could not open the file '").append(filename).append("'"), __FILE__, __LINE__);
213 
214  hid_t latDataset = H5Dopen(file, lat_name.c_str(), H5P_DEFAULT);
215  if (latDataset < 0)
216  throw Error(string("Could not open the HDF5 dataset '").append(lat_name).append("'"), __FILE__,
217  __LINE__);
218 
219  hid_t lonDataset = H5Dopen(file, lon_name.c_str(), H5P_DEFAULT);
220  if (lonDataset < 0)
221  throw Error(string("Could not open the HDF5 dataset '").append(lon_name).append("'"), __FILE__,
222  __LINE__);
223 
224  //Get the number of dimensions
225  //Should be 2, but this is future proofing just in case,
226  //that way I don't have to go back and figure it all out again kln 10/17/19
227  hid_t dspace = H5Dget_space(latDataset);
228  if (dspace < 0)
229  throw Error(string("Could not open the HDF5 data space for '").append(lat_name).append("'"),
230  __FILE__, __LINE__);
231 
232  const int ndims = H5Sget_simple_extent_ndims(dspace);
233  if (ndims != 2)
234  throw Error(string("The latitude variable '").append(lat_name).append("' should be a 2D array"),
235  __FILE__, __LINE__);
236 
237  vector<hsize_t> dims(ndims);
238 
239  //Get the size of the dimensions so that we know how big to make the memory space
240  //dims holds the size of the ndims dimensions.
241  H5Sget_simple_extent_dims(dspace, &dims[0], NULL);
242 
243  //We need to get the filespace and memspace before reading the values from each dataset
244  hid_t latFilespace = H5Dget_space(latDataset);
245  if (latFilespace < 0)
246  throw Error(string("Could not open the HDF5 file space for '").append(lat_name).append("'"),
247  __FILE__, __LINE__);
248 
249  hid_t lonFilespace = H5Dget_space(lonDataset);
250  if (lonFilespace < 0)
251  throw Error(string("Could not open the HDF5 file space for '").append(lon_name).append("'"),
252  __FILE__, __LINE__);
253 
254  //The filespace will tell us what the size of the vectors need to be for reading in the
255  // data from the h5 file. We could also use memspace, but they should be the same size.
256  hssize_t latSize = H5Sget_select_npoints(latFilespace);
257  VERBOSE(cerr << "\n\tlat dataspace size: " << latSize);
258  hssize_t lonSize = H5Sget_select_npoints(lonFilespace);
259  VERBOSE(cerr << "\n\tlon dataspace size: " << lonSize << endl);
260 
261  if (latSize != lonSize)
262  throw Error(
263  string("The size of the Latitude and Longitude arrays must be equal in '").append(filename).append("'"),
264  __FILE__, __LINE__);
265 
266  lat.resize(latSize);
267  lon.resize(lonSize);
268 
269  hid_t memspace = H5Screate_simple(ndims, &dims[0], NULL);
270  if (memspace < 0)
271  throw Error(
272  string("Could not make an HDF5 memory space while working with '").append(filename).append("'"),
273  __FILE__, __LINE__);
274 
275  //Read the data file and store the values of each dataset into an array
276  // was H5T_NATIVE_FLOAT. jhrg 4/17/20
277  herr_t status = H5Dread(latDataset, H5T_NATIVE_DOUBLE, memspace, latFilespace, H5P_DEFAULT, &lat[0]);
278  if (status < 0)
279  throw Error(string("Could not read data for '").append(lat_name).append("'"), __FILE__, __LINE__);
280 
281 
282  status = H5Dread(lonDataset, H5T_NATIVE_DOUBLE, memspace, lonFilespace, H5P_DEFAULT, &lon[0]);
283  if (status < 0)
284  throw Error(string("Could not read data for '").append(lon_name).append("'"), __FILE__, __LINE__);
285 
286  VERBOSE(cerr << "\tsize of lat array: " << lat.size() << endl);
287  VERBOSE(cerr << "\tsize of lon array: " << lon.size() << endl);
288 
289  return dims;
290 }
291 
299 void read_lat_lon(const string &filename, const string &lat_name, const string &lon_name, coordinates *c) {
300 
301  //Read the file and store the datasets
302  hid_t file = H5Fopen(filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
303  if (file < 0)
304  throw Error(string("Could not open the file '").append(filename).append("'"), __FILE__, __LINE__);
305 
306  hid_t latDataset = H5Dopen(file, lat_name.c_str(), H5P_DEFAULT);
307  if (latDataset < 0)
308  throw Error(string("Could not open the HDF5 dataset '").append(lat_name).append("'"), __FILE__,
309  __LINE__);
310 
311  hid_t lonDataset = H5Dopen(file, lon_name.c_str(), H5P_DEFAULT);
312  if (lonDataset < 0)
313  throw Error(string("Could not open the HDF5 dataset '").append(lon_name).append("'"), __FILE__,
314  __LINE__);
315 
316  //Get the number of dimensions
317  //Should be 2, but this is future proofing just in case,
318  //that way I don't have to go back and figure it all out again kln 10/17/19
319  hid_t dspace = H5Dget_space(latDataset);
320  if (dspace < 0)
321  throw Error(string("Could not open the HDF5 data space for '").append(lat_name).append("'"),
322  __FILE__, __LINE__);
323 
324  const int ndims = H5Sget_simple_extent_ndims(dspace);
325  if (ndims != 2)
326  throw Error(string("The latitude variable '").append(lat_name).append("' should be a 2D array"),
327  __FILE__, __LINE__);
328 
329  c->dims.resize(ndims);
330 
331  //Get the size of the dimensions so that we know how big to make the memory space
332  //dims holds the size of the ndims dimensions.
333  H5Sget_simple_extent_dims(dspace, c->get_dims(), NULL);
334 
335  //We need to get the filespace and memspace before reading the values from each dataset
336  hid_t latFilespace = H5Dget_space(latDataset);
337  if (latFilespace < 0)
338  throw Error(string("Could not open the HDF5 file space for '").append(lat_name).append("'"),
339  __FILE__, __LINE__);
340 
341  hid_t lonFilespace = H5Dget_space(lonDataset);
342  if (lonFilespace < 0)
343  throw Error(string("Could not open the HDF5 file space for '").append(lon_name).append("'"),
344  __FILE__, __LINE__);
345 
346  //The filespace will tell us what the size of the vectors need to be for reading in the
347  // data from the h5 file. We could also use memspace, but they should be the same size.
348  hssize_t latSize = H5Sget_select_npoints(latFilespace);
349  VERBOSE(cerr << "\n\tlat dataspace size: " << latSize);
350  hssize_t lonSize = H5Sget_select_npoints(lonFilespace);
351  VERBOSE(cerr << "\n\tlon dataspace size: " << lonSize << endl);
352 
353  if (latSize != lonSize)
354  throw Error(
355  string("The size of the Latitude and Longitude arrays must be equal in '").append(filename).append("'"),
356  __FILE__, __LINE__);
357 
358  c->set_size(latSize);
359 
360  hid_t memspace = H5Screate_simple(ndims, c->get_dims(), NULL);
361  if (memspace < 0)
362  throw Error(
363  string("Could not make an HDF5 memory space while working with '").append(filename).append("'"),
364  __FILE__, __LINE__);
365 
366  //Read the data file and store the values of each dataset into an array
367  // was H5T_NATIVE_FLOAT. jhrg 4/17/20
368  herr_t status = H5Dread(latDataset, H5T_NATIVE_DOUBLE, memspace, latFilespace, H5P_DEFAULT, c->get_lat());
369  if (status < 0)
370  throw Error(string("Could not read data for '").append(lat_name).append("'"), __FILE__, __LINE__);
371 
372 
373  status = H5Dread(lonDataset, H5T_NATIVE_DOUBLE, memspace, lonFilespace, H5P_DEFAULT, c->get_lon());
374  if (status < 0)
375  throw Error(string("Could not read data for '").append(lon_name).append("'"), __FILE__, __LINE__);
376 
377  VERBOSE(cerr << "\tsize of lat array: " << c->lat.size() << endl);
378  VERBOSE(cerr << "\tsize of lon array: " << c->lon.size() << endl);
379 }
380 
390 unique_ptr< vector<coord> > build_coords(STARE &stare, const vector<hsize_t> &dims,
391  const vector<float64> &latitude, const vector<float64> &longitude) {
392 
393  unique_ptr< vector<coord> > coords(new vector<coord>(latitude.size()));
394 
395  auto lat = latitude.begin();
396  auto lon = longitude.begin();
397  //Assume data are stored in row-major order; dims[0] is the row, dims[1] is the column
398  unsigned long long n = 0;
399  for (unsigned long r = 0; r < dims[0]; ++r) {
400  for (unsigned long c = 0; c < dims[1]; ++c) {
401  (*coords)[n].x = c;
402  (*coords)[n].y = r;
403 
404  (*coords)[n].lat = *lat;
405  (*coords)[n].lon = *lon;
406 
407  (*coords)[n].s_index = stare.ValueFromLatLonDegrees(*lat, *lon);
408 
409  VERY_VERBOSE(cerr << "Coord: " << *lat << ", " << *lon << " -> " << hex << (*coords)[n].s_index << dec << endl);
410 
411  ++n;
412  ++lat;
413  ++lon;
414  }
415  }
416 
417  return coords;
418 }
419 
429 unique_ptr<coordinates>
430 build_coordinates(STARE &stare, const vector<hsize_t> &dims,
431  const vector<float64> &latitude, const vector<float64> &longitude) {
432 
433  // This ctor sets the size of the vectors in the 'coordinates' struct.
434  unique_ptr<coordinates> c(new coordinates(latitude, longitude));
435 
436  //Assume data are stored in row-major order; dims[0] is the row, dims[1] is the column
437  unsigned long n = 0;
438  for (unsigned long row = 0; row < dims[0]; ++row) {
439  for (unsigned long col = 0; col < dims[1]; ++col) {
440  c->x[n] = col;
441  c->y[n] = row;
442 
443  c->s_index[n] = stare.ValueFromLatLonDegrees(c->lat[n], c->lon[n]);
444 
445  VERY_VERBOSE(cerr << "Coord: " << c->lat[n] << ", " << c->lon[n] << " -> " << hex << c->s_index[n] << dec << endl);
446 
447  ++n;
448  }
449  }
450 
451  return c;
452 }
453 
460 void
461 compute_coordinates(STARE &stare, coordinates *c) {
462 
463  //Assume data are stored in row-major order; dims[0] is the row, dims[1] is the column
464  unsigned long n = 0;
465  for (unsigned long row = 0; row < c->dims[0]; ++row) {
466  for (unsigned long col = 0; col < c->dims[1]; ++col) {
467  c->x[n] = col;
468  c->y[n] = row;
469 
470  c->s_index[n] = stare.ValueFromLatLonDegrees(c->lat[n], c->lon[n]);
471 
472  VERY_VERBOSE(cerr << "Coord: " << c->lat[n] << ", " << c->lon[n] << " -> " << hex << c->s_index[n] << dec << endl);
473 
474  ++n;
475  }
476  }
477 }
478 
486 STARE_ArrayIndexSpatialValue
487 set_s_index_resolution(STARE &stare, STARE_ArrayIndexSpatialValue target, STARE_ArrayIndexSpatialValue adjacent) {
488  static EmbeddedLevelNameEncoding lj; // Use this to get the mask
489  int lvl = stare.cmpSpatialResolutionEstimateI(target, adjacent);
490  return (target & ~lj.levelMaskSciDB) | lvl;
491 }
492 
512 void
513 compute_coordinates_resolution(STARE &stare, coordinates *c) {
514  EmbeddedLevelNameEncoding lj; // Use this to get the mask
515  //Assume data are stored in row-major order; dims[0] is the row, dims[1] is the column
516  unsigned long n = 0;
517  unsigned long max_row = c->dims[0];
518  for (unsigned long row = 0; row < max_row; ++row) {
519  // This code uses the pixel to the left and/or right of the target.
520  unsigned long max_col = c->dims[1];
521  unsigned long mid_col = max_col / 2;
522  for (unsigned long col = 0; col < mid_col; ++col) {
523  // Compute resolution using the point to the right (n + 1)
524  c->s_index[n] = set_s_index_resolution(stare, c->s_index[n], c->s_index[n+1]);
525  ++n;
526  }
527  for (unsigned long col = mid_col; col < max_col; ++col) {
528  // Compute resolution using the point to the left (n - 1)
529  c->s_index[n] = set_s_index_resolution(stare, c->s_index[n], c->s_index[n-1]);
530  ++n;
531  }
532 
533  }
534 }
535 
545 void writeHDF5(const string &filename, string tmpStorage, vector<coord> *coords) {
546  //Allows us to use hdf5 1.10 generated files with hdf5 1.8
547  // !---Must be removed if a feature from 1.10 is required to use---!
548  // -kln 5/16/19
549  hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
550 #ifdef H5F_LIBVER_V18
551  H5Pset_libver_bounds (fapl, H5F_LIBVER_EARLIEST, H5F_LIBVER_V18);
552 #else
553  H5Pset_libver_bounds(fapl, H5F_LIBVER_EARLIEST, H5F_LIBVER_LATEST);
554 #endif
555 
556  //H5Fcreate returns a file id that will be saved in variable "file"
557  hid_t file = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
558 
559  //The rank is used to determine the dimensions for the dataspace
560  // Since we only use a one dimension array we can always assume the Rank should be 1
561  hsize_t coords_size = coords->size();
562  hid_t dataspace = H5Screate_simple(1 /*RANK*/, &coords_size, NULL);
563 
564 #if 0
565  //Taken out because CF doesn't support compound types,
566  // so each variable will need to be stored in its own array
567  // -kln 5/17/19
568 
569  /*
570  * Create the memory datatype.
571  * Because the "coords" struct has ints and floats we need to make sure we put
572  * the correct offset onto memory for each data type
573  */
574  VERBOSE(cerr << "\nCreating datatypes: x, y, lat, lon, stareIndex -> ");
575 
576  dataTypes = H5Tcreate (H5T_COMPOUND, sizeof(coords));
577  H5Tinsert(dataTypes, "x", HOFFSET(coords, x), H5T_NATIVE_INT);
578  H5Tinsert(dataTypes, "y", HOFFSET(coords, y), H5T_NATIVE_INT);
579  H5Tinsert(dataTypes, "lat", HOFFSET(coords, lat), H5T_NATIVE_FLOAT);
580  H5Tinsert(dataTypes, "lon", HOFFSET(coords, lon), H5T_NATIVE_FLOAT);
581  H5Tinsert(dataTypes, "stareIndex", HOFFSET(coords, stareIndex), H5T_NATIVE_INT);
582 
583  /*
584  * Create the dataset
585  */
586  const char *datasetName = "StareData";
587  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
588  dataset = H5Dcreate2(file, datasetName, dataTypes, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
589 
590  VERBOSE(cerr << "Writing data to dataset" << endl);
591  H5Dwrite(dataset, dataTypes, H5S_ALL, H5S_ALL, H5P_DEFAULT, &keyVals[0]);
592 #endif
593 
594  /*
595  * Create the datasets
596  */
597  const char *datasetName = "X";
598  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
599  hid_t datasetX = H5Dcreate2(file, datasetName, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
600 
601  datasetName = "Y";
602  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
603  hid_t datasetY = H5Dcreate2(file, datasetName, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
604 
605  datasetName = "Latitude";
606  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
607  // was H5T_NATIVE_FLOAT. jhrg 4/17/20
608  hid_t datasetLat = H5Dcreate2(file, datasetName, H5T_NATIVE_DOUBLE, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
609 
610  datasetName = "Longitude";
611  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
612  hid_t datasetLon = H5Dcreate2(file, datasetName, H5T_NATIVE_DOUBLE, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
613 
614  datasetName = "Stare Index";
615  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
616  hid_t datasetStare = H5Dcreate2(file, datasetName, H5T_NATIVE_INT64, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
617 
618  /*
619  * Write the data to the datasets
620  */
621  //Need to store each value in its own array
622  vector<unsigned long> xArray;
623  vector<unsigned long> yArray;
624  vector<float64> latArray;
625  vector<float64> lonArray;
626  vector<uint64> s_indices;
627 
628  for (vector<coord>::iterator i = coords->begin(), e = coords->end(); i != e; ++i) {
629  xArray.push_back(i->x);
630  yArray.push_back(i->y);
631  latArray.push_back(i->lat);
632  lonArray.push_back(i->lon);
633  s_indices.push_back(i->s_index);
634  }
635 
636  VERBOSE(cerr << "Writing data to dataset" << endl);
637  H5Dwrite(datasetX, H5T_NATIVE_ULONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, &xArray[0]);
638  H5Dwrite(datasetY, H5T_NATIVE_ULONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, &yArray[0]);
639  H5Dwrite(datasetLat, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &latArray[0]);
640  H5Dwrite(datasetLon, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &lonArray[0]);
641  H5Dwrite(datasetStare, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, &s_indices[0]);
642 
643  /*
644  * Close/release resources.
645  */
646  H5Sclose(dataspace);
647  H5Fclose(file);
648 
649  VERBOSE(cerr << "\nData written to file: " << filename << endl);
650 
651  //Store the sidecar files in /tmp/ (or the provided directory) so that it can easily be found for the
652  // server functions.
653  if (tmpStorage.empty())
654  tmpStorage = "/tmp/" + filename;
655  else
656  tmpStorage = tmpStorage + filename;
657 
658  rename(filename.c_str(), tmpStorage.c_str());
659  VERBOSE(cerr << "Data moved to: " << tmpStorage << endl);
660 }
661 
671 void writeHDF5(const string &filename, string tmp_storage, coordinates *c) {
672  //Allows us to use hdf5 1.10 generated files with hdf5 1.8
673  // !---Must be removed if a feature from 1.10 is required to use---!
674  // -kln 5/16/19
675  hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
676 #ifdef H5F_LIBVER_V18
677  H5Pset_libver_bounds (fapl, H5F_LIBVER_EARLIEST, H5F_LIBVER_V18);
678 #else
679  H5Pset_libver_bounds(fapl, H5F_LIBVER_EARLIEST, H5F_LIBVER_LATEST);
680 #endif
681 
682  //H5Fcreate returns a file id that will be saved in variable "file"
683  hid_t file = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
684 
685  //The rank is used to determine the dimensions for the dataspace
686  // Since we only use a one dimension array we can always assume the Rank should be 1
687  hsize_t coords_size = c->x.size();
688  hid_t dataspace = H5Screate_simple(1 /*RANK*/, &coords_size, NULL);
689 
690 #if 0
691  //Taken out because CF doesn't support compound types,
692  // so each variable will need to be stored in its own array
693  // -kln 5/17/19
694 
695  /*
696  * Create the memory datatype.
697  * Because the "coords" struct has ints and floats we need to make sure we put
698  * the correct offset onto memory for each data type
699  */
700  VERBOSE(cerr << "\nCreating datatypes: x, y, lat, lon, stareIndex -> ");
701 
702  dataTypes = H5Tcreate (H5T_COMPOUND, sizeof(coords));
703  H5Tinsert(dataTypes, "x", HOFFSET(coords, x), H5T_NATIVE_INT);
704  H5Tinsert(dataTypes, "y", HOFFSET(coords, y), H5T_NATIVE_INT);
705  H5Tinsert(dataTypes, "lat", HOFFSET(coords, lat), H5T_NATIVE_FLOAT);
706  H5Tinsert(dataTypes, "lon", HOFFSET(coords, lon), H5T_NATIVE_FLOAT);
707  H5Tinsert(dataTypes, "stareIndex", HOFFSET(coords, stareIndex), H5T_NATIVE_INT);
708 
709  /*
710  * Create the dataset
711  */
712  const char *datasetName = "StareData";
713  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
714  dataset = H5Dcreate2(file, datasetName, dataTypes, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
715 
716  VERBOSE(cerr << "Writing data to dataset" << endl);
717  H5Dwrite(dataset, dataTypes, H5S_ALL, H5S_ALL, H5P_DEFAULT, &keyVals[0]);
718 #endif
719 
720  /*
721  * Create the datasets
722  */
723  const char *datasetName = "X";
724  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
725  hid_t datasetX = H5Dcreate2(file, datasetName, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
726 
727  datasetName = "Y";
728  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
729  hid_t datasetY = H5Dcreate2(file, datasetName, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
730 
731  datasetName = "Latitude";
732  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
733  // was H5T_NATIVE_FLOAT. jhrg 4/17/20
734  hid_t datasetLat = H5Dcreate2(file, datasetName, H5T_NATIVE_DOUBLE, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
735 
736  datasetName = "Longitude";
737  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
738  hid_t datasetLon = H5Dcreate2(file, datasetName, H5T_NATIVE_DOUBLE, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
739 
740  datasetName = "Stare_Index";
741  VERBOSE(cerr << "Creating dataset: " << datasetName << " -> ");
742  hid_t datasetStare = H5Dcreate2(file, datasetName, H5T_NATIVE_INT64, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
743 
744  /*
745  * Write the data to the datasets
746  */
747 
748  VERBOSE(cerr << "Writing data to dataset" << endl);
749  H5Dwrite(datasetX, H5T_NATIVE_ULONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, &(c->x[0]));
750  H5Dwrite(datasetY, H5T_NATIVE_ULONG, H5S_ALL, H5S_ALL, H5P_DEFAULT, &(c->y[0]));
751  H5Dwrite(datasetLat, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &(c->lat[0]));
752  H5Dwrite(datasetLon, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &(c->lon[0]));
753  H5Dwrite(datasetStare, H5T_NATIVE_INT64, H5S_ALL, H5S_ALL, H5P_DEFAULT, &(c->s_index[0]));
754 
755  /*
756  * Close/release resources.
757  */
758  H5Sclose(dataspace);
759  H5Fclose(file);
760 
761  VERBOSE(cerr << "\nData written to file: " << filename << endl);
762 
763  //Store the sidecar files in /tmp/ (or the provided directory) so that it can easily be found for the
764  // server functions.
765  if (tmp_storage.empty())
766  tmp_storage = "/tmp/" + filename;
767  else
768  tmp_storage = tmp_storage + filename;
769 
770  rename(filename.c_str(), tmp_storage.c_str());
771  VERBOSE(cerr << "Data moved to: " << tmp_storage << endl);
772 }
773 
774 static void usage() {
775  cerr << "build_sidecar [options] <filename> <latitude-name> <longitude-name>" << endl;
776  cerr << "-o output file: \tOutput the STARE data to the given output file" << endl;
777  cerr << "-v|V verbose/very verbose" << endl;
778  cerr << "-t transfer location: \tTransfer the generated sidecar file to the given directory" << endl;
779  cerr << "-b STARE Build Level: \tHigher levels -> longer initialization time. (default is 5)" << endl;
780  cerr << "-s STARE default Level: \tHigher levels -> finer resolution. (default is 27)" << endl;
781  cerr << "-a Algotithm: \t1, 2 or 3 (default is 3)" << endl;
782  cerr << "-r Include resolution information in the indices. Works for algorithm 2 and 3 only" << endl;
783 }
784 
/// Derive the sidecar file name from a granule pathname or URL: strip any
/// leading path (or URL) components, drop the final extension, and attach
/// 'suffix'. Assume the granule is called .../path/file.ext where both the
/// slashes and dots might actually not be there. jhrg 1/15/20
static std::string
get_sidecar_filename(const std::string &dataUrl, const std::string &suffix = "_sidecar.h5") {
    // Everything after the last '/' is the granule name; with no '/' the
    // whole string is the granule name.
    const size_t slash = dataUrl.find_last_of('/');
    std::string granule = (slash == std::string::npos) ? dataUrl : dataUrl.substr(slash + 1);

    // Drop the final extension, if any, then attach the suffix.
    const size_t dot = granule.find_last_of('.');
    if (dot != std::string::npos)
        granule.erase(dot);

    return granule + suffix;
}
799 
/// Return true if 'name' is an http(s) URL.
/// Fixes: the old test used find() != npos, which also matched a local
/// pathname that merely *contained* "http://" somewhere; a URL must start
/// with the scheme, so check the prefix (rfind with pos 0).
static bool
is_url(const std::string &name) {
    return name.rfind("https://", 0) == 0
        || name.rfind("http://", 0) == 0;
}
806 
818 int main(int argc, char *argv[]) {
819  int c;
820  extern char *optarg;
821  extern int optind;
822 
823  string newName = "";
824  string tmpStorage = "./"; // Default is the CWD.
825  string extension = "_stare.h5";
826  float build_level = 5.0; // The default build level, fast start time, longer index lookup.
827  float level = 27.0;
828  int alg = 3;
829  bool compute_resolution = false;
830 
831  while ((c = getopt(argc, argv, "hvVro:t:b:s:a:e:")) != -1) {
832  switch (c) {
833  case 'o':
834  newName = optarg;
835  break;
836  case 'v':
837  verbose = true;
838  break;
839  case 'V':
840  verbose = true;
841  very_verbose = true;
842  break;
843  case 't':
844  tmpStorage = optarg;
845  break;
846  case 'b':
847  build_level = atof(optarg);
848  break;
849  case 's':
850  level = atof(optarg);
851  break;
852  case 'a':
853  alg = atoi(optarg);
854  break;
855  case 'r':
856  compute_resolution = true;
857  break;
858  case 'e':
859  extension = optarg;
860  break;
861  case 'h':
862  default:
863  usage();
864  exit(EXIT_SUCCESS);
865  break;
866  }
867  }
868 
869  // This resets argc and argv once the optional arguments have been processed. jhrg 12/5/19
870  argc -= optind;
871  argv += optind;
872 
873  if (argc != 3) {
874  cerr << "Expected 3 required arguments" << endl;
875  usage();
876  exit(EXIT_FAILURE);
877  }
878 
879  //Required argument values
880  string dataset = argv[0];
881  string lat_name = argv[1];
882  string lon_name = argv[2];
883 
884  if (newName.empty())
885  newName = get_sidecar_filename(dataset, extension);
886 
887  try {
888  STARE stare(level, build_level);
889  using namespace std::chrono;
890  auto start = high_resolution_clock::now();
891 
892  // Algorithms 1 and 2 are deprecated
893  switch (alg) {
894  case 1: {
895  vector<float64> lat;
896  vector<float64> lon;
897 
898  vector<hsize_t> dims = read_lat_lon(dataset, lat_name, lon_name, lat, lon);
899  unique_ptr<vector<coord> > coords = build_coords(stare, dims, lat, lon);
900 
901  if (compute_resolution)
902  VERBOSE(cerr << "STARE index resolution is not available for algorithm one.");
903 
904  writeHDF5(newName, tmpStorage, coords.get());
905  break;
906  }
907 
908  case 2: {
909  vector<float64> lat;
910  vector<float64> lon;
911 
912  vector<hsize_t> dims = read_lat_lon(dataset, lat_name, lon_name, lat, lon);
913  unique_ptr<coordinates> c = build_coordinates(stare, dims, lat, lon);
914 
915  if (compute_resolution)
916  compute_coordinates_resolution(stare, c.get());
917 
918  writeHDF5(newName, tmpStorage, c.get());
919  break;
920  }
921 
922  // This case handles reading from URLs in addition to HDF5 files.
923  default:
924  case 3: {
925  unique_ptr<coordinates> c(new coordinates());
926  if (is_url(dataset))
927  read_lat_lon_url(dataset, lat_name, lon_name, c.get());
928  else
929  read_lat_lon(dataset, lat_name, lon_name, c.get());
930 
931  compute_coordinates(stare, c.get());
932 
933  if (compute_resolution)
934  compute_coordinates_resolution(stare, c.get());
935 
936  writeHDF5(newName, tmpStorage, c.get());
937  break;
938  }
939  }
940 
941  auto total = duration_cast<milliseconds>(high_resolution_clock::now()-start).count();
942  VERBOSE(cerr << "Time for the algorithm " << alg << ": " << total << "ms." << endl);
943  }
944  catch (libdap::Error &e) {
945  cerr << "Error: " << e.get_error_message() << endl;
946  exit(EXIT_FAILURE);
947  }
948  catch (exception &e) {
949  cerr << "C++ Error: " << e.what() << endl;
950  exit(EXIT_FAILURE);
951  }
952 
953  exit(EXIT_SUCCESS);
954 }