heos5cfdap.cc
1// This file is part of hdf5_handler: an HDF5 file handler for the OPeNDAP
2// data server.
3
4// Copyright (c) 2011-2016 The HDF Group, Inc. and OPeNDAP, Inc.
5//
6// This is free software; you can redistribute it and/or modify it under the
7// terms of the GNU Lesser General Public License as published by the Free
8// Software Foundation; either version 2.1 of the License, or (at your
9// option) any later version.
10//
11// This software is distributed in the hope that it will be useful, but
12// WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13// or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
14// License for more details.
15//
16// You should have received a copy of the GNU Lesser General Public
17// License along with this library; if not, write to the Free Software
18// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19//
20// You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
21// You can contact The HDF Group, Inc. at 1800 South Oak Street,
22// Suite 203, Champaign, IL 61820
23
32#include "config_hdf5.h"
33
34#include <sys/types.h>
35#include <sys/stat.h>
36#include <fcntl.h>
37#include <unistd.h>
38#include <iostream>
39#include <sstream>
40
41#include <BESLog.h>
42#include <BESDebug.h>
43
44#include <libdap/parser.h>
45#include "heos5cfdap.h"
46#include "h5cfdaputil.h"
47#include "HDF5CFByte.h"
48#include "HDF5CFInt8.h"
49#include "HDF5CFUInt16.h"
50#include "HDF5CFInt16.h"
51#include "HDF5CFUInt32.h"
52#include "HDF5CFInt32.h"
53#include "HDF5CFUInt64.h"
54#include "HDF5CFInt64.h"
55#include "HDF5CFFloat32.h"
56#include "HDF5CFFloat64.h"
57#include "HDF5CFStr.h"
58#include "HDF5CFArray.h"
59#include "HDFEOS5CFMissLLArray.h"
60#include "HDFEOS5CFMissNonLLCVArray.h"
61#include "HDFEOS5CFSpecialCVArray.h"
62#include "HDF5CFGeoCF1D.h"
63#include "HDF5CFGeoCFProj.h"
64#include "HDF5RequestHandler.h"
65#include "h5apicompatible.h"
66
67#include "he5dds.tab.hh"
68#include "HE5Parser.h"
69#include "HE5Checker.h"
70#include "he5das.tab.hh"
71
72struct yy_buffer_state;
73
74yy_buffer_state *he5dds_scan_string(const char *str);
75int he5ddsparse(HE5Parser *he5parser);
76int he5dasparse(libdap::parser_arg *arg);
77int he5ddslex_destroy();
78int he5daslex_destroy();
79
81yy_buffer_state *he5das_scan_string(const char *str);
82
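// The declarations above are the flex/bison entry points for the two HDF-EOS5 metadata
// parsers: the "he5dds" parser fills an HE5Parser object from the StructMetadata string,
// and the "he5das" parser fills a libdap AttrTable through a parser_arg. The sketch below
// (kept inside "#if 0", so it is never compiled) shows the scan/parse/cleanup sequence the
// functions in this file follow; "st_str" and "at" are placeholders for the metadata string
// and the target attribute table.
#if 0
    HE5Parser p;
    he5dds_scan_string(st_str.c_str());   // hand the StructMetadata buffer to the lexer
    he5ddsparse(&p);                       // run the parser; the results land in 'p'
    he5ddslex_destroy();                   // release the lexer buffer

    parser_arg arg(at);                    // 'at' is an AttrTable* that receives the attributes
    he5das_scan_string(st_str.c_str());
    if (he5dasparse(&arg) != 0 || false == arg.status())
        ERROR_LOG("HDF-EOS5 parse error" << endl);
    he5daslex_destroy();
#endif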
83using namespace HDF5CF;
84
85// Map EOS5 to DAP DDS
86void map_eos5_cfdds(DDS &dds, hid_t file_id, const string & filename) {
87
88 BESDEBUG("h5","Coming to HDF-EOS5 products DDS mapping function map_eos5_cfdds "<<endl);
89
90
91 string st_str ="";
92 string core_str="";
93 string arch_str="";
94 string xml_str ="";
95 string subset_str="";
96 string product_str="";
97 string other_str ="";
98 bool st_only = true;
99
100 // Read ECS metadata: merge them into one C++ string
101 read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
102 if(""==st_str) {
103 string msg =
104 "unable to obtain the HDF-EOS5 struct metadata ";
105 throw InternalErr(__FILE__, __LINE__, msg);
106 }
107
108 bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
109
110 EOS5File *f = NULL;
111
112 try {
113 f = new EOS5File(filename.c_str(),file_id);
114 }
115 catch(...) {
116 throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
117 }
118
119 bool include_attr = false;
120
121 // This first "try-catch" block will use the parsed info
122 try {
123
124 // Parse the structmetadata
125 HE5Parser p;
126 HE5Checker c;
127 he5dds_scan_string(st_str.c_str());
128 he5ddsparse(&p);
129 he5ddslex_destroy();
130
131 // Retrieve ProjParams from StructMetadata
132 p.add_projparams(st_str);
133#if 0
134 //p.print();
135#endif
136
137         // Check if the HDF-EOS5 grid has valid parameters and projection codes.
138 if (c.check_grids_unknown_parameters(&p)) {
139             throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
140 }
141
142 if (c.check_grids_missing_projcode(&p)) {
143             throw InternalErr("The HDF-EOS5 file is missing the projection code");
144 }
145
146         // We are gradually adding support for different projection codes.
147 if (c.check_grids_support_projcode(&p)) {
148             throw InternalErr("The current projection code is not supported");
149 }
150
151 // HDF-EOS5 provides default pixel and origin values if they are not defined.
152 c.set_grids_missing_pixreg_orig(&p);
153
154         // Check whether the grids in this multi-grid file share the same lat/lon coordinate variables.
155 bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
156
157         // Retrieve all HDF5 info (not the values)
158 f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
159
160 // Adjust EOS5 Dimension names/sizes based on the parsed results
161 f->Adjust_EOS5Dim_Info(&p);
162
163         // Translate the parsed output to HDF-EOS5 grids/swaths/zonal averages.
164         // Several maps related to dimensions and coordinates are set up here.
165 f->Add_EOS5File_Info(&p, grids_mllcv);
166
167 // Add the dimension names
168 f->Add_Dim_Name(&p);
169 }
170 catch (HDF5CF::Exception &e){
171 if(f!=NULL)
172 delete f;
173 throw InternalErr(e.what());
174 }
175 catch(...) {
176 if(f!=NULL)
177 delete f;
178 throw;
179 }
180
181     // The parsed StructMetadata info is no longer used in this "try-catch" block.
182 try {
183
184         // NASA Aura files need special handling, so first check if this file is an Aura file.
186
187 // Adjust the variable name
189
190 // Handle coordinate variables
191 f->Handle_CVar();
192
193 // Adjust variable and dimension names again based on the handling of coordinate variables.
195
196
197         // We need to use the CV units to distinguish lat/lon from the 3rd CV when
198 // memory cache is turned on.
199 if((HDF5RequestHandler::get_lrdata_mem_cache() != NULL) ||
200 (HDF5RequestHandler::get_srdata_mem_cache() != NULL)){
201
202 // Handle unsupported datatypes including the attributes
204
205 // Handle unsupported dataspaces including the attributes
207
208 // We need to retrieve coordinate variable attributes for memory cache use.
210
211 }
212 else {
213
214 // Handle unsupported datatypes
215 f->Handle_Unsupported_Dtype(include_attr);
216
217 // Handle unsupported dataspaces
218 f->Handle_Unsupported_Dspace(include_attr);
219
220 }
221
222
223 // Need to retrieve the units of CV when memory cache is turned on.
224 // The units of CV will be used to distinguish whether this CV is
225 // latitude/longitude or a third-dimension CV.
226 // isLatLon() will use the units value.
227 if((HDF5RequestHandler::get_lrdata_mem_cache() != NULL) ||
228 (HDF5RequestHandler::get_srdata_mem_cache() != NULL))
229 f->Adjust_Attr_Info();
230
231 // May need to adjust the object names for special objects. Currently no operations
232 // are done in this routine.
233 f->Adjust_Obj_Name();
234
235 // Flatten the object name
236 f->Flatten_Obj_Name(include_attr);
237
238 // Handle name clashing
239 if(true == is_check_nameclashing)
240 f->Handle_Obj_NameClashing(include_attr);
241
242 // Check if this should follow COARDS, yes, set the COARDS flag.
244
245 // For COARDS, the dimension name needs to be changed.
246 f->Adjust_Dim_Name();
247         if(true == is_check_nameclashing)
248            f->Handle_DimNameClashing();
249 
250         // We need to turn off the very long string in the TES file to avoid
251         // choking the netCDF Java tools. So this special variable routine
252         // is listed last. We may turn this off if netCDF can handle
253         // long strings better.
254 f->Handle_SpVar();
255 }
256 catch (HDF5CF::Exception &e){
257 if(f != NULL)
258 delete f;
259 throw InternalErr(e.what());
260 }
261
262 // Generate EOS5 DDS
263 try {
264 gen_eos5_cfdds(dds,f);
265 }
266 catch(...) {
267 if (f!=NULL)
268 delete f;
269 throw;
270 }
271
272 if (f!=NULL)
273 delete f;
274}
275
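// Illustrative only: a minimal sketch of how a caller (e.g., the request handler's DDS
// build path) might drive map_eos5_cfdds() above. The file-open details and the factory
// type are assumptions, not the handler's actual call site.
#if 0
    hid_t file_id = H5Fopen(filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
    if (file_id < 0)
        throw InternalErr(__FILE__, __LINE__, "Cannot open the HDF5 file.");

    BaseTypeFactory factory;                  // hypothetical; the handler supplies its own factory
    DDS dds(&factory, name_path(filename));
    dds.filename(filename);

    map_eos5_cfdds(dds, file_id, filename);

    H5Fclose(file_id);
#endif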
276// Map EOS5 to DAP DAS
277void map_eos5_cfdas(DAS &das, hid_t file_id, const string &filename) {
278
279 BESDEBUG("h5","Coming to HDF-EOS5 products DAS mapping function map_eos5_cfdas "<<endl);
280 string st_str ="";
281 string core_str="";
282 string arch_str="";
283 string xml_str ="";
284 string subset_str="";
285 string product_str="";
286 string other_str ="";
287 bool st_only = true;
288
289 read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
290 if(""==st_str) {
291 string msg =
292 "unable to obtain the HDF-EOS5 struct metadata ";
293 throw InternalErr(__FILE__, __LINE__, msg);
294 }
295
296 bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
297
298 bool is_add_path_attrs = HDF5RequestHandler::get_add_path_attrs();
299
300 EOS5File *f = NULL;
301 try {
302 f = new EOS5File(filename.c_str(),file_id);
303 }
304 catch(...) {
305 throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
306 }
307 bool include_attr = true;
308
309 // The first "try-catch" block will use the parsed info.
310 try {
311
312 HE5Parser p;
313 HE5Checker c;
314 he5dds_scan_string(st_str.c_str());
315
316 he5ddsparse(&p);
317 he5ddslex_destroy();
318 p.add_projparams(st_str);
319#if 0
320 //p.print();
321 // cerr<<"main loop p.za_list.size() = "<<p.za_list.size() <<endl;
322#endif
323
324 if (c.check_grids_unknown_parameters(&p)) {
325             throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
326 }
327
328 if (c.check_grids_missing_projcode(&p)) {
329             throw InternalErr("The HDF-EOS5 file is missing the projection code");
330 }
331 if (c.check_grids_support_projcode(&p)) {
332             throw InternalErr("The current projection code is not supported");
333 }
334 c.set_grids_missing_pixreg_orig(&p);
335
336 bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
337
338 f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
339 f->Adjust_EOS5Dim_Info(&p);
340 f->Add_EOS5File_Info(&p, grids_mllcv);
341 f->Add_Dim_Name(&p);
342 }
343 catch (HDF5CF::Exception &e){
344 if(f != NULL)
345 delete f;
346 throw InternalErr(e.what());
347 }
348 catch(...) {
349 if(f != NULL)
350 delete f;
351 throw;
352 }
353
354 try {
357 f->Handle_CVar();
359 f->Handle_Unsupported_Dtype(include_attr);
360
361 // Remove unsupported dataspace
362 f->Handle_Unsupported_Dspace(include_attr);
363
364 // Need to retrieve the attribute values.
366
367
368         // Handle other unsupported objects;
369         // currently this mainly generates the info for
370         // unsupported objects other than datatypes, dataspaces, links and named datatypes.
371         // This function needs to be called after retrieving supported attributes.
372 f->Handle_Unsupported_Others(include_attr);
373
374 // Add/adjust CF attributes
375 f->Adjust_Attr_Info();
376 f->Adjust_Obj_Name();
377 f->Flatten_Obj_Name(include_attr);
378 if (true == is_check_nameclashing)
379 f->Handle_Obj_NameClashing(include_attr);
381
382#if 0
383 //f->Adjust_Dim_Name();
384 //if(true == is_check_nameclashing)
385 // f->Handle_DimNameClashing();
386#endif
387
388 // Add supplemental attributes
389 f->Add_Supplement_Attrs(is_add_path_attrs);
390
391 // Handle coordinate attributes
392 f->Handle_Coor_Attr();
394 }
395 catch (HDF5CF::Exception &e){
396 if(f != NULL)
397 delete f;
398 throw InternalErr(e.what());
399 }
400
401 // Generate DAS for the EOS5
402 try {
403 gen_eos5_cfdas(das,file_id,f);
404 }
405 catch(...) {
406 if (f != NULL)
407 delete f;
408 throw;
409 }
410
411 if( f != NULL)
412 delete f;
413
414}
415
416// Generate DDS for the EOS5
417void gen_eos5_cfdds(DDS &dds, HDF5CF::EOS5File *f) {
418
419 BESDEBUG("h5","Coming to HDF-EOS5 products DDS generation function gen_eos5_cfdds "<<endl);
420 const vector<HDF5CF::Var *>& vars = f->getVars();
421 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
422 const string filename = f->getPath();
423 const hid_t file_id = f->getFileID();
424
425 // Read Variable info.
426 vector<HDF5CF::Var *>::const_iterator it_v;
427 vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
428
429 for (it_v = vars.begin(); it_v !=vars.end();++it_v) {
430 BESDEBUG("h5","variable full path= "<< (*it_v)->getFullPath() <<endl);
431 gen_dap_onevar_dds(dds,*it_v,file_id,filename);
432 }
433
434 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
435 BESDEBUG("h5","variable full path= "<< (*it_cv)->getFullPath() <<endl);
436 gen_dap_oneeos5cvar_dds(dds,*it_cv,file_id,filename);
437
438 }
439
440 // We need to provide grid_mapping info. for multiple grids.
441     // Here cv_lat_miss_index represents the missing-latitude CV index (an HDF-EOS grid without the latitude field).
442     // This index is used to create the grid_mapping variable for different grids.
443 unsigned short cv_lat_miss_index = 1;
444 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
445 if((*it_cv)->getCVType() == CV_LAT_MISS) {
446 if((*it_cv)->getProjCode() != HE5_GCTP_GEO) {
447                 // Here we need to add grid_mapping variables for each grid,
448                 // for projections other than sinusoidal, since attribute values for LAMAZ and PS
449                 // are different for each grid.
450 gen_dap_oneeos5cf_dds(dds,*it_cv);
451 add_cf_grid_mapinfo_var(dds,(*it_cv)->getProjCode(),cv_lat_miss_index);
452 cv_lat_miss_index++;
453 }
454 }
455 }
456}
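// For reference: for projected (non-HE5_GCTP_GEO) grids, the CF pattern produced by
// add_cf_grid_cvs() and add_cf_grid_mapinfo_var() above is a pair of 1-D projection
// x/y coordinate variables plus a dummy "grid mapping" variable per grid; each data
// variable is then tied to its grid through a "grid_mapping" attribute (added on the
// DAS side). A rough sketch of the resulting objects, with illustrative names only:
//
//   Float64 XDim[...];                  // 1-D projection x coordinate variable
//   Float64 YDim[...];                  // 1-D projection y coordinate variable
//   eos_cf_projection_1;                // dummy grid-mapping variable for grid 1
//     eos_cf_projection_1:grid_mapping_name = "lambert_azimuthal_equal_area"   (for LAMAZ)
//   <data variable>:grid_mapping = "eos_cf_projection_1"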
457
458void gen_dap_oneeos5cf_dds(DDS &dds,const HDF5CF::EOS5CVar* cvar) {
459
460 BESDEBUG("h5","Coming to gen_dap_oneeos5cf_dds() "<<endl);
461
462 float cv_point_lower = cvar->getPointLower();
463 float cv_point_upper = cvar->getPointUpper();
464 float cv_point_left = cvar->getPointLeft();
465 float cv_point_right = cvar->getPointRight();
466 EOS5GridPCType cv_proj_code = cvar->getProjCode();
467 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
468 if(dims.size() !=2)
469 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
470 add_cf_grid_cvs(dds,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims);
471
472}
473
474void gen_dap_oneeos5cf_das(DAS &das,const vector<HDF5CF::Var*>& vars, const HDF5CF::EOS5CVar* cvar,const unsigned short g_suffix) {
475
476 BESDEBUG("h5","Coming to gen_dap_oneeos5cf_das() "<<endl);
477#if 0
478 float cv_point_lower = cvar->getPointLower();
479 float cv_point_upper = cvar->getPointUpper();
480 float cv_point_left = cvar->getPointLeft();
481 float cv_point_right = cvar->getPointRight();
482#endif
483 EOS5GridPCType cv_proj_code = cvar->getProjCode();
484 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
485
486#if 0
487cerr<<"cv_point_lower is "<<cv_point_lower <<endl;
488cerr<<"cvar name is "<<cvar->getName() <<endl;
489for(vector<HDF5CF::Dimension*>::const_iterator it_d = dims.begin(); it_d != dims.end(); ++it_d)
490 cerr<<"dim name das is "<<(*it_d)->getNewName() <<endl;
491#endif
492
493 if(dims.size() !=2)
494 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
495#if 0
496 add_cf_grid_cv_attrs(das,vars,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims,cvar->getParams(),g_suffix);
497#endif
498 add_cf_grid_cv_attrs(das,vars,cv_proj_code,dims,cvar->getParams(),g_suffix);
499
500}
501
502//For EOS5, generate the ignored object info. for the CF option
503void gen_eos5_cf_ignored_obj_info(DAS &das, HDF5CF::EOS5File *f) {
504
505 BESDEBUG("h5","Coming to gen_eos5_cf_ignored_obj_info() "<<endl);
506 AttrTable *at = das.get_table("Ignored_Object_Info");
507 if (NULL == at)
508 at = das.add_table("Ignored_Object_Info", new AttrTable);
509
510 at->append_attr("Message","String",f->Get_Ignored_Msg());
511
512
513}
514
515// Generate DDS for EOS5 coordinate variables
516void gen_dap_oneeos5cvar_dds(DDS &dds,const HDF5CF::EOS5CVar* cvar, const hid_t file_id, const string & filename) {
517
518 BESDEBUG("h5","Coming to gen_dap_oneeos5cvar_dds() "<<endl);
519 BaseType *bt = NULL;
520
521 // TODO: need to handle 64-bit integer for DAP4 CF
522 if(cvar->getType()==H5INT64 || cvar->getType() == H5UINT64)
523 return;
524 switch(cvar->getType()) {
525#define HANDLE_CASE(tid,type) \
526 case tid: \
527 bt = new (type)(cvar->getNewName(),cvar->getFullPath()); \
528 break;
529
530 HANDLE_CASE(H5FLOAT32, HDF5CFFloat32);
531 HANDLE_CASE(H5FLOAT64, HDF5CFFloat64);
532 HANDLE_CASE(H5CHAR,HDF5CFInt16);
533 HANDLE_CASE(H5UCHAR, HDF5CFByte);
534 HANDLE_CASE(H5INT16, HDF5CFInt16);
535 HANDLE_CASE(H5UINT16, HDF5CFUInt16);
536 HANDLE_CASE(H5INT32, HDF5CFInt32);
537 HANDLE_CASE(H5UINT32, HDF5CFUInt32);
538 HANDLE_CASE(H5FSTRING, Str);
539 HANDLE_CASE(H5VSTRING, Str);
540 default:
541 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
542#undef HANDLE_CASE
543 }
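    // For reference, each HANDLE_CASE entry above expands into an ordinary case label;
    // e.g. HANDLE_CASE(H5FLOAT32, HDF5CFFloat32) becomes:
    //
    //     case H5FLOAT32:
    //         bt = new (HDF5CFFloat32)(cvar->getNewName(), cvar->getFullPath());
    //         break;
    //
    // Note that H5CHAR is mapped to HDF5CFInt16 and H5UCHAR to HDF5CFByte because
    // DAP2 has no signed 8-bit integer type.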
544
545 if (bt) {
546
547 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
548 vector <HDF5CF::Dimension*>:: const_iterator it_d;
549 vector <size_t> dimsizes;
550 dimsizes.resize(cvar->getRank());
551 for(int i = 0; i <cvar->getRank();i++)
552 dimsizes[i] = (dims[i])->getSize();
553
554
555 if(dims.empty())
556 throw InternalErr(__FILE__,__LINE__,"the coordinate variables cannot be scalar.");
557 switch(cvar->getCVType()) {
558
559 case CV_EXIST:
560 {
561
562#if 0
563for(vector<HDF5CF::Attribute *>::const_iterator it_ra = cvar->getAttributes().begin();
564 it_ra != cvar->getAttributes().end(); ++it_ra) {
565cerr<<"cvar attribute name is "<<(*it_ra)->getNewName() <<endl;
566cerr<<"cvar attribute value type is "<<(*it_ra)->getType() <<endl;
567}
568cerr<<"cvar new name exist at he s5cfdap.cc is "<<cvar->getNewName() <<endl;
569#endif
570 bool is_latlon = cvar->isLatLon();
571 HDF5CFArray *ar = NULL;
572 try {
573 ar = new HDF5CFArray (
574 cvar->getRank(),
575 file_id,
576 filename,
577 cvar->getType(),
578 dimsizes,
579 cvar->getFullPath(),
580 cvar->getTotalElems(),
581 CV_EXIST,
582 is_latlon,
583 cvar->getCompRatio(),
584 false,
585 cvar->getNewName(),
586 bt);
587 }
588 catch (...) {
589 delete bt;
590 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDF5CFArray.");
591 }
592
593 for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
594 if (""==(*it_d)->getNewName())
595 ar->append_dim((*it_d)->getSize());
596 else
597 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
598 }
599
600 dds.add_var(ar);
601 delete bt;
602 delete ar;
603 }
604 break;
605
606 case CV_LAT_MISS:
607 case CV_LON_MISS:
608 {
609
610 HDFEOS5CFMissLLArray *ar = NULL;
611 try {
612#if 0
613cerr<<"cvar zone here is "<<cvar->getZone() <<endl;
614cerr<<"cvar Sphere here is "<<cvar->getSphere() <<endl;
615cerr<<"cvar getParams here 1 is "<<cvar->getParams()[0]<<endl;
616#endif
617 ar = new HDFEOS5CFMissLLArray (
618 cvar->getRank(),
619 filename,
620 file_id,
621 cvar->getFullPath(),
622 cvar->getCVType(),
623 cvar->getPointLower(),
624 cvar->getPointUpper(),
625 cvar->getPointLeft(),
626 cvar->getPointRight(),
627 cvar->getPixelReg(),
628 cvar->getOrigin(),
629 cvar->getProjCode(),
630 cvar->getParams(),
631 cvar->getZone(),
632 cvar->getSphere(),
633 cvar->getXDimSize(),
634 cvar->getYDimSize(),
635 cvar->getNewName(),
636 bt);
637 }
638 catch (...) {
639 delete bt;
640 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFMissLLArray.");
641 }
642
643 for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
644 if (""==(*it_d)->getNewName())
645 ar->append_dim((*it_d)->getSize());
646 else
647 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
648 }
649
650 dds.add_var(ar);
651 delete bt;
652 delete ar;
653 }
654 break;
655
656 case CV_NONLATLON_MISS:
657 {
658
659 if (cvar->getRank() !=1) {
660 delete bt;
661 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
662 }
663 int nelem = (cvar->getDimensions()[0])->getSize();
664
665 HDFEOS5CFMissNonLLCVArray *ar = NULL;
666 try {
667                 ar = new HDFEOS5CFMissNonLLCVArray(
668                     cvar->getRank(),
669 nelem,
670 cvar->getNewName(),
671 bt);
672 }
673 catch (...) {
674 delete bt;
675 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFMissNonLLCVArray.");
676 }
677
678
679 for(it_d = dims.begin(); it_d != dims.end(); it_d++) {
680 if (""==(*it_d)->getNewName())
681 ar->append_dim((*it_d)->getSize());
682 else
683 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
684 }
685 dds.add_var(ar);
686 delete bt;
687 delete ar;
688
689
690 }
691 break;
692 case CV_SPECIAL:
693         // Currently only Aura TES files are supported. May need to revise when having more
694         // special products. KY 2012-2-3
695 {
696
697 if (cvar->getRank() !=1) {
698 delete bt;
699 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
700 }
701 int nelem = (cvar->getDimensions()[0])->getSize();
702 HDFEOS5CFSpecialCVArray *ar = NULL;
703
704 try {
705                 ar = new HDFEOS5CFSpecialCVArray(
706                     cvar->getRank(),
707 filename,
708 file_id,
709 cvar->getType(),
710 nelem,
711 cvar->getFullPath(),
712 cvar->getNewName(),
713 bt);
714 }
715 catch (...) {
716 delete bt;
717                 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFSpecialCVArray.");
718 }
719
720
721 for(it_d = dims.begin(); it_d != dims.end(); ++it_d){
722 if (""==(*it_d)->getNewName())
723 ar->append_dim((*it_d)->getSize());
724 else
725 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
726 }
727 dds.add_var(ar);
728 delete bt;
729 delete ar;
730 }
731 break;
732 case CV_MODIFY:
733 default:
734 delete bt;
735 throw InternalErr(__FILE__,__LINE__,"Unsupported coordinate variable type.");
736 }
737
738 }
739
740}
741
742// Generate EOS5 DAS
743void gen_eos5_cfdas(DAS &das, hid_t file_id, HDF5CF::EOS5File *f) {
744
745 BESDEBUG("h5","Coming to HDF-EOS5 products DAS generation function gen_eos5_cfdas "<<endl);
746
747 // First check if this is for generating the ignored object info.
748 if(true == f->Get_IgnoredInfo_Flag()) {
749 gen_eos5_cf_ignored_obj_info(das, f);
750 return;
751 }
752
753 const vector<HDF5CF::Var *>& vars = f->getVars();
754 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
755 const vector<HDF5CF::Group *>& grps = f->getGroups();
756 const vector<HDF5CF::Attribute *>& root_attrs = f->getAttributes();
757
758 vector<HDF5CF::Var *>::const_iterator it_v;
759 vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
760 vector<HDF5CF::Group *>::const_iterator it_g;
761 vector<HDF5CF::Attribute *>::const_iterator it_ra;
762
763     // Handle the file attributes (attributes under the root group).
764 // The table name is "HDF_GLOBAL".
765 if (false == root_attrs.empty()) {
766 AttrTable *at = das.get_table(FILE_ATTR_TABLE_NAME);
767 if (NULL == at)
768 at = das.add_table(FILE_ATTR_TABLE_NAME, new AttrTable);
769
770 for (it_ra = root_attrs.begin(); it_ra != root_attrs.end(); it_ra++) {
771 gen_dap_oneobj_das(at,*it_ra,NULL);
772 }
773 }
774
775 if (false == grps.empty()) {
776 for (it_g = grps.begin();
777 it_g != grps.end(); ++it_g) {
778 AttrTable *at = das.get_table((*it_g)->getNewName());
779 if (NULL == at)
780 at = das.add_table((*it_g)->getNewName(), new AttrTable);
781
782 for (it_ra = (*it_g)->getAttributes().begin();
783 it_ra != (*it_g)->getAttributes().end(); ++it_ra) {
784 //gen_dap_oneobj_das(at,*it_ra,NULL);
785 // TODO: ADDING a BES KEY
786 if((*it_ra)->getNewName()=="Conventions" &&((*it_g)->getNewName() == "HDFEOS_ADDITIONAL_FILE_ATTRIBUTES")
787 && (true==HDF5RequestHandler::get_eos5_rm_convention_attr_path())) {
788 AttrTable *at_das = das.get_table(FILE_ATTR_TABLE_NAME);
789 if (NULL == at_das)
790 at_das = das.add_table(FILE_ATTR_TABLE_NAME, new AttrTable);
791 gen_dap_oneobj_das(at_das,*it_ra,NULL);
792 }
793 else
794 gen_dap_oneobj_das(at,*it_ra,NULL);
795 }
796 }
797 }
798
799 for (it_v = vars.begin();
800 it_v != vars.end(); ++it_v) {
801 if (false == ((*it_v)->getAttributes().empty())) {
802
803 // TODO: Need to handle 64-bit int support for DAP4 CF.
804 if(H5INT64 == (*it_v)->getType() || H5UINT64 == (*it_v)->getType()){
805 continue;
806 }
807
808 AttrTable *at = das.get_table((*it_v)->getNewName());
809 if (NULL == at)
810 at = das.add_table((*it_v)->getNewName(), new AttrTable);
811
812 for (it_ra = (*it_v)->getAttributes().begin();
813 it_ra != (*it_v)->getAttributes().end(); ++it_ra) {
814 gen_dap_oneobj_das(at,*it_ra,*it_v);
815 }
816 }
817 }
818
819 for (it_cv = cvars.begin(); it_cv !=cvars.end();it_cv++) {
820
821 if (false == ((*it_cv)->getAttributes().empty())) {
822
823 // TODO: Need to handle 64-bit int support for DAP4 CF.
824 if(H5INT64 == (*it_cv)->getType() || H5UINT64 == (*it_cv)->getType()){
825 continue;
826 }
827
828 AttrTable *at = das.get_table((*it_cv)->getNewName());
829 if (NULL == at)
830 at = das.add_table((*it_cv)->getNewName(), new AttrTable);
831
832 for (it_ra = (*it_cv)->getAttributes().begin();
833 it_ra != (*it_cv)->getAttributes().end(); ++it_ra) {
834 gen_dap_oneobj_das(at,*it_ra,*it_cv);
835 }
836 }
837 }
838
839 // Add CF 1-D projection variables
840 unsigned short cv_lat_miss_index = 1;
841 // This code block will add grid_mapping attribute info. to corresponding variables.
842 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
843 if((*it_cv)->getCVType() == CV_LAT_MISS) {
844 if((*it_cv)->getProjCode() != HE5_GCTP_GEO) {
845 gen_dap_oneeos5cf_das(das,vars,*it_cv,cv_lat_miss_index);
846 cv_lat_miss_index++;
847 }
848 }
849 }
850
851 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
852 if((*it_cv)->getProjCode() == HE5_GCTP_LAMAZ) {
853 if((*it_cv)->getCVType() == CV_LAT_MISS || (*it_cv)->getCVType() == CV_LON_MISS) {
854 AttrTable *at = das.get_table((*it_cv)->getNewName());
855 if (NULL == at)
856 at = das.add_table((*it_cv)->getNewName(), new AttrTable);
857 if((*it_cv)->getCVType() == CV_LAT_MISS)
858 add_ll_valid_range(at,true);
859 else
860 add_ll_valid_range(at,false);
861 }
862 }
863 }
864
865
866 bool disable_ecsmetadata = HDF5RequestHandler::get_disable_ecsmeta();
867
868 if(disable_ecsmetadata == false) {
869
870         // To keep backward compatibility with the old handler,
871         // we parse the special ECS metadata to DAP attributes.
872
873 string st_str ="";
874 string core_str="";
875 string arch_str="";
876 string xml_str ="";
877 string subset_str="";
878 string product_str="";
879 string other_str ="";
880 bool st_only = false;
881
882 read_ecs_metadata(file_id, st_str, core_str, arch_str, xml_str,
883 subset_str, product_str, other_str, st_only);
884
885#if 0
886if(st_str!="") BESDEBUG("h5","Final structmetadata "<<st_str <<endl);
887if(core_str!="") BESDEBUG("h5","Final coremetadata "<<core_str <<endl);
888if(arch_str!="") BESDEBUG("h5","Final archivedmetadata "<<arch_str <<endl);
889if(xml_str!="") BESDEBUG("h5","Final xmlmetadata "<<xml_str <<endl);
890if(subset_str!="") BESDEBUG("h5","Final subsetmetadata "<<subset_str <<endl);
891if(product_str!="") BESDEBUG("h5","Final productmetadata "<<product_str <<endl);
892if(other_str!="") BESDEBUG("h5","Final othermetadata "<<other_str <<endl);
893
894#endif
895 if(st_str != ""){
896
897#if 0
898 string check_disable_smetadata_key ="H5.DisableStructMetaAttr";
899 bool is_check_disable_smetadata = false;
900#endif
901 bool is_check_disable_smetadata = HDF5RequestHandler::get_disable_structmeta();
902
903 if (false == is_check_disable_smetadata) {
904
905 AttrTable *at = das.get_table("StructMetadata");
906 if (NULL == at)
907 at = das.add_table("StructMetadata", new AttrTable);
908 parser_arg arg(at);
909
910 he5das_scan_string((const char*) st_str.c_str());
911 if (he5dasparse(&arg) != 0
912 || false == arg.status()){
913
914 ERROR_LOG("HDF-EOS5 parse error while processing a "
915 << "StructMetadata " << " HDFEOS attribute" << endl);
916 }
917
918 he5daslex_destroy();
919
920 }
921 }
922
923 if(core_str != ""){
924 AttrTable *at = das.get_table("CoreMetadata");
925 if (NULL == at)
926 at = das.add_table("CoreMetadata", new AttrTable);
927 parser_arg arg(at);
928 he5das_scan_string((const char*) core_str.c_str());
929 if (he5dasparse(&arg) != 0
930 || false == arg.status()){
931
932 ERROR_LOG("HDF-EOS5 parse error while processing a "
933 << "CoreMetadata " << " HDFEOS attribute" << endl);
934 }
935
936 he5daslex_destroy();
937 }
938 if(arch_str != ""){
939 AttrTable *at = das.get_table("ArchiveMetadata");
940 if (NULL == at)
941 at = das.add_table("ArchiveMetadata", new AttrTable);
942 parser_arg arg(at);
943 he5das_scan_string((const char*) arch_str.c_str());
944 if (he5dasparse(&arg) != 0
945 || false == arg.status()){
946
947 ERROR_LOG("HDF-EOS5 parse error while processing a "
948 << "ArchiveMetadata " << " HDFEOS attribute" << endl);
949 }
950 he5daslex_destroy();
951 }
952
953     // The XML attribute includes double quotes ("), which will choke the netCDF Java library.
954     // So we replace the double quote (") with &quote;. This is currently the OPeNDAP way.
955     // The XML attribute cannot be parsed, so we just pass the string.
956 if(xml_str != ""){
957 AttrTable *at = das.get_table("XMLMetadata");
958 if (NULL == at)
959 at = das.add_table("XMLMetadata", new AttrTable);
960 HDF5CFDAPUtil::replace_double_quote(xml_str);
961 at->append_attr("Contents","String",xml_str);
962 }
963
964     // SubsetMetadata and ProductMetadata exist in HDF-EOS2 files.
965     // So far we haven't found any such metadata in NASA HDF-EOS5 files,
966     // but we will keep an eye on it. KY 2012-3-6
967 if(subset_str != ""){
968 AttrTable *at = das.get_table("SubsetMetadata");
969 if (NULL == at)
970 at = das.add_table("SubsetMetadata", new AttrTable);
971 parser_arg arg(at);
972 he5das_scan_string((const char*) subset_str.c_str());
973 if (he5dasparse(&arg) != 0
974 || false == arg.status()) {
975
976 ERROR_LOG("HDF-EOS5 parse error while processing a "
977 << "SubsetMetadata " << " HDFEOS attribute" << endl);
978 }
979 he5daslex_destroy();
980 }
981 if(product_str != ""){
982 AttrTable *at = das.get_table("ProductMetadata");
983 if (NULL == at)
984 at = das.add_table("ProductMetadata", new AttrTable);
985 parser_arg arg(at);
986 he5das_scan_string((const char*) product_str.c_str());
987 if (he5dasparse(&arg) != 0
988 || false == arg.status()){
989 ERROR_LOG("HDF-EOS5 parse error while processing a "
990 << "ProductMetadata " << " HDFEOS attribute" << endl);
991 }
992 he5daslex_destroy();
993 }
994
995     // All other metadata under "HDFEOS INFORMATION" will not be
996     // parsed since we don't know how to parse them.
997     // We will simply pass a string to the DAS.
998 if (other_str != ""){
999 AttrTable *at = das.get_table("OtherMetadata");
1000 if (NULL == at)
1001 at = das.add_table("OtherMetadata", new AttrTable);
1002 at->append_attr("Contents","String",other_str);
1003 }
1004
1005 }
1006 // CHECK ALL UNLIMITED DIMENSIONS from the coordinate variables based on the names.
1007 if(f->HaveUnlimitedDim() == true) {
1008
1009 AttrTable *at = das.get_table("DODS_EXTRA");
1010 if (NULL == at)
1011 at = das.add_table("DODS_EXTRA", new AttrTable);
1012 string unlimited_names;
1013
1014 for (it_cv = cvars.begin();
1015 it_cv != cvars.end(); it_cv++) {
1016#if 0
1017 bool has_unlimited_dim = false;
1018#endif
1019 // Check unlimited dimension names.
1020 for (vector<Dimension*>::const_iterator ird = (*it_cv)->getDimensions().begin();
1021 ird != (*it_cv)->getDimensions().end(); ++ird) {
1022
1023                 // Currently we only check one unlimited dimension, which is the most
1024                 // common case. When we receive the conventions from JG, we will add
1025                 // support for multiple unlimited dimensions. KY 2016-02-09
1026 if((*ird)->HaveUnlimitedDim() == true) {
1027
1028 if(unlimited_names=="") {
1029 unlimited_names = (*ird)->getNewName();
1030 at->append_attr("Unlimited_Dimension","String",unlimited_names);
1031 }
1032 else {
1033 if(unlimited_names.rfind((*ird)->getNewName()) == string::npos) {
1034 unlimited_names = unlimited_names+" "+(*ird)->getNewName();
1035 at->append_attr("Unlimited_Dimension","String",(*ird)->getNewName());
1036 }
1037 }
1038 }
1039
1040 }
1041
1042#if 0
1043 //if(true == has_unlimited_dim)
1044 // break;
1045#endif
1046 }
1047#if 0
1048 //if(unlimited_names!="")
1049 // at->append_attr("Unlimited_Dimension","String",unlimited_names);
1050#endif
1051 }
1052
1053}
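// For reference: when a coordinate variable carries an unlimited dimension, the
// DODS_EXTRA block above yields a DAS table of the following shape (the dimension
// name "Time" is only an illustration):
//
//   DODS_EXTRA {
//       String Unlimited_Dimension "Time";
//   }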
1054
1055// Read ECS metadata
1056void read_ecs_metadata(hid_t s_file_id,
1057 string &total_strmeta_value,
1058 string &total_coremeta_value,
1059 string &total_archmeta_value,
1060 string &total_xmlmeta_value,
1061 string &total_submeta_value,
1062 string &total_prometa_value,
1063 string &total_othermeta_value,
1064 bool s_st_only) {
1065
1066 BESDEBUG("h5","Coming to read_ecs_metadata() "<<endl);
1067 string ecs_group = "/HDFEOS INFORMATION";
1068 hid_t ecs_grp_id = -1;
1069 if ((ecs_grp_id = H5Gopen(s_file_id, ecs_group.c_str(),H5P_DEFAULT))<0) {
1070 string msg =
1071 "h5_ecs_meta: unable to open the HDF5 group ";
1072 msg +=ecs_group;
1073 throw InternalErr(__FILE__, __LINE__, msg);
1074 }
1075
1076 H5G_info_t g_info;
1077 hsize_t nelems = 0;
1078
1079 if (H5Gget_info(ecs_grp_id,&g_info) <0) {
1080 string msg =
1081 "h5_ecs_meta: unable to obtain the HDF5 group info. for ";
1082 msg +=ecs_group;
1083 H5Gclose(ecs_grp_id);
1084 throw InternalErr(__FILE__, __LINE__, msg);
1085 }
1086
1087 nelems = g_info.nlinks;
1088
1089 ssize_t oname_size = 0;
1090#if 0
1091 int cur_archmeta_suffix = 0;
1092 int cur_coremeta_suffix = 0;
1093 int cur_strmeta_suffix = 0;
1094 int cur_xmlmeta_suffix = 0;
1095#endif
1096
1097 int archmeta_num = -1;
1098 int coremeta_num = -1;
1099 int xmlmeta_num = -1;
1100 int strmeta_num = -1;
1101 int submeta_num = -1;
1102 int prometa_num = -1;
1103
1104     // Initialize the total number for different metadata.
1105 int archmeta_num_total = 0;
1106 int coremeta_num_total = 0;
1107 int xmlmeta_num_total = 0;
1108 int strmeta_num_total = 0;
1109 int submeta_num_total = 0;
1110 int prometa_num_total = 0;
1111 int othermeta_num_total = 0;
1112
1113 bool archmeta_no_suffix = true;
1114 bool coremeta_no_suffix = true;
1115 bool strmeta_no_suffix = true;
1116 bool xmlmeta_no_suffix = true;
1117 bool submeta_no_suffix = true;
1118 bool prometa_no_suffix = true;
1119
1120 // Define a vector of string to hold all dataset names.
1121 vector<string> s_oname(nelems);
1122
1123 // Define an EOSMetadata array that can describe the metadata type for each object
1124 // We initialize the value to OtherMeta.
1125 EOS5Metadata metatype[nelems];
1126
1127 for (unsigned int i =0; i<nelems; i++)
1128 metatype[i] = OtherMeta;
1129
1130 for (hsize_t i = 0; i < nelems; i++) {
1131
1132 // Query the length of the object name.
1133 oname_size =
1134 H5Lget_name_by_idx(ecs_grp_id,".",H5_INDEX_NAME,H5_ITER_NATIVE,i,NULL,
1135 0, H5P_DEFAULT);
1136 if (oname_size <= 0) {
1137 string msg = "hdf5 object name error from: ";
1138 msg += ecs_group;
1139 H5Gclose(ecs_grp_id);
1140 throw InternalErr(__FILE__, __LINE__, msg);
1141 }
1142
1143 // Obtain the name of the object.
1144 vector<char> oname(oname_size + 1);
1145 if (H5Lget_name_by_idx(ecs_grp_id,".",H5_INDEX_NAME,H5_ITER_NATIVE,i,&oname[0],
1146 (size_t)(oname_size+1), H5P_DEFAULT)<0){
1147 string msg = "hdf5 object name error from: ";
1148 msg += ecs_group;
1149 H5Gclose(ecs_grp_id);
1150 throw InternalErr(__FILE__, __LINE__, msg);
1151 }
1152
1153         // Check if this object is an HDF5 dataset; if not, throw an error.
1154         // First, check whether it is a hard link or a soft link.
1155 H5L_info_t linfo;
1156 if (H5Lget_info(ecs_grp_id,&oname[0],&linfo,H5P_DEFAULT)<0) {
1157 string msg = "hdf5 link name error from: ";
1158 msg += ecs_group;
1159 H5Gclose(ecs_grp_id);
1160 throw InternalErr(__FILE__, __LINE__, msg);
1161 }
1162
1163 // This is the soft link.
1164 if (linfo.type == H5L_TYPE_SOFT){
1165 string msg = "hdf5 link name error from: ";
1166 msg += ecs_group;
1167 H5Gclose(ecs_grp_id);
1168 throw InternalErr(__FILE__, __LINE__, msg);
1169 }
1170
1171 // Obtain the object type
1172 H5O_info_t oinfo;
1173 if (H5OGET_INFO_BY_IDX(ecs_grp_id, ".", H5_INDEX_NAME, H5_ITER_NATIVE,
1174 i, &oinfo, H5P_DEFAULT)<0) {
1175 string msg = "Cannot obtain the object info ";
1176 msg += ecs_group;
1177 H5Gclose(ecs_grp_id);
1178 throw InternalErr(__FILE__, __LINE__, msg);
1179 }
1180
1181 if(oinfo.type != H5O_TYPE_DATASET) {
1182             string msg = "The object is not an HDF5 dataset: ";
1183 msg += ecs_group;
1184 H5Gclose(ecs_grp_id);
1185 throw InternalErr(__FILE__, __LINE__, msg);
1186 }
1187
1188         // We want to remove the last '\0' character added by C.
1189 string s_one_oname(oname.begin(),oname.end()-1);
1190 s_oname[i] = s_one_oname;
1191
1192 // Calculate how many elements we have for each category(StructMetadata, CoreMetadata, etc.)
1193 if (((s_one_oname.find("StructMetadata"))==0) ||
1194 ((s_one_oname.find("structmetadata"))==0)){
1195
1196 metatype[i] = StructMeta;
1197
1198 // Do we have suffix for the metadata?
1199 // If this metadata doesn't have any suffix, it should only come to this loop once.
1200 // That's why, when checking the first time, no_suffix is always true.
1201 // If we have already found that it doesn't have any suffix,
1202 // it should not go into this loop. throw an error.
1203 if (false == strmeta_no_suffix) {
1204 string msg = "StructMetadata/structmetadata without suffix should only appear once. ";
1205 H5Gclose(ecs_grp_id);
1206 throw InternalErr(__FILE__, __LINE__, msg);
1207 }
1208
1209 else if(strmeta_num_total >0)
1210 strmeta_num_total++;
1211 else { // either no suffix or the first time to loop the one having the suffix.
1212 if ((0 == s_one_oname.compare("StructMetadata"))||
1213 (0 == s_one_oname.compare("structmetadata")))
1214 strmeta_no_suffix = false;
1215 else strmeta_num_total++;
1216 }
1217#if 0
1218BESDEBUG("h5","strmeta_num_total= "<<strmeta_num_total <<endl);
1219if(strmeta_no_suffix) BESDEBUG("h5","structmeta data has the suffix" <<endl);
1220else BESDEBUG("h5","structmeta data doesn't have the suffix" <<endl);
1221#endif
1222 }
1223
1224 if(false == s_st_only) {
1225
1226 if ((0 == (s_one_oname.find("CoreMetadata"))) ||
1227 (0 == (s_one_oname.find("coremetadata")))){
1228
1229 metatype[i] = CoreMeta;
1230
1231 // Do we have suffix for the metadata?
1232 // When checking the first time, no_suffix is always true.
1233 // If we have already found that it doesn't have any suffix,
1234 // it should not go into this loop anyway. throw an error.
1235 if (false == coremeta_no_suffix) {
1236 string msg = "CoreMetadata/coremetadata without suffix should only appear once. ";
1237 H5Gclose(ecs_grp_id);
1238 throw InternalErr(__FILE__, __LINE__, msg);
1239 }
1240
1241 else if(coremeta_num_total >0)
1242 coremeta_num_total++;
1243 else { // either no suffix or the first time to loop the one having the suffix.
1244                     // If no suffix is true, it should be out of the loop. In case it comes
1245                     // to the loop again, we set "coremeta_no_suffix" to false so an error
1246                     // can be thrown. This is counter-intuitive. Hopefully people can understand it.
1247 if ((0 == s_one_oname.compare("CoreMetadata")) ||
1248 (0 == s_one_oname.compare("coremetadata")))
1249 coremeta_no_suffix = false;
1250 else coremeta_num_total++;
1251 }
1252#if 0
1253BESDEBUG("h5","coremeta_num_total= "<<coremeta_num_total <<endl);
1254if(coremeta_no_suffix) BESDEBUG("h5","coremeta data has the suffix" <<endl);
1255else BESDEBUG("h5","coremeta data doesn't have the suffix" <<endl);
1256#endif
1257 }
1258
1259 // OMI has the metadata name as "ArchiveMetadata.0"
1260 else if ((0 == (s_one_oname.find("ArchivedMetadata"))) ||
1261 (0 == (s_one_oname.find("archivedmetadata"))) ||
1262 (0 == (s_one_oname.find("ArchiveMetadata"))) ||
1263 (0 == (s_one_oname.find("archivemetadata")))){
1264
1265 metatype[i] = ArchivedMeta;
1266 // Do we have suffix for the metadata?
1267 // When checking the first time, no_suffix is always true.
1268 // If we have already found that it doesn't have any suffix,
1269 // it should not go into this loop anyway. throw an error.
1270 if (false == archmeta_no_suffix) {
1271 string msg = "archivedmetadata/ArchivedMetadata without suffix should only appear once. ";
1272 H5Gclose(ecs_grp_id);
1273 throw InternalErr(__FILE__, __LINE__, msg);
1274 }
1275
1276 else if(archmeta_num_total >0)
1277 archmeta_num_total++;
1278 else { // either no suffix or the first time to loop the one having the suffix.
1279 if ((0 == s_one_oname.compare("ArchivedMetadata"))||
1280 (0 == s_one_oname.compare("archivedmetadata")) ||
1281 (0 == s_one_oname.compare("archivemetadata")) ||
1282 (0 == s_one_oname.compare("ArchiveMetadata")))
1283 archmeta_no_suffix = false;
1284 else
1285 archmeta_num_total++;
1286 }
1287#if 0
1288BESDEBUG("h5","archmeta_num_total= "<<archmeta_num_total <<endl);
1289if(archmeta_no_suffix) BESDEBUG("h5","archmeta data has the suffix" <<endl);
1290else BESDEBUG("h5","archmeta data doesn't have the suffix" <<endl);
1291#endif
1292
1293 }
1294
1295 else if (((s_one_oname.find("SubsetMetadata"))==0) ||
1296 ((s_one_oname.find("subsetmetadata"))==0)){
1297
1298 metatype[i] = SubsetMeta;
1299 // Do we have suffix for the metadata?
1300 // When checking the first time, no_suffix is always true.
1301 // If we have already found that it doesn't have any suffix,
1302 // it should not go into this loop anyway. throw an error.
1303 if (false == submeta_no_suffix) {
1304 H5Gclose(ecs_grp_id);
1305 string msg = "submetadata/SubMetadata without suffix should only appear once. ";
1306 throw InternalErr(__FILE__, __LINE__, msg);
1307 }
1308
1309 else if(submeta_num_total >0)
1310 submeta_num_total++;
1311 else { // either no suffix or the first time to loop the one having the suffix.
1312 if ((0 == s_one_oname.compare("SubsetMetadata"))||
1313 (0 == s_one_oname.compare("subsetmetadata")))
1314 submeta_no_suffix = false;
1315 else submeta_num_total++;
1316 }
1317#if 0
1318BESDEBUG("h5","submeta_num_total= "<<submeta_num_total <<endl);
1319if(submeta_no_suffix) BESDEBUG("h5","submeta data has the suffix" <<endl);
1320else BESDEBUG("h5","submeta data doesn't have the suffix" <<endl);
1321#endif
1322
1323 }
1324
1325 else if ((0 == (s_one_oname.find("XmlMetadata"))) ||
1326 (0 == (s_one_oname.find("xmlmetadata")))){
1327
1328 metatype[i] = XMLMeta;
1329
1330 // Do we have suffix for the metadata?
1331 // When checking the first time, no_suffix is always true.
1332 // If we have already found that it doesn't have any suffix,
1333 // it should not go into this loop anyway. throw an error.
1334 if (false == xmlmeta_no_suffix) {
1335 H5Gclose(ecs_grp_id);
1336 string msg = "xmlmetadata/Xmlmetadata without suffix should only appear once. ";
1337 throw InternalErr(__FILE__, __LINE__, msg);
1338 }
1339
1340 else if(xmlmeta_num_total >0)
1341 xmlmeta_num_total++;
1342 else { // either no suffix or the first time to loop the one having the suffix.
1343 if ((0 == s_one_oname.compare("XmlMetadata"))||
1344 (0 == s_one_oname.compare("xmlmetadata")))
1345 xmlmeta_no_suffix = false;
1346 else xmlmeta_num_total++;
1347 }
1348#if 0
1349BESDEBUG("h5","xmlmeta_num_total= "<<xmlmeta_num_total <<endl);
1350if(xmlmeta_no_suffix) BESDEBUG("h5","xmlmeta data doesn't have the suffix" <<endl);
1351else BESDEBUG("h5","xmlmeta data has the suffix" <<endl);
1352#endif
1353
1354 }
1355
1356 else if ((0 == (s_one_oname.find("ProductMetadata"))) ||
1357 (0 == (s_one_oname.find("productmetadata")))){
1358
1359 metatype[i] = ProductMeta;
1360 // Do we have suffix for the metadata?
1361 // When checking the first time, no_suffix is always true.
1362 // If we have already found that it doesn't have any suffix,
1363 // it should not go into this loop anyway. throw an error.
1364 if (!prometa_no_suffix) {
1365 H5Gclose(ecs_grp_id);
1366 string msg = "productmetadata/ProductMetadata without suffix should only appear once. ";
1367 throw InternalErr(__FILE__, __LINE__, msg);
1368 }
1369
1370 else if(prometa_num_total >0) prometa_num_total++;
1371 else { // either no suffix or the first time to loop the one having the suffix.
1372 if ((0 == s_one_oname.compare("ProductMetadata"))||
1373 (0 == s_one_oname.compare("productmetadata")))
1374 prometa_no_suffix = false;
1375 else prometa_num_total++;
1376 }
1377
1378 }
1379
1380 // All other metadata will be merged to one string, no need to check the name.
1381 else othermeta_num_total++;
1382 }
1383
1384 oname.clear();
1385 s_one_oname.clear();
1386 }
1387
1388 // Define a vector of string to hold StructMetadata.
1389 // StructMetadata must exist for a valid HDF-EOS5 file.
1390 vector<string> strmeta_value;
1391 if (strmeta_num_total <= 0) {
1392         string msg = "No StructMetadata is found in this HDF-EOS5 file. ";
1393 H5Gclose(ecs_grp_id);
1394 throw InternalErr(__FILE__, __LINE__, msg);
1395 }
1396 else {
1397 strmeta_value.resize(strmeta_num_total);
1398 for (int i = 0; i < strmeta_num_total; i++)
1399 strmeta_value[i]="";
1400 }
1401
1402 // All other metadata are optional.
1403 // Define a vector of string to hold archivedmetadata.
1404 vector<string> archmeta_value;
1405 if (archmeta_num_total >0) {
1406 archmeta_value.resize(archmeta_num_total);
1407 for (int i = 0; i < archmeta_num_total; i++)
1408 archmeta_value[i]="";
1409 }
1410
1411 // Define a vector of string to hold coremetadata.
1412 vector<string> coremeta_value;
1413 if (coremeta_num_total >0) {
1414 coremeta_value.resize(coremeta_num_total);
1415 for (int i = 0; i < coremeta_num_total; i++)
1416 coremeta_value[i]="";
1417 }
1418
1419 // Define a vector of string to hold xmlmetadata.
1420 vector<string> xmlmeta_value;
1421 if (xmlmeta_num_total >0) {
1422 xmlmeta_value.resize(xmlmeta_num_total);
1423 for (int i = 0; i < xmlmeta_num_total; i++)
1424 xmlmeta_value[i]="";
1425 }
1426
1427 // Define a vector of string to hold subsetmetadata.
1428 vector<string> submeta_value;
1429 if (submeta_num_total >0) {
1430 submeta_value.resize(submeta_num_total);
1431 for (int i = 0; i < submeta_num_total; i++)
1432 submeta_value[i]="";
1433 }
1434
1435 // Define a vector of string to hold productmetadata.
1436 vector<string> prometa_value;
1437 if (prometa_num_total >0) {
1438 prometa_value.resize(prometa_num_total);
1439 for (int i = 0; i < prometa_num_total; i++)
1440 prometa_value[i]="";
1441 }
1442
1443 // For all other metadata, we don't need to calculate the value, just append them.
1444
1445 // Now we want to retrieve the metadata value and combine them into one string.
1446 // Here we have to remember the location of every element of the metadata if
1447 // this metadata has a suffix.
1448 for (hsize_t i = 0; i < nelems; i++) {
1449
1450         // The DDS parser only needs to parse the StructMetadata. So if the
1451         // st_only flag is true, we will only read the StructMetadata string.
1452         // StructMetadata is generated by the HDF-EOS5 library, so the
1453         // name "StructMetadata.??" won't change for real struct metadata.
1454         // However, we still assume that somebody may not use the HDF-EOS5
1455         // library to add StructMetadata; the name may be "structmetadata".
1456 if (true == s_st_only &&
1457 (((s_oname[i].find("StructMetadata"))!=0) &&
1458 ((s_oname[i].find("structmetadata"))!=0))){
1459 continue;
1460 }
1461
1462 // Open the dataset, dataspace, datatype, number of elements etc. for this metadata
1463 hid_t s_dset_id = -1;
1464 hid_t s_space_id = -1;
1465 hid_t s_ty_id = -1;
1466 hssize_t s_nelms = -1;
1467 size_t dtype_size = -1;
1468
1469 if ((s_dset_id = H5Dopen(ecs_grp_id,s_oname[i].c_str(),H5P_DEFAULT))<0){
1470 string msg = "Cannot open HDF5 dataset ";
1471 msg += s_oname[i];
1472 H5Gclose(ecs_grp_id);
1473 throw InternalErr(__FILE__, __LINE__, msg);
1474 }
1475
1476 if ((s_space_id = H5Dget_space(s_dset_id))<0) {
1477 string msg = "Cannot open the data space of HDF5 dataset ";
1478 msg += s_oname[i];
1479 H5Dclose(s_dset_id);
1480 H5Gclose(ecs_grp_id);
1481 throw InternalErr(__FILE__, __LINE__, msg);
1482 }
1483
1484 if ((s_ty_id = H5Dget_type(s_dset_id)) < 0) {
1485 string msg = "Cannot get the data type of HDF5 dataset ";
1486 msg += s_oname[i];
1487 H5Sclose(s_space_id);
1488 H5Dclose(s_dset_id);
1489 H5Gclose(ecs_grp_id);
1490 throw InternalErr(__FILE__, __LINE__, msg);
1491 }
1492 if ((s_nelms = H5Sget_simple_extent_npoints(s_space_id))<0) {
1493 string msg = "Cannot get the number of points of HDF5 dataset ";
1494 msg += s_oname[i];
1495 H5Tclose(s_ty_id);
1496 H5Sclose(s_space_id);
1497 H5Dclose(s_dset_id);
1498 H5Gclose(ecs_grp_id);
1499 throw InternalErr(__FILE__, __LINE__, msg);
1500 }
1501 if ((dtype_size = H5Tget_size(s_ty_id))==0) {
1502
1503 string msg = "Cannot get the data type size of HDF5 dataset ";
1504 msg += s_oname[i];
1505 H5Tclose(s_ty_id);
1506 H5Sclose(s_space_id);
1507 H5Dclose(s_dset_id);
1508 H5Gclose(ecs_grp_id);
1509 throw InternalErr(__FILE__, __LINE__, msg);
1510 }
1511
1512 // Obtain the real value of the metadata
1513 vector<char> s_buf(dtype_size*s_nelms +1);
1514
1515 if ((H5Dread(s_dset_id,s_ty_id,H5S_ALL,H5S_ALL,H5P_DEFAULT,&s_buf[0]))<0) {
1516
1517 string msg = "Cannot read HDF5 dataset ";
1518 msg += s_oname[i];
1519 H5Tclose(s_ty_id);
1520 H5Sclose(s_space_id);
1521 H5Dclose(s_dset_id);
1522 H5Gclose(ecs_grp_id);
1523 throw InternalErr(__FILE__, __LINE__, msg);
1524 }
1525
1526 // Now we can safely close datatype, data space and dataset IDs.
1527 H5Tclose(s_ty_id);
1528 H5Sclose(s_space_id);
1529 H5Dclose(s_dset_id);
1530
1531
1532 // Convert from the vector<char> to a C++ string.
1533 string tempstr(s_buf.begin(),s_buf.end());
1534 s_buf.clear();
1535 size_t temp_null_pos = tempstr.find_first_of('\0');
1536
1537         // temp_null_pos is the position of the NULL character, which is the last character of the string.
1538         // So the length of the string before the NULL is equal to
1539         // temp_null_pos, since positions start at 0.
1540 string finstr = tempstr.substr(0,temp_null_pos);
1541
1542 // For the DDS parser, only return StructMetadata
1543 if (StructMeta == metatype[i]) {
1544
1545 // Now obtain the corresponding value in integer type for the suffix. '0' to 0 etc.
1546 try {
1547 strmeta_num = get_metadata_num(s_oname[i]);
1548 }
1549 catch(...) {
1550 H5Gclose(ecs_grp_id);
1551 throw InternalErr(__FILE__,__LINE__,"Obtain structmetadata suffix error.");
1552
1553 }
1554             // This is probably not necessary, since structmetadata may always have a suffix.
1555             // It is left here in case the rules change or a file is generated without the HDF-EOS5 library.
1556             // When strmeta_num is -1, it means this metadata has no suffix. So the total structmetadata
1557             // is this string only.
1558 if (-1 == strmeta_num)
1559 total_strmeta_value = finstr;
1560 // strmeta_value at this point should be empty before assigning any values.
1561 else if (strmeta_value[strmeta_num]!="") {
1562 string msg = "The structmeta value array at this index should be empty string ";
1563 H5Gclose(ecs_grp_id);
1564 throw InternalErr(__FILE__, __LINE__, msg);
1565 }
1566 // assign the string vector to this value.
1567 else
1568 strmeta_value[strmeta_num] = finstr;
1569 }
1570
1571 // DAS parser needs all metadata.
1572 if (false == s_st_only &&
1573 (metatype[i] != StructMeta)) {
1574
1575 switch (metatype[i]) {
1576
1577 case CoreMeta:
1578 {
1579 if (coremeta_num_total < 0) {
1580 string msg = "There may be no coremetadata or coremetadata is not counted ";
1581 H5Gclose(ecs_grp_id);
1582 throw InternalErr(__FILE__, __LINE__, msg);
1583
1584 }
1585
1586 try {
1587 coremeta_num = get_metadata_num(s_oname[i]);
1588 }
1589 catch(...) {
1590 H5Gclose(ecs_grp_id);
1591 throw InternalErr(__FILE__,__LINE__,"Obtain coremetadata suffix error.");
1592
1593 }
1594
1595                     // When coremeta_num is -1, it means this metadata has no suffix. So the total coremetadata
1596                     // is this string only. Similar cases apply to the rest of the metadata.
1597 if ( -1 == coremeta_num )
1598 total_coremeta_value = finstr;
1599 else if (coremeta_value[coremeta_num]!="") {
1600 string msg = "The coremeta value array at this index should be empty string ";
1601 H5Gclose(ecs_grp_id);
1602 throw InternalErr(__FILE__, __LINE__, msg);
1603 }
1604
1605 // assign the string vector to this value.
1606 else
1607 coremeta_value[coremeta_num] = finstr;
1608 }
1609 break;
1610
1611 case ArchivedMeta:
1612 {
1613 if (archmeta_num_total < 0) {
1614 string msg = "There may be no archivemetadata or archivemetadata is not counted ";
1615 H5Gclose(ecs_grp_id);
1616 throw InternalErr(__FILE__, __LINE__, msg);
1617 }
1618 try {
1619 archmeta_num = get_metadata_num(s_oname[i]);
1620 }
1621 catch(...) {
1622 H5Gclose(ecs_grp_id);
1623 throw InternalErr(__FILE__,__LINE__,"Obtain archivemetadata suffix error.");
1624 }
1625 if (-1 == archmeta_num )
1626 total_archmeta_value = finstr;
1627 else if (archmeta_value[archmeta_num]!="") {
1628 string msg = "The archivemeta value array at this index should be empty string ";
1629 H5Gclose(ecs_grp_id);
1630 throw InternalErr(__FILE__, __LINE__, msg);
1631
1632 }
1633 // assign the string vector to this value.
1634 else
1635 archmeta_value[archmeta_num] = finstr;
1636 }
1637 break;
1638 case SubsetMeta:
1639 {
1640 if (submeta_num_total < 0) {
1641                         string msg = "There may be no subsetmetadata or subsetmetadata is not counted ";
1642 H5Gclose(ecs_grp_id);
1643 throw InternalErr(__FILE__, __LINE__, msg);
1644 }
1645 try {
1646 submeta_num = get_metadata_num(s_oname[i]);
1647 }
1648 catch(...) {
1649 H5Gclose(ecs_grp_id);
1650 throw InternalErr(__FILE__,__LINE__,"Obtain subsetmetadata suffix error.");
1651 }
1652 if (-1 == submeta_num )
1653 total_submeta_value = finstr;
1654 else if (submeta_value[submeta_num]!="") {
1655 string msg = "The submeta value array at this index should be empty string ";
1656 H5Gclose(ecs_grp_id);
1657 throw InternalErr(__FILE__, __LINE__, msg);
1658 }
1659 // assign the string vector to this value.
1660 else
1661 submeta_value[submeta_num] = finstr;
1662 }
1663 break;
1664 case ProductMeta:
1665 {
1666 if (prometa_num_total < 0) {
1667 string msg = "There may be no productmetadata or productmetadata is not counted ";
1668 H5Gclose(ecs_grp_id);
1669 throw InternalErr(__FILE__, __LINE__, msg);
1670 }
1671 try {
1672 prometa_num = get_metadata_num(s_oname[i]);
1673 }
1674 catch(...) {
1675 H5Gclose(ecs_grp_id);
1676 throw InternalErr(__FILE__,__LINE__,"Obtain productmetadata suffix error.");
1677 }
1678 if (prometa_num == -1)
1679 total_prometa_value = finstr;
1680 else if (prometa_value[prometa_num]!="") {
1681 string msg = "The productmeta value array at this index should be empty string ";
1682 H5Gclose(ecs_grp_id);
1683 throw InternalErr(__FILE__, __LINE__, msg);
1684 }
1685 // assign the string vector to this value.
1686 else
1687 prometa_value[prometa_num] = finstr;
1688 }
1689 break;
1690 case XMLMeta:
1691 {
1692 if (xmlmeta_num_total < 0) {
1693 string msg = "There may be no xmlmetadata or xmlmetadata is not counted ";
1694 H5Gclose(ecs_grp_id);
1695 throw InternalErr(__FILE__, __LINE__, msg);
1696 }
1697 try {
1698 xmlmeta_num = get_metadata_num(s_oname[i]);
1699 }
1700 catch(...) {
1701 H5Gclose(ecs_grp_id);
1702 throw InternalErr(__FILE__,__LINE__,"Obtain XMLmetadata suffix error.");
1703 }
1704 if (-1 == xmlmeta_num )
1705 total_xmlmeta_value = finstr;
1706 else if (xmlmeta_value[xmlmeta_num]!="") {
1707 string msg = "The xmlmeta value array at this index should be empty string ";
1708 H5Gclose(ecs_grp_id);
1709 throw InternalErr(__FILE__, __LINE__, msg);
1710 }
1711 // assign the string vector to this value.
1712 else
1713 xmlmeta_value[xmlmeta_num] = finstr;
1714 }
1715 break;
1716 case OtherMeta:
1717 {
1718 if (othermeta_num_total < 0) {
1719 string msg = "There may be no othermetadata or other metadata is not counted ";
1720 H5Gclose(ecs_grp_id);
1721 throw InternalErr(__FILE__, __LINE__, msg);
1722 }
1723 total_othermeta_value = total_othermeta_value + finstr;
1724 }
1725 break;
1726 default :
1727 {
1728 string msg = "Unsupported metadata type ";
1729 H5Gclose(ecs_grp_id);
1730 throw InternalErr(__FILE__, __LINE__, msg);
1731 }
1732 }
1733 }
1734 tempstr.clear();
1735 finstr.clear();
1736 }
1737
1738 // Now we need to handle the concatenation of the metadata
1739 // first StructMetadata
1740 if (strmeta_num_total > 0) {
1741         // The no-suffix case has already been taken care of.
1742 if (strmeta_num != -1) {
1743 for (int i = 0; i <strmeta_num_total; i++)
1744 total_strmeta_value +=strmeta_value[i];
1745 }
1746 }
1747
1748 // For the DAS handler
1749 if ( false == s_st_only) {
1750
1751 if (coremeta_num_total >0) {
1752 if (coremeta_num != -1) {
1753 for(int i = 0; i <coremeta_num_total; i++)
1754 total_coremeta_value +=coremeta_value[i];
1755 }
1756 }
1757
1758 if (archmeta_num_total >0) {
1759 if (archmeta_num != -1) {
1760 for(int i = 0; i <archmeta_num_total; i++)
1761 total_archmeta_value +=archmeta_value[i];
1762 }
1763 }
1764
1765 if (submeta_num_total >0) {
1766 if (submeta_num != -1) {
1767 for(int i = 0; i <submeta_num_total; i++)
1768 total_submeta_value +=submeta_value[i];
1769 }
1770 }
1771
1772 if (xmlmeta_num_total >0) {
1773 if (xmlmeta_num != -1) {
1774 for(int i = 0; i <xmlmeta_num_total; i++)
1775 total_xmlmeta_value +=xmlmeta_value[i];
1776 }
1777 }
1778
1779 if (prometa_num_total >0) {
1780 if (prometa_num != -1) {
1781 for(int i = 0; i <prometa_num_total; i++)
1782 total_prometa_value +=prometa_value[i];
1783 }
1784 }
1785 }
1786 H5Gclose(ecs_grp_id);
1787}
1788
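// A minimal sketch of how read_ecs_metadata() above is driven by the map_* functions in
// this file. With s_st_only == true only StructMetadata is gathered (used when only the
// file structure must be parsed); with false every ECS metadata category is filled in
// (the DAS attribute and DMR paths). "file_id" is an already-opened HDF5 file handle.
#if 0
    string st_str, core_str, arch_str, xml_str, subset_str, product_str, other_str;
    bool st_only = true;

    read_ecs_metadata(file_id, st_str, core_str, arch_str, xml_str,
                      subset_str, product_str, other_str, st_only);

    if ("" == st_str)
        throw InternalErr(__FILE__, __LINE__, "unable to obtain the HDF-EOS5 struct metadata");
#endif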
1789// Helper function for read_ecs_metadata. Get the number after the metadata name.
1790int get_metadata_num(const string & meta_str) {
1791
1792     // The normal metadata names should be like coremetadata.0, coremetadata.1, etc.
1793     // We just found some not-so-nice coremetadata names, such as coremetadata.0 and coremetadata.0.1, in a HIRDLS-MLS-Aura-L3 file.
1794     // We need to handle them. Here we assume no more than two dots in a name series. KY 2012-11-08
1795 size_t dot_pos = meta_str.find(".");
1796 if (dot_pos == string::npos) // No dot
1797 return -1;
1798 else if (meta_str.find_first_of(".") == meta_str.find_last_of(".")) { // One dot
1799 string num_str = meta_str.substr(dot_pos+1);
1800 stringstream ssnum(num_str);
1801 int num;
1802 ssnum >> num;
1803 if (ssnum.fail())
1804 throw InternalErr(__FILE__,__LINE__,"Suffix after dots is not a number.");
1805 return num;
1806 }
1807 else { // Two dots
1808 string str_after_first_dot = meta_str.substr(dot_pos+1);
1809 if (str_after_first_dot.find_first_of(".") != str_after_first_dot.find_last_of("."))
1810 throw InternalErr(__FILE__,__LINE__,"Currently don't support metadata names containing more than two dots.");
1811 // Here we don't check whether names such as coremetadata.0 and coremetadata.0.0 co-exist; having both ".0" and
1812 // ".0.0" would, if we are not mistaken, be insane.
1813 // Instead we hope that the data producers will produce names like coremetadata.0, coremetadata.0.1, coremetadata.0.2.
1814 // KY 2012-11-08
1815 size_t second_dot_pos = str_after_first_dot.find(".");
1816 string num_str = str_after_first_dot.substr(second_dot_pos+1);
1817 stringstream ssnum(num_str);
1818 int num;
1819 ssnum >> num;
1820 return num;
1821 }
1822
1823}
1824
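A few concrete inputs make the suffix rules above easier to follow. The sketch below is illustrative only (the metadata names are made-up examples) and is not part of heos5cfdap.cc.

// Illustrative check of the suffix rules implemented by get_metadata_num() above.
#include <cassert>
#include <string>

int get_metadata_num(const std::string &meta_str); // the helper defined above

void illustrate_get_metadata_num()
{
    assert(get_metadata_num("coremetadata") == -1);    // no dot: no numeric suffix
    assert(get_metadata_num("coremetadata.2") == 2);   // one dot: the index is 2
    assert(get_metadata_num("coremetadata.0.3") == 3); // two dots: the number after the second dot is used
}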
1825void map_eos5_cfdmr(D4Group *d4_root, hid_t file_id, const string &filename) {
1826
1827 BESDEBUG("h5","Coming to HDF-EOS5 products DMR mapping function map_eos5_cfdmr "<<endl);
1828
1829 string st_str ="";
1830 string core_str="";
1831 string arch_str="";
1832 string xml_str ="";
1833 string subset_str="";
1834 string product_str="";
1835 string other_str ="";
1836 bool st_only = false;
1837
1838 // Read ECS metadata: merge them into one C++ string
1839 read_ecs_metadata(file_id,st_str,core_str,arch_str,xml_str, subset_str,product_str,other_str,st_only);
1840 if(""==st_str) {
1841 string msg =
1842 "unable to obtain the HDF-EOS5 struct metadata ";
1843 throw InternalErr(__FILE__, __LINE__, msg);
1844 }
1845
1846 bool disable_ecsmetadata = HDF5RequestHandler::get_disable_ecsmeta();
1847 if(disable_ecsmetadata == false) {
1848
1849 bool is_check_disable_smetadata = HDF5RequestHandler::get_disable_structmeta();
1850
1851 if (false == is_check_disable_smetadata)
1852 add_grp_dap4_attr(d4_root,"StructMetadata",attr_str_c,st_str);
1853
1854 if(core_str != "")
1855 add_grp_dap4_attr(d4_root,"CoreMetadata",attr_str_c,core_str);
1856
1857 if(arch_str != "")
1858 add_grp_dap4_attr(d4_root,"ArchiveMetadata",attr_str_c,arch_str);
1859
1860 if(xml_str != "")
1861 add_grp_dap4_attr(d4_root,"XMLMetadata",attr_str_c,xml_str);
1862
1863 if(subset_str !="")
1864 add_grp_dap4_attr(d4_root,"SubsetMetadata",attr_str_c,subset_str);
1865
1866 if(product_str != "")
1867 add_grp_dap4_attr(d4_root,"ProductMetadata",attr_str_c,product_str);
1868
1869 if(other_str !="")
1870 add_grp_dap4_attr(d4_root,"OtherMetadata",attr_str_c,other_str);
1871 }
1872
1873 bool is_check_nameclashing = HDF5RequestHandler::get_check_name_clashing();
1874
1875 bool is_add_path_attrs = HDF5RequestHandler::get_add_path_attrs();
1876
1877 EOS5File *f = NULL;
1878
1879 try {
1880 f = new EOS5File(filename.c_str(),file_id);
1881 }
1882 catch(...) {
1883 throw InternalErr(__FILE__,__LINE__,"Cannot allocate the file object.");
1884 }
1885
1886 bool include_attr = true;
1887
1888 // This first "try-catch" block will use the parsed info
1889 try {
1890
1891 // Parse the StructMetadata.
1892 // Note: he5dds_scan_string() just retrieves the variable info;
1893 // it can still be used to handle the DMR, so there is no need to write another parser.
1894 // KY 2021-05-21
1895 HE5Parser p;
1896 HE5Checker c;
1897 he5dds_scan_string(st_str.c_str());
1898 he5ddsparse(&p);
1899 he5ddslex_destroy();
1900
1901 // Retrieve ProjParams from StructMetadata
1902 p.add_projparams(st_str);
1903#if 0
1904 //p.print();
1905#endif
1906
1907 // Check if the HDF-EOS5 grid has the valid parameters, projection codes.
1908 if (c.check_grids_unknown_parameters(&p)) {
1909 throw InternalErr("Unknown HDF-EOS5 grid parameters found in the file");
1910 }
1911
1912 if (c.check_grids_missing_projcode(&p)) {
1913 throw InternalErr("The HDF-EOS5 grid is missing the projection code ");
1914 }
1915
1916 // We gradually add the support of different projection codes
1917 if (c.check_grids_support_projcode(&p)) {
1918 throw InternalErr("The current projection code is not supported");
1919 }
1920
1921 // HDF-EOS5 provides default pixel and origin values if they are not defined.
1922 c.set_grids_missing_pixreg_orig(&p);
1923
1924 // Check whether the grids in this multi-grid file share the same lat/lon coordinate variables.
1925 bool grids_mllcv = c.check_grids_multi_latlon_coord_vars(&p);
1926
1927 // Retrieve all HDF5 info(Not the values)
1928 f->Retrieve_H5_Info(filename.c_str(),file_id,include_attr);
1929
1930 // Adjust EOS5 Dimension names/sizes based on the parsed results
1931 f->Adjust_EOS5Dim_Info(&p);
1932
1933 // Translate the parsed output to HDF-EOS5 grids/swaths/zonal.
1934 // Several maps related to dimensions and coordinates are set up here.
1935 f->Add_EOS5File_Info(&p, grids_mllcv);
1936
1937 // Add the dimension names
1938 f->Add_Dim_Name(&p);
1939 }
1940 catch (HDF5CF::Exception &e){
1941 if(f!=NULL)
1942 delete f;
1943 throw InternalErr(e.what());
1944 }
1945 catch(...) {
1946 if(f!=NULL)
1947 delete f;
1948 throw;
1949 }
1950
1951 // The parsed StructMetadata info (HE5Parser) is no longer used in this "try-catch" block.
1952 try {
1953
1954 // NASA Aura files need special handling, so first check whether this file is an Aura file.
1955 f->Check_Aura_Product_Status();
1956
1957 // Adjust the variable names.
1958 f->Adjust_Var_NewName_After_Parsing();
1959
1960 // Handle coordinate variables
1961 f->Handle_CVar();
1962
1963 // Adjust variable and dimension names again based on the handling of coordinate variables.
1964 f->Adjust_Var_Dim_NewName_Before_Flattening();
1965
1966
1967 // Old comments, leave them for the time being:
1968 // We need to use the CV units to distinguish lat/lon from the 3rd CV when
1969 // memory cache is turned on.
1970 //if((HDF5RequestHandler::get_lrdata_mem_cache() != NULL) ||
1971 // (HDF5RequestHandler::get_srdata_mem_cache() != NULL)){
1972
1973 // Handle unsupported datatypes including the attributes
1974 f->Handle_Unsupported_Dtype(true);
1975
1976 // Handle unsupported dataspaces including the attributes
1977 f->Handle_Unsupported_Dspace(true);
1978
1979 // We need to retrieve coordinate variable attributes for memory cache use.
1980 f->Retrieve_H5_CVar_Supported_Attr_Values();
1981
1982 f->Retrieve_H5_Supported_Attr_Values();
1983
1984 // Handle other unsupported objects,
1985 // currently it mainly generates the info. for the
1986 // unsupported objects other than datatype, dataspace,links and named datatype
1987 // This function needs to be called after retrieving supported attributes.
1988 f->Handle_Unsupported_Others(include_attr);
1989
1990#if 0
1991 else {
1992
1993 // Handle unsupported datatypes
1994 f->Handle_Unsupported_Dtype(include_attr);
1995
1996 // Handle unsupported dataspaces
1997 f->Handle_Unsupported_Dspace(include_attr);
1998
1999 }
2000#endif
2001
2002
2003 // Need to retrieve the units of CV when memory cache is turned on.
2004 // The units of CV will be used to distinguish whether this CV is
2005 // latitude/longitude or a third-dimension CV.
2006 // isLatLon() will use the units value.
2007 //if((HDF5RequestHandler::get_lrdata_mem_cache() != NULL) ||
2008 // (HDF5RequestHandler::get_srdata_mem_cache() != NULL))
2009 f->Adjust_Attr_Info();
2010
2011 // May need to adjust the object names for special objects. Currently no operations
2012 // are done in this routine.
2013 f->Adjust_Obj_Name();
2014
2015 // Flatten the object name
2016 f->Flatten_Obj_Name(include_attr);
2017
2018 // Handle name clashing
2019 if(true == is_check_nameclashing)
2020 f->Handle_Obj_NameClashing(include_attr);
2021
2022 // Check if this should follow COARDS; if yes, set the COARDS flag.
2023 f->Set_COARDS_Status();
2024
2025 // For COARDS, the dimension name needs to be changed.
2026 f->Adjust_Dim_Name();
2027 if(true == is_check_nameclashing)
2028 f->Handle_DimNameClashing();
2029
2030 f->Add_Supplement_Attrs(is_add_path_attrs);
2031
2032 // We need to turn off the very long string in the TES file to avoid
2033 // choking the netCDF Java tools, so this special-variable routine
2034 // is called last. We may be able to drop this workaround once netCDF handles
2035 // long strings better.
2036 f->Handle_SpVar_DMR();
2037
2038 // Handle coordinate attributes
2039 f->Handle_Coor_Attr();
2040 //f->Handle_SpVar_Attr();
2041 }
2042 catch (HDF5CF::Exception &e){
2043 if(f != NULL)
2044 delete f;
2045 throw InternalErr(e.what());
2046 }
2047
2048 // Generate EOS5 DMR
2049 try {
2050 gen_eos5_cfdmr(d4_root,f);
2051 }
2052 catch(...) {
2053 if (f!=NULL)
2054 delete f;
2055 throw;
2056 }
2057
2058 if (f!=NULL)
2059 delete f;
2060
2061}
2062
2063void gen_eos5_cfdmr(D4Group *d4_root, HDF5CF::EOS5File *f) {
2064
2065 BESDEBUG("h5","Coming to HDF-EOS5 products DMR generation function "<<endl);
2066 const vector<HDF5CF::Var *>& vars = f->getVars();
2067 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
2068 const string filename = f->getPath();
2069 const hid_t file_id = f->getFileID();
2070 const vector<HDF5CF::Group *>& grps = f->getGroups();
2071 const vector<HDF5CF::Attribute *>& root_attrs = f->getAttributes();
2072
2073 vector<HDF5CF::Group *>::const_iterator it_g;
2074 vector<HDF5CF::Attribute *>::const_iterator it_ra;
2075
2076 if (false == root_attrs.empty()) {
2077 for (it_ra = root_attrs.begin(); it_ra != root_attrs.end(); ++it_ra)
2078 map_cfh5_grp_attr_to_dap4(d4_root,*it_ra);
2079 }
2080
2081 // We use an attribute container for each group since the flattened output claims to have no hierarchy.
2082 if (false == grps.empty()) {
2083 for (it_g = grps.begin();
2084 it_g != grps.end(); ++it_g) {
2085 D4Attribute *tmp_grp = new D4Attribute;
2086 tmp_grp->set_name((*it_g)->getNewName());
2087 tmp_grp->set_type(attr_container_c);
2088
2089 for (it_ra = (*it_g)->getAttributes().begin();
2090 it_ra != (*it_g)->getAttributes().end(); ++it_ra) {
2091 map_cfh5_attr_container_to_dap4(tmp_grp,(*it_ra));
2092 }
2093 d4_root->attributes()->add_attribute_nocopy(tmp_grp);
2094 }
2095 }
2096
2097 // Read Variable info.
2098 vector<HDF5CF::Var *>::const_iterator it_v;
2099 vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
2100
2101 for (it_v = vars.begin(); it_v !=vars.end();++it_v) {
2102 BESDEBUG("h5","variable full path= "<< (*it_v)->getFullPath() <<endl);
2103 gen_dap_onevar_dmr(d4_root,*it_v,file_id,filename);
2104 }
2105
2106 // Handle EOS5 grid mapping info.
2107 if (f->Have_EOS5_Grids()==true)
2108 gen_dap_eos5cf_gm_dmr(d4_root,f);
2109
2110 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
2111 BESDEBUG("h5","variable full path= "<< (*it_cv)->getFullPath() <<endl);
2112 gen_dap_oneeos5cvar_dmr(d4_root,*it_cv,file_id,filename);
2113
2114 }
2115
2116 // CHECK ALL UNLIMITED DIMENSIONS from the coordinate variables based on the names.
2117 if(f->HaveUnlimitedDim() == true) {
2118
2119 string dods_extra = "DODS_EXTRA";
2120
2121 // If DODS_EXTRA exists, we will not create the unlimited dimensions.
2122 if(d4_root->attributes() != NULL) {
2123 //if((d4_root->attributes()->find(dods_extra))==NULL) {
2124
2125 string unlimited_dim_names ="";
2126
2127 for (it_cv = cvars.begin();
2128 it_cv != cvars.end(); it_cv++) {
2129
2130 // Check unlimited dimension names.
2131 for (vector<Dimension*>::const_iterator ird = (*it_cv)->getDimensions().begin();
2132 ird != (*it_cv)->getDimensions().end(); ++ird) {
2133
2134 // Currently we only check one unlimited dimension, which is the most
2135 // common case. When we receive the conventions from JG, we will add
2136 // support for multiple unlimited dimensions. KY 2016-02-09
2137 if((*ird)->HaveUnlimitedDim() == true) {
2138
2139 if(unlimited_dim_names=="")
2140 unlimited_dim_names = (*ird)->getNewName();
2141 else {
2142 if(unlimited_dim_names.rfind((*ird)->getNewName()) == string::npos) {
2143 unlimited_dim_names = unlimited_dim_names+" "+(*ird)->getNewName();
2144 }
2145 }
2146 }
2147 }
2148 }
2149
2150 if(unlimited_dim_names != "") {
2151 D4Attribute *dods_extra_attr = new D4Attribute(dods_extra,attr_container_c);
2152 D4Attribute *unlimited_dim_attr = new D4Attribute("Unlimited_Dimension",attr_str_c);
2153 unlimited_dim_attr->add_value(unlimited_dim_names);
2154 dods_extra_attr->attributes()->add_attribute_nocopy(unlimited_dim_attr);
2155 d4_root->attributes()->add_attribute_nocopy(dods_extra_attr);
2156
2157 }
2158 else
2159 throw InternalErr(__FILE__, __LINE__, "Unlimited dimension should exist.");
2160 //}
2161 }
2162
2163 }
2164
2165}
2166
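The DODS_EXTRA handling above attaches an attribute container to the DAP4 root group. The standalone sketch below repeats just that libdap pattern with a hypothetical dimension name ("nTimes"); it is an illustration, not part of the handler.

// Sketch only: attach an Unlimited_Dimension hint to a DAP4 root group,
// mirroring the DODS_EXTRA block in gen_eos5_cfdmr() above.
#include <libdap/D4Group.h>
#include <libdap/D4Attributes.h>

void add_unlimited_dim_hint(libdap::D4Group *d4_root)
{
    auto *dods_extra_attr =
        new libdap::D4Attribute("DODS_EXTRA", libdap::attr_container_c);
    auto *unlimited_dim_attr =
        new libdap::D4Attribute("Unlimited_Dimension", libdap::attr_str_c);

    // "nTimes" is a hypothetical dimension name used only for illustration.
    unlimited_dim_attr->add_value("nTimes");

    dods_extra_attr->attributes()->add_attribute_nocopy(unlimited_dim_attr);
    d4_root->attributes()->add_attribute_nocopy(dods_extra_attr);
}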
2167
2168void gen_dap_oneeos5cvar_dmr(D4Group* d4_root,const EOS5CVar* cvar,const hid_t file_id,const string & filename){
2169
2170 BESDEBUG("h5","Coming to gen_dap_oneeos5cvar_dmr() "<<endl);
2171 BaseType *bt = NULL;
2172
2173 switch(cvar->getType()) {
2174#define HANDLE_CASE(tid,type) \
2175 case tid: \
2176 bt = new (type)(cvar->getNewName(),cvar->getFullPath()); \
2177 break;
2178
2179 HANDLE_CASE(H5FLOAT32, HDF5CFFloat32);
2180 HANDLE_CASE(H5FLOAT64, HDF5CFFloat64);
2181 HANDLE_CASE(H5CHAR,HDF5CFInt8);
2182 HANDLE_CASE(H5UCHAR, HDF5CFByte);
2183 HANDLE_CASE(H5INT16, HDF5CFInt16);
2184 HANDLE_CASE(H5UINT16, HDF5CFUInt16);
2185 HANDLE_CASE(H5INT32, HDF5CFInt32);
2186 HANDLE_CASE(H5UINT32, HDF5CFUInt32);
2187 HANDLE_CASE(H5INT64, HDF5CFInt64);
2188 HANDLE_CASE(H5UINT64, HDF5CFUInt64);
2189 HANDLE_CASE(H5FSTRING, Str);
2190 HANDLE_CASE(H5VSTRING, Str);
2191 default:
2192 throw InternalErr(__FILE__,__LINE__,"unsupported data type.");
2193#undef HANDLE_CASE
2194 }
2195
2196 if (bt) {
2197
2198 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
2199 vector <HDF5CF::Dimension*>:: const_iterator it_d;
2200 vector <size_t> dimsizes;
2201 dimsizes.resize(cvar->getRank());
2202 for(int i = 0; i <cvar->getRank();i++)
2203 dimsizes[i] = (dims[i])->getSize();
2204
2205
2206 if(dims.empty())
2207 throw InternalErr(__FILE__,__LINE__,"the coordinate variables cannot be scalar.");
2208 switch(cvar->getCVType()) {
2209
2210 case CV_EXIST:
2211 {
2212
2213 bool is_latlon = cvar->isLatLon();
2214 HDF5CFArray *ar = NULL;
2215 try {
2216 bool is_dap4 = true;
2217 ar = new HDF5CFArray (
2218 cvar->getRank(),
2219 file_id,
2220 filename,
2221 cvar->getType(),
2222 dimsizes,
2223 cvar->getFullPath(),
2224 cvar->getTotalElems(),
2225 CV_EXIST,
2226 is_latlon,
2227 cvar->getCompRatio(),
2228 is_dap4,
2229 cvar->getNewName(),
2230 bt);
2231 }
2232 catch (...) {
2233 delete bt;
2234 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDF5CFArray.");
2235 }
2236
2237 for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
2238 if (""==(*it_d)->getNewName())
2239 ar->append_dim((*it_d)->getSize());
2240 else
2241 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
2242 }
2243
2244 ar->set_is_dap4(true);
2245 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2246 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2247 d4_root->add_var_nocopy(d4_var);
2248
2249 delete bt;
2250 delete ar;
2251 }
2252 break;
2253
2254 case CV_LAT_MISS:
2255 case CV_LON_MISS:
2256 {
2257
2258 HDFEOS5CFMissLLArray *ar = NULL;
2259 try {
2260#if 0
2261cerr<<"cvar zone here is "<<cvar->getZone() <<endl;
2262cerr<<"cvar Sphere here is "<<cvar->getSphere() <<endl;
2263cerr<<"cvar getParams here 1 is "<<cvar->getParams()[0]<<endl;
2264#endif
2265 ar = new HDFEOS5CFMissLLArray (
2266 cvar->getRank(),
2267 filename,
2268 file_id,
2269 cvar->getFullPath(),
2270 cvar->getCVType(),
2271 cvar->getPointLower(),
2272 cvar->getPointUpper(),
2273 cvar->getPointLeft(),
2274 cvar->getPointRight(),
2275 cvar->getPixelReg(),
2276 cvar->getOrigin(),
2277 cvar->getProjCode(),
2278 cvar->getParams(),
2279 cvar->getZone(),
2280 cvar->getSphere(),
2281 cvar->getXDimSize(),
2282 cvar->getYDimSize(),
2283 cvar->getNewName(),
2284 bt);
2285 }
2286 catch (...) {
2287 delete bt;
2288 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFMissLLArray.");
2289 }
2290
2291 for(it_d = dims.begin(); it_d != dims.end(); ++it_d) {
2292 if (""==(*it_d)->getNewName())
2293 ar->append_dim((*it_d)->getSize());
2294 else
2295 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
2296 }
2297
2298 ar->set_is_dap4(true);
2299 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2300 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2301 add_var_sp_attrs_to_dap4(d4_var,cvar);
2302 d4_root->add_var_nocopy(d4_var);
2303
2304 delete bt;
2305 delete ar;
2306 }
2307 break;
2308
2309 case CV_NONLATLON_MISS:
2310 {
2311
2312 if (cvar->getRank() !=1) {
2313 delete bt;
2314 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
2315 }
2316 int nelem = (cvar->getDimensions()[0])->getSize();
2317
2318 HDFEOS5CFMissNonLLCVArray *ar = NULL;
2319 try {
2320 ar = new HDFEOS5CFMissNonLLCVArray(
2321 cvar->getRank(),
2322 nelem,
2323 cvar->getNewName(),
2324 bt);
2325 }
2326 catch (...) {
2327 delete bt;
2328 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFMissNonLLCVArray.");
2329 }
2330
2331
2332 for(it_d = dims.begin(); it_d != dims.end(); it_d++) {
2333 if (""==(*it_d)->getNewName())
2334 ar->append_dim((*it_d)->getSize());
2335 else
2336 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
2337 }
2338
2339 ar->set_is_dap4(true);
2340 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2341 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2342 d4_root->add_var_nocopy(d4_var);
2343
2344 delete bt;
2345 delete ar;
2346
2347
2348 }
2349 break;
2350 case CV_SPECIAL:
2351 // Currently we only support Aura TES files. This may need to be revised when more
2352 // special products appear. KY 2012-2-3
2353 {
2354
2355 if (cvar->getRank() !=1) {
2356 delete bt;
2357 throw InternalErr(__FILE__, __LINE__, "The rank of missing Z dimension field must be 1");
2358 }
2359 int nelem = (cvar->getDimensions()[0])->getSize();
2360 HDFEOS5CFSpecialCVArray *ar = NULL;
2361
2362 try {
2363 ar = new HDFEOS5CFSpecialCVArray(
2364 cvar->getRank(),
2365 filename,
2366 file_id,
2367 cvar->getType(),
2368 nelem,
2369 cvar->getFullPath(),
2370 cvar->getNewName(),
2371 bt);
2372 }
2373 catch (...) {
2374 delete bt;
2375 throw InternalErr(__FILE__,__LINE__,"unable to allocate memory for HDFEOS5CFSpecialCVArray.");
2376 }
2377
2378
2379 for(it_d = dims.begin(); it_d != dims.end(); ++it_d){
2380 if (""==(*it_d)->getNewName())
2381 ar->append_dim((*it_d)->getSize());
2382 else
2383 ar->append_dim((*it_d)->getSize(), (*it_d)->getNewName());
2384 }
2385
2386 ar->set_is_dap4(true);
2387 BaseType* d4_var=ar->h5cfdims_transform_to_dap4(d4_root);
2388 map_cfh5_var_attrs_to_dap4(cvar,d4_var);
2389 d4_root->add_var_nocopy(d4_var);
2390 delete bt;
2391 delete ar;
2392 }
2393 break;
2394 case CV_MODIFY:
2395 default:
2396 delete bt;
2397 throw InternalErr(__FILE__,__LINE__,"Unsupported coordinate variable type.");
2398 }
2399
2400 }
2401
2402}
2403
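For readers unfamiliar with the HANDLE_CASE trick in the switch above: each invocation expands to an ordinary case label that allocates the matching HDF5CF wrapper type. For example, HANDLE_CASE(H5FLOAT32, HDF5CFFloat32) expands roughly to the fragment below; it is shown only to illustrate the macro and is not standalone code.

// Approximate preprocessor expansion of HANDLE_CASE(H5FLOAT32, HDF5CFFloat32):
case H5FLOAT32:
    bt = new (HDF5CFFloat32)(cvar->getNewName(), cvar->getFullPath());
    break;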
2404
2405// generate dmr info for grid mapping (gm: grid mapping)
2406void gen_dap_eos5cf_gm_dmr(libdap::D4Group* d4_root,HDF5CF::EOS5File*f) {
2407
2408 // grid mapping projection vars
2409 // and add grid_mapping attribute for non-cv vars
2410 gen_gm_proj_var_info(d4_root,f);
2411
2412 // special grid mapping dimension variables.
2413 gen_gm_proj_spvar_info(d4_root,f);
2414
2415}
2416
2417//(1) Add grid mapping projection vars if we have any
2418//(2) Add grid_mapping attributes for all non-cv vars
2419void gen_gm_proj_var_info(libdap::D4Group* d4_root,HDF5CF::EOS5File* f) {
2420
2421 BESDEBUG("h5","Coming to gen_gm_proj_var_info() "<<endl);
2422 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
2423 vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
2424
2425 // For multiple grids, multiple grid mapping variables are needed.
2426 // We use EOS5 coordinate variables to track this.
2427 unsigned short cv_lat_miss_index = 1;
2428 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
2429 if((*it_cv)->getCVType() == CV_LAT_MISS) {
2430 if((*it_cv)->getProjCode() != HE5_GCTP_GEO) {
2431 gen_gm_oneproj_var(d4_root,*it_cv,cv_lat_miss_index);
2432 cv_lat_miss_index++;
2433 }
2434 }
2435 }
2436}
2437
2438// Generate the dummy grid_mapping variable and its attributes, and add the
2439// grid_mapping attribute to all the non-cv variables of this grid.
2440void gen_gm_oneproj_var(libdap::D4Group*d4_root,
2441 const HDF5CF::EOS5CVar* cvar,
2442 const unsigned short g_suffix) {
2443
2444 BESDEBUG("h5","Coming to gen_gm_oneproj_var() "<<endl);
2445 EOS5GridPCType cv_proj_code = cvar->getProjCode();
2446 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
2447
2448 if(dims.size() !=2)
2449 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
2450
2451 // 1. Add the grid mapping dummy projection variable dmr for each grid
2452 // 2. Add the grid_mapping attribute for each variable that this projection applies
2453 // Currently we handle the sinusoidal, PS (Polar Stereographic) and LAMAZ (Lambert Azimuthal Equal Area) projections.
2454 if (HE5_GCTP_SNSOID == cv_proj_code || HE5_GCTP_PS == cv_proj_code || HE5_GCTP_LAMAZ== cv_proj_code) {
2455
2456 // Add the dummy projection variable.
2457 // The attributes of this variable can be used to store the grid mapping info.
2458 // To handle multi-grid cases, we need to add suffixes to distinguish them.
2459 string cf_projection_base = "eos_cf_projection";
2460 string cf_projection_name;
2461
2462 HDF5CFGeoCFProj * dummy_proj_cf = NULL;
2463
2464 if(HE5_GCTP_SNSOID == cv_proj_code) {
2465
2466 // As far as we know, one grid_mapping variable is sufficient even for
2467 // multiple sinusoidal grids, so we only create it for the first grid.
2468 cf_projection_name = cf_projection_base;
2469 if(g_suffix == 1)
2470 dummy_proj_cf = new HDF5CFGeoCFProj(cf_projection_name, cf_projection_name);
2471 }
2472 else {
2473 stringstream t_suffix_ss;
2474 t_suffix_ss << g_suffix;
2475 cf_projection_name = cf_projection_base + "_" + t_suffix_ss.str();
2476 dummy_proj_cf = new HDF5CFGeoCFProj(cf_projection_name, cf_projection_name);
2477 }
2478
2479 if(dummy_proj_cf != NULL) {
2480 dummy_proj_cf->set_is_dap4(true);
2481 add_gm_oneproj_var_dap4_attrs(dummy_proj_cf,cv_proj_code,cvar->getParams());
2482 d4_root->add_var_nocopy(dummy_proj_cf);
2483 }
2484
2485 // Add the grid_mapping attributes to all non-cv variables for the grid.
2486 add_cf_grid_cv_dap4_attrs(d4_root,cf_projection_name,dims);
2487 }
2488
2489}
2490
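The naming scheme above gives each non-sinusoidal grid its own dummy grid_mapping variable, while sinusoidal grids share a single one. A small sketch of the resulting names (mirroring the stringstream logic above, not taken from the handler) follows.

// Sketch only: compute the CF grid-mapping variable name used above.
#include <sstream>
#include <string>

std::string cf_projection_var_name(bool is_sinusoidal, unsigned short g_suffix)
{
    const std::string base = "eos_cf_projection";
    if (is_sinusoidal)
        return base;            // all sinusoidal grids share "eos_cf_projection"
    std::ostringstream oss;
    oss << base << "_" << g_suffix;
    return oss.str();           // e.g. "eos_cf_projection_2" for the second grid
}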
2491//Generate DMR of special dimension variables.
2492void gen_gm_proj_spvar_info(libdap::D4Group* d4_root,HDF5CF::EOS5File* f){
2493
2494 BESDEBUG("h5","Coming to HDF-EOS5 products grid mapping variable generation function "<<endl);
2495 const vector<HDF5CF::EOS5CVar *>& cvars = f->getCVars();
2496 vector<HDF5CF::EOS5CVar *>::const_iterator it_cv;
2497
2498 for (it_cv = cvars.begin(); it_cv !=cvars.end();++it_cv) {
2499 if((*it_cv)->getCVType() == CV_LAT_MISS) {
2500 if((*it_cv)->getProjCode() != HE5_GCTP_GEO)
2501 gen_gm_oneproj_spvar(d4_root,*it_cv);
2502 }
2503 }
2504}
2505
2506void gen_gm_oneproj_spvar(libdap::D4Group *d4_root,const HDF5CF::EOS5CVar *cvar) {
2507
2508 BESDEBUG("h5","Coming to gen_gm_oneproj_spvar() "<<endl);
2509
2510 float cv_point_lower = cvar->getPointLower();
2511 float cv_point_upper = cvar->getPointUpper();
2512 float cv_point_left = cvar->getPointLeft();
2513 float cv_point_right = cvar->getPointRight();
2514 EOS5GridPCType cv_proj_code = cvar->getProjCode();
2515 const vector<HDF5CF::Dimension *>& dims = cvar->getDimensions();
2516 if(dims.size() !=2)
2517 throw InternalErr(__FILE__,__LINE__,"Currently we only support the 2-D CF coordinate projection system.");
2518 add_gm_spcvs(d4_root,cv_proj_code,cv_point_lower,cv_point_upper,cv_point_left,cv_point_right,dims);
2519
2520}
2521
2522void add_var_sp_attrs_to_dap4(BaseType *d4_var,const EOS5CVar* cvar) {
2523
2524 if(cvar->getProjCode() == HE5_GCTP_LAMAZ) {
2525 if(cvar->getCVType() == CV_LAT_MISS) {
2526 add_var_dap4_attr(d4_var,"valid_min", attr_float64_c, "-90.0");
2527 add_var_dap4_attr(d4_var,"valid_max", attr_float64_c, "90.0");
2528 }
2529 else {
2530 add_var_dap4_attr(d4_var,"valid_min", attr_float64_c, "-180.0");
2531 add_var_dap4_attr(d4_var,"valid_max", attr_float64_c, "180.0");
2532 }
2533 }
2534
2535}
2536