Source code for openquake.calculators.export.hazard
# -*- coding: utf-8 -*-# vim: tabstop=4 shiftwidth=4 softtabstop=4## Copyright (C) 2014-2025 GEM Foundation## OpenQuake is free software: you can redistribute it and/or modify it# under the terms of the GNU Affero General Public License as published# by the Free Software Foundation, either version 3 of the License, or# (at your option) any later version.## OpenQuake is distributed in the hope that it will be useful,# but WITHOUT ANY WARRANTY; without even the implied warranty of# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the# GNU Affero General Public License for more details.## You should have received a copy of the GNU Affero General Public License# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.importreimportosimportsysimportjsonimportitertoolsimportcollectionsimportnumpyimportpandasfromopenquake.baselib.generalimportDictArray,AccumDictfromopenquake.baselibimporthdf5,writersfromopenquake.baselib.python3compatimportdecodefromopenquake.calculators.viewsimportview,text_tablefromopenquake.calculators.extractimportextract,get_sites,get_infofromopenquake.calculators.exportimportexportfromopenquake.commonlibimportcalc,utilF32=numpy.float32F64=numpy.float64U8=numpy.uint8U16=numpy.uint16U32=numpy.uint32# with compression you can save 60% of space by losing only 10% of saving timesavez=numpy.savez_compressed
def add_quotes(values):
    # used to quote source names in CSV files
    return ['"{}"'.format(value) for value in values]
@export.add(('ruptures', 'csv'))
def export_ruptures_csv(ekey, dstore):
    """
    Export the rupture information as a single 'ruptures.csv' file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list with the path of the exported file (empty for scenario)
    """
    oq = dstore['oqparam']
    if 'scenario' in oq.calculation_mode:
        # nothing to export for scenario calculations
        return []
    dest = dstore.export_path('ruptures.csv')
    arr = extract(dstore, 'rupture_info')
    if export.sanity_check:
        # report suspicious ruptures on stderr without aborting the export
        bad = view('bad_ruptures', dstore)
        if len(bad):  # nonempty
            print(text_table(bad), file=sys.stderr)
    comment = dstore.metadata
    comment.update(investigation_time=oq.investigation_time,
                   ses_per_logic_tree_path=oq.ses_per_logic_tree_path)
    arr.array.sort(order='rup_id')  # export in rupture ID order
    writers.write_csv(dest, arr, comment=comment)
    return [dest]
def export_hmaps_csv(key, dest, sitemesh, array, comment):
    """
    Export the hazard maps of the given realization into CSV.

    :param key: output_type and export_type
    :param dest: name of the exported file
    :param sitemesh: site collection
    :param array: a composite array of dtype hmap_dt
    :param comment: comment to use as header of the exported CSV file
    :returns: a list containing `dest`
    """
    # join site coordinates and hazard values into a single composite array
    composed = util.compose_arrays(sitemesh, array)
    writers.write_csv(dest, composed, comment=comment)
    return [dest]
def hazard_curve_name(dstore, ekey, kind):
    """
    :param dstore: datastore object providing `build_fname`
    :param ekey: the export key, a pair (datastore key, fmt)
    :param kind: the kind of key (e.g. 'mean', 'rlz-0', 'quantile-0.15')
    :returns: the file name built by the datastore
    """
    key, fmt = ekey
    prefixes = {'hcurves': 'hazard_curve',
                'hmaps': 'hazard_map',
                'uhs': 'hazard_uhs'}
    prefix = prefixes[key]
    if kind.startswith('quantile-'):
        # strip the 7 characters 'hazard_' and prepend 'quantile_'
        return dstore.build_fname('quantile_' + prefix[7:], kind[9:], fmt)
    return dstore.build_fname(prefix, kind, fmt)
def get_kkf(ekey):
    """
    :param ekey: export key, for instance ('uhs/rlz-1', 'xml')
    :returns: key, kind and fmt from the export key,
              i.e. 'uhs', 'rlz-1', 'xml'
    """
    key, fmt = ekey
    kind = ''
    if '/' in key:
        # only the first '/' separates key from kind
        key, kind = key.split('/', 1)
    return key, kind, fmt
def export_aelo_csv(key, dstore):
    """
    Export hcurves and uhs in an Excel-friendly format

    :param key: 'hcurves' or 'uhs'
    :param dstore: datastore object
    :returns: list with the path of the exported file
    """
    # in AELO mode there is a single site and a single statistics, the mean
    assert key in ('hcurves', 'uhs')
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    lon, lat = sitecol.lons[0], sitecol.lats[0]
    fname = hazard_curve_name(dstore, (key, 'csv'), 'mean')
    comment = dstore.metadata
    comment.update(lon=lon, lat=lat, kind='mean',
                   investigation_time=oq.investigation_time)
    if key == 'hcurves':
        arr = dstore['hcurves-stats'][0, 0]  # shape (M, L1)
        M, L1 = arr.shape
        # flat table with one row per (imt, iml) pair
        array = numpy.zeros(M*L1, [('imt', hdf5.vstr), ('iml', float),
                                   ('poe', float)])
        for m, imt in enumerate(oq.imtls):
            for li, iml in enumerate(oq.imtls[imt]):
                row = array[m*L1 + li]
                row['imt'] = imt
                row['iml'] = iml
                row['poe'] = arr[m, li]
        writers.write_csv(fname, array, comment=comment)
    elif key == 'uhs':
        arr = dstore['hmaps-stats'][0, 0]  # shape (M, P)
        # one row per period, one 'poe-<poe>' column per requested poe
        periods = [imt.period for imt in oq.imt_periods()]
        poes = [('poe-%s' % poe, float) for poe in oq.poes]
        array = numpy.zeros(len(periods), [('period', float)] + poes)
        for m, period in enumerate(periods):
            row = array[m]
            row['period'] = period
            for p, poe in enumerate(oq.poes):
                row['poe-%s' % poe] = arr[m, p]
        writers.write_csv(fname, array, comment=comment)
    return [fname]
def get_all_imtls(dstore):
    """
    :returns: a DictArray imt->imls if the datastore contains 'all_imtls',
              otherwise an empty dict
    """
    try:
        grp = dstore['all_imtls']
    except KeyError:
        return {}
    imts = grp.attrs['imts'].split()
    return DictArray({imt: grp[imt][:] for imt in imts})
def export_hcurves_by_imt_csv(
        key, kind, dstore, fname, sitecol, imtls, comment):
    """
    Export the curves of the given realization into CSV.

    :param key: output_type and export_type
    :param kind: a string with the kind of output (realization or statistics)
    :param dstore: a DataStore instance
    :param fname: name of the exported file
    :param sitecol: site collection
    :param imtls: intensity measure type and levels
    :param comment: comment dictionary
    :yields: the name of each exported file (one per IMT)
    """
    nsites = len(sitecol)
    for imt, imls in imtls.items():
        dest = add_imt(fname, imt)
        # one 'poe-<iml>' column per intensity measure level
        lst = [('lon', F32), ('lat', F32), ('depth', F32)]
        for iml in imls:
            lst.append(('poe-%.7f' % iml, F32))
        custom = 'custom_site_id' in sitecol.array.dtype.names
        if custom:
            lst.insert(0, ('custom_site_id', 'S8'))
        array = extract(dstore, 'hcurves?kind=%s&imt=%s' % (kind, imt))[kind]
        hcurves = numpy.zeros(nsites, lst)
        if custom:
            for sid, csi, lon, lat, dep in zip(
                    range(nsites), sitecol.custom_site_id,
                    sitecol.lons, sitecol.lats, sitecol.depths):
                hcurves[sid] = (csi, lon, lat, dep) + tuple(array[sid, 0, :])
        else:
            # NOTE(review): this re-creates the zeros array already built
            # above; looks redundant but harmless — confirm before removing
            hcurves = numpy.zeros(nsites, lst)
            for sid, lon, lat, dep in zip(
                    range(nsites), sitecol.lons, sitecol.lats,
                    sitecol.depths):
                hcurves[sid] = (lon, lat, dep) + tuple(array[sid, 0, :])
        comment.update(imt=imt)
        writers.write_csv(dest, hcurves, comment=comment,
                          header=[name for (name, dt) in lst])
        yield dest
@export.add(('hcurves', 'csv'), ('hmaps', 'csv'), ('uhs', 'csv'))
def export_hcurves_csv(ekey, dstore):
    """
    Exports the hazard curves into several .csv files

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    if os.environ.get('OQ_APPLICATION_MODE') == 'AELO':
        # AELO mode uses a dedicated single-site exporter
        return export_aelo_csv(ekey[0], dstore)

    oq = dstore['oqparam']
    info = get_info(dstore)
    R = dstore['full_lt'].get_num_paths()
    sitecol = dstore['sitecol']
    sitemesh = get_sites(sitecol)
    key, kind, fmt = get_kkf(ekey)
    fnames = []
    comment = dstore.metadata
    hmap_dt = oq.hmap_dt()
    # one file per kind (realization or statistic)
    for kind in oq.get_kinds(kind, R):
        fname = hazard_curve_name(dstore, (key, fmt), kind)
        comment.update(kind=kind, investigation_time=oq.investigation_time)
        if (key in ('hmaps', 'uhs') and oq.uniform_hazard_spectra or
                oq.hazard_maps):
            # hmap is only needed (and only defined) for the two
            # branches below that use it
            hmap = extract(dstore, 'hmaps?kind=' + kind)[kind]
        if key == 'uhs' and oq.poes and oq.uniform_hazard_spectra:
            uhs_curves = calc.make_uhs(hmap, info)
            writers.write_csv(
                fname, util.compose_arrays(sitemesh, uhs_curves),
                comment=comment)
            fnames.append(fname)
        elif key == 'hmaps' and oq.poes and oq.hazard_maps:
            fnames.extend(
                export_hmaps_csv(ekey, fname, sitemesh,
                                 hmap.flatten().view(hmap_dt), comment))
        elif key == 'hcurves':
            # shape (N, R|S, M, L1)
            if ('amplification' in oq.inputs and
                    oq.amplification_method == 'convolution'):
                # with convolution the curves are defined on the
                # soil intensities, not on the original imtls
                imtls = DictArray(
                    {imt: oq.soil_intensities for imt in oq.imtls})
            else:
                imtls = get_all_imtls(dstore) or oq.imtls
            fnames.extend(export_hcurves_by_imt_csv(
                ekey, kind, dstore, fname, sitecol, imtls, comment))
    return sorted(fnames)
# Uniform Hazard Spectrum record: `imls` holds the intensity measure
# levels, `location` the site they refer to
UHS = collections.namedtuple('UHS', 'imls location')
def get_metadata(rlzs, kind):
    """
    :param rlzs: realization array with field 'branch_path'
    :param str kind: kind of data, i.e. a key in the datastore
    :returns:
        a dictionary with smlt_path, gsimlt_path, statistics, quantile_value
    """
    metadata = {}
    if kind.startswith('rlz-'):
        # split the branch path of the given realization into its
        # source model and gsim logic tree components
        path = rlzs[int(kind[4:])]['branch_path']
        smlt_path, gslt_path = path.split('~')
        metadata['smlt_path'] = smlt_path
        metadata['gsimlt_path'] = gslt_path
    elif kind.startswith('quantile-'):
        metadata['statistics'] = 'quantile'
        metadata['quantile_value'] = float(kind[9:])
    elif kind in ('mean', 'max', 'std'):
        metadata['statistics'] = kind
    return metadata
@export.add(('cs-stats', 'csv'))
def export_cond_spectra(ekey, dstore):
    """
    Export one conditional-spectrum CSV file per site.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list of the exported file names
    """
    sitecol = dstore['sitecol']
    aw = dstore[ekey[0]]  # shape (N, P, K, M, 2)
    dframe = aw.to_dframe()
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    paths = []
    for sid in sitecol.sids:
        sub = dframe[dframe.site_id == sid]
        del sub['site_id']
        path = dstore.export_path('conditional-spectrum-%d.csv' % sid)
        meta = dstore.metadata.copy()
        meta['site_id'] = sid
        meta['lon'] = sitecol.lons[sid]
        meta['lat'] = sitecol.lats[sid]
        writer.save(sub, path, comment=meta)
        paths.append(path)
    return paths
@export.add(('median_spectrum_disagg', 'csv'))
def export_median_spectrum_disagg(ekey, dstore):
    """
    Export one CSV per (source group, IMT) from the
    'median_spectrum_disagg' datasets; the header comment refers to
    site #0 only.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list of the exported file names
    """
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    fnames = []
    totw = AccumDict(accum=0)  # total weight per IMT, across groups
    for grp_id, dset in dstore['median_spectrum_disagg'].items():
        array = dset[:]
        dtlist = [tup[:2] for tup in array.dtype.descr]
        for m, imt in enumerate(oq.imtls):
            arr = numpy.empty(len(array), dtlist)
            for col in arr.dtype.names:
                # mea/sig/wei columns have one value per IMT: take the
                # m-th one; the other columns are copied as they are
                if col.startswith(('mea', 'sig', 'wei')):
                    arr[col] = array[col][:, m]
                else:
                    arr[col] = array[col]
                if col.startswith('wei'):
                    totw[imt] += arr[col].sum()
            comment = dstore.metadata.copy()
            comment['site_id'] = 0
            comment['lon'] = sitecol.lons[0]
            comment['lat'] = sitecol.lats[0]
            fname = dstore.export_path(
                f'median_spectrum_disagg-{grp_id}-{imt}.csv')
            arr.sort(order='rup_id')  # export in rupture ID order
            writer.save(arr, fname, comment=comment)
            fnames.append(fname)
    # sanity check on the weights
    for imt in totw:
        print('tot weight for', imt, totw[imt])
        # assert abs(totw[imt] - 1) < .01, (imt, totw[imt])
    return fnames
# TODO: see if I can remove this
def _extract(hmap, imt, j):
    # hmap[imt] can be a tuple or a scalar if j=0
    value = hmap[imt]
    if not hasattr(value, '__iter__'):
        # scalar: only the first element can be asked for
        assert j == 0
        return value
    return value[j]
@export.add(('avg_gmf', 'csv'))
def export_avg_gmf_csv(ekey, dstore):
    """
    Export the 'avg_gmf' dataset as a single CSV with one gmv_<imt> and
    one gsd_<imt> column per intensity measure type.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list with the path of the exported file
    """
    oq = dstore['oqparam']
    if dstore.parent:
        # read the (complete) site collection from the parent calculation
        sitecol = dstore.parent['sitecol']
        if 'complete' in dstore.parent:
            sitecol.complete = dstore.parent['complete']
    else:
        sitecol = dstore['sitecol']
        if 'complete' in dstore:
            sitecol.complete = dstore['complete']
    if 'custom_site_id' in sitecol.array.dtype.names:
        dic = dict(custom_site_id=decode(sitecol.complete.custom_site_id))
    else:
        dic = dict(site_id=sitecol.complete.sids)
    dic['lon'] = sitecol.complete.lons
    dic['lat'] = sitecol.complete.lats
    data = dstore['avg_gmf'][:]  # shape (2, N, C)
    imts = list(oq.imtls)
    for m, imt in enumerate(oq.all_imts()):
        # prefer the original imtls name when available
        # NOTE(review): presumably all_imts() may contain more (secondary)
        # IMTs than imtls — confirm against the calculator
        if m < len(imts):
            imt = imts[m]
        dic['gmv_' + imt] = data[0, :, m]
        dic['gsd_' + imt] = data[1, :, m]
    fname = dstore.build_fname('avg_gmf', '', 'csv')
    writers.CsvWriter(fmt=writers.FIVEDIGITS).save(
        pandas.DataFrame(dic), fname, comment=dstore.metadata)
    return [fname]
def _expand_gmv(array, imts):
    # the array-field gmv becomes a set of scalar fields gmv_<imt>
    dtype = array.dtype
    assert dtype['gmv'].shape[0] == len(imts)
    dtlist = []
    for name in dtype.names:
        dt = dtype[name]
        if name == 'gmv':
            # expand the vector field into one float32 field per IMT
            for imt in imts:
                dtlist.append(('gmv_' + imt, F32))
        elif name in ('sid', 'eid'):
            dtlist.append((name, dt))
        else:  # secondary perils
            # NOTE(review): this branch is identical to the one above;
            # kept separate presumably for clarity — confirm
            dtlist.append((name, dt))
    new = numpy.zeros(len(array), dtlist)
    imti = {imt: i for i, imt in enumerate(imts)}
    for name, _dt in dtlist:
        if name.startswith('gmv_'):
            # pick the column of the original vector field for this IMT
            new[name] = array['gmv'][:, imti[name[4:]]]
        else:
            new[name] = array[name]
    return new


# poe/iml with the labelled disaggregation matrix they refer to
DisaggMatrix = collections.namedtuple(
    'DisaggMatrix', 'poe iml dim_labels matrix')
def _add_iml(df, imtls):
    # add field iml and remove field lvl in a dataframe with fields imt, lvl
    out = []
    for imt in imtls:
        imls = imtls[imt]
        # .copy() avoids chained assignment on a slice of `df`
        # (SettingWithCopyWarning; a silent no-op under copy-on-write)
        dframe = df[df.imt == imt].copy()
        dframe['iml'] = imls[dframe.lvl]  # map each level index to its IML
        del dframe['lvl']
        out.append(dframe)
    return pandas.concat(out)
@export.add(('disagg-rlzs', 'csv'), ('disagg-stats', 'csv'),
            ('disagg-rlzs-traditional', 'csv'))
def export_disagg_csv(ekey, dstore):
    """
    Export the disaggregation outputs, one CSV file per pair
    (disagg_output, site), with the bin edges in the header comment.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: sorted list of the exported file names
    """
    name, _ext = ekey
    spec = name[7:]  # rlzs, stats, rlzs-traditional, stats-traditional
    oq = dstore['oqparam']
    sitecol = dstore['sitecol']
    ws = dstore['weights'][:]
    best_rlzs = dstore['best_rlzs'][:]
    N = len(best_rlzs)
    P = len(oq.poes) or 1
    fnames = []
    bins = {name: dset[:] for name, dset in dstore['disagg-bins'].items()}
    ex = 'disagg?kind=%s&site_id=%s&spec=%s'
    trad = '-traditional' if 'traditional' in name else ''
    skip_keys = ('Mag', 'Dist', 'Lon', 'Lat', 'Eps', 'TRT')
    metadata = dstore.metadata
    # stringified poes_disagg, padded with 'nan' up to P elements
    poes_disagg = ['nan'] * P
    for p in range(P):
        try:
            poes_disagg[p] = str(oq.poes_disagg[p])
        except IndexError:
            pass
    writer = writers.CsvWriter(fmt='%.5E')
    for s in range(N):
        lon, lat = sitecol.lons[s], sitecol.lats[s]
        md = dict(investigation_time=oq.investigation_time,
                  mag_bin_edges=bins['Mag'].tolist(),
                  dist_bin_edges=bins['Dist'].tolist(),
                  lon_bin_edges=bins['Lon'][s].tolist(),
                  lat_bin_edges=bins['Lat'][s].tolist(),
                  eps_bin_edges=bins['Eps'].tolist(),
                  tectonic_region_types=decode(bins['TRT'].tolist()),
                  lon=lon, lat=lat)
        if spec.startswith('rlzs') or oq.iml_disagg:
            weights = ws[best_rlzs[s]]
            weights /= weights.sum()  # normalize to 1
            md['weights'] = weights.tolist()
            md['rlz_ids'] = best_rlzs[s].tolist()
        iml2 = dstore['hmap3'][s]  # shape (M, P)
        metadata.update(md)
        for k in oq.disagg_outputs:
            aw = extract(dstore, ex % (k, s, spec))
            if aw.array.sum() == 0:
                continue
            df = aw.to_dframe(skip_zeros=False)
            # move the columns imt and poe at the beginning for backward compat
            cols = [col for col in df.columns if col not in ('imt', 'poe')]
            # add the IMLs corresponding to the mean hazard maps
            cols = ['imt', 'iml', 'poe'] + cols
            imt2idx = {imt: m for m, imt in enumerate(oq.imtls)}
            poe2idx = {poe: p for p, poe in enumerate(df.poe.unique())}
            imt_idx = [imt2idx[imt] for imt in df.imt]
            poe_idx = [poe2idx[poe] for poe in df.poe]
            df['iml'] = iml2[imt_idx, poe_idx]
            df = pandas.DataFrame(
                {col: df[col] for col in cols}).sort_values(['imt', 'poe'])
            # compute fname before branching, so it is defined in both
            # branches (previously it was unbound for an empty df)
            stat = '-mean' if name == 'disagg-stats' else ''
            fname = dstore.export_path('%s%s%s-%d.csv' % (k, stat, trad, s))
            if len(df):
                com = {key: value for key, value in metadata.items()
                       if value is not None and key not in skip_keys}
                # NOTE(review): this update re-adds the keys filtered
                # out just above — confirm the filter is still wanted
                com.update(metadata)
                writer.save(df, fname, comment=com)
                fnames.append(fname)
            else:
                # fixed: was print('...%s...', fname), which printed the
                # literal '%s' and the filename as a second argument
                print('Empty file %s not saved' % fname)
    return sorted(fnames)
@export.add(('event_based_mfd', 'csv'))
def export_event_based_mfd(ekey, dstore):
    """
    Export the magnitude-frequency distribution as 'event_based_mfd.csv'.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list with the path of the exported file (empty for scenario)
    """
    if dstore['oqparam'].investigation_time is None:
        # there is no MFD in scenario calculation
        return []
    aw = extract(dstore, 'event_based_mfd?')
    dest = dstore.export_path('event_based_mfd.csv')
    table = numpy.zeros(len(aw.mag), [('mag', float), ('freq', float)])
    table['mag'] = numpy.round(aw.mag, 1)
    table['freq'] = aw.freq
    writers.write_csv(dest, table, fmt='%.7e', comment=dstore.metadata)
    return [dest]
# because of the code in server.views.calc_results we are not visualizing
# .txt outputs, so we use .rst here
@export.add(('asce07', 'csv'), ('asce41', 'csv'))
def export_asce(ekey, dstore):
    """
    Export one (parameter, value) CSV per site from the JSON-encoded
    asce07/asce41 datasets.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list of the exported file names, one per site
    """
    sitecol = dstore['sitecol']
    writer = writers.CsvWriter(fmt='%.5f')  # hoisted out of the loop
    fnames = []
    for s, site in enumerate(sitecol):
        js = dstore[ekey[0]][s].decode('utf8')
        dic = json.loads(js)
        fname = dstore.export_path(ekey[0] + '-' + str(s) + '.csv')
        comment = dstore.metadata.copy()
        comment['lon'] = sitecol.lons[s]
        comment['lat'] = sitecol.lats[s]
        comment['vs30'] = sitecol.vs30[s]
        comment['site_name'] = dstore['oqparam'].description
        # e.g. 'CCA example'
        writer.save(dic.items(), fname, header=['parameter', 'value'],
                    comment=comment)
        # fixed: previously only the last site's file was returned,
        # so the other exported files were never reported
        fnames.append(fname)
    return fnames
# NB: exporting only the site #0; this is okay
@export.add(('mag_dst_eps_sig', 'csv'))
def export_mag_dst_eps_sig(ekey, dstore):
    """
    Export the mag_dst_eps_sig dataset of site #0 as a CSV file.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list with the path of the exported file
    """
    key = ekey[0]
    table = dstore[key + '/0'][:]
    sites = dstore['sitecol']
    dest = dstore.export_path('%s.csv' % key)
    meta = dstore.metadata.copy()
    meta['lon'] = sites.lons[0]
    meta['lat'] = sites.lats[0]
    meta['vs30'] = sites.vs30[0]
    meta['site_name'] = dstore['oqparam'].description  # e.g. 'CCA example'
    writers.CsvWriter(fmt='%.5f').save(table, dest, comment=meta)
    return [dest]
@export.add(('trt_gsim', 'csv'))
def export_trt_gsim(ekey, dstore):
    """
    Export a CSV with fields (grp_id, trt, gsim)
    """
    gsims = dstore['gsims'][:]
    groups = dstore['source_groups'][:][['grp_id', 'trt', 'gsims']]
    groups.sort(order='grp_id')
    rows = []
    start = 0  # running offset into the flat gsims array
    for grp_id, trt, num in groups:
        for gsim in gsims[start:start + num]:
            # keep multi-line gsim representations on a single CSV line
            rows.append((grp_id, trt, gsim.replace(b'\n', b'\\n')))
        start += num
    dest = dstore.export_path('%s.csv' % ekey[0])
    writers.CsvWriter().save(rows, dest, ['grp_id', 'trt', 'gsim'],
                             comment=dstore.metadata)
    return [dest]