# Source code for openquake.hazardlib.gsim.sgobba_2020
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2020, GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.gsim.sgobba_2020` implements
:class:`~openquake.hazardlib.gsim.sgobba_2020.SgobbaEtAl2020`
"""
import os
import copy
import numpy as np
import pandas as pd
from scipy.constants import g as gravity_acc
from scipy.spatial import cKDTree
from openquake.hazardlib.geo import Point, Polygon
from openquake.hazardlib import const
from openquake.hazardlib.geo.mesh import Mesh
from openquake.hazardlib.imt import PGA, SA
from openquake.hazardlib.gsim.base import GMPE, CoeffsTable

# From http://www.csgnetwork.com/degreelenllavcalc.html
LEN_1_DEG_LAT_AT_43pt5 = 111.10245
LEN_1_DEG_LON_AT_43pt5 = 80.87665

# Folder shipping the model-specific tables (grid, events, site and
# cluster coefficients)
DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sgobba_2020')

# Corner coordinates (lon, lat) of the polygons bounding the cluster
# regions for which path corrections are available
REGIONS = {'1': [[13.37, 42.13], [13.60, 42.24], [13.48, 42.51],
                 [13.19, 42.36]],
           '4': [[13.26, 42.41], [13.43, 42.49], [13.27, 43.02],
                 [12.96, 42.86]],
           '5': [[13.03, 42.90], [13.21, 42.99], [13.10, 43.13],
                 [12.90, 43.06]]}

CONSTS = {'Mh': 5.0,
          'Rref': 1.0,
          'PseudoDepth': 6.0}


def _get_cluster_correction(dat, C, ctx, imt):
    """
    Get the cluster (path) correction and the associated standard
    deviation components. The user can specify various options through
    the cluster parameter. The available options are:

    - cluster = None
        In this case the code finds the most appropriate correction using
        the rupture position
    - cluster = 0
        No cluster correction
    - cluster = 1 or 4 or 5
        The code uses the correction for the given cluster

    :returns:
        A tuple ``(correction, tau_L2L, Bp_model, phi_P2P)``
    """
    cluster = dat.cluster
    shape = ctx.sids.shape
    # NB: was `np.zeros_like(shape)`, which builds an integer array with
    # one element per *dimension* of `shape` rather than an array of
    # zeros with that shape
    correction = np.zeros(shape)
    # st.dev.
    tau_L2L = np.zeros(shape)
    Bp_model = np.zeros(shape)
    phi_P2P = np.zeros(shape)

    # No cluster correction
    if cluster == 0:
        tau_L2L = C['tau_L2L']
        phi_P2P = C['phi_P2P']
        return correction, tau_L2L, Bp_model, phi_P2P

    # the code finds the most appropriate correction
    if cluster is None:
        mesh = Mesh(np.array([ctx.hypo_lon]), np.array([ctx.hypo_lat]))
        # midp = ctx.surface.get_middle_point()
        # mesh = Mesh(np.array([midp.longitude]), np.array([midp.latitude]))
        for key in REGIONS:
            coo = np.array(REGIONS[key])
            pnts = [Point(lo, la) for lo, la in zip(coo[:, 0], coo[:, 1])]
            poly = Polygon(pnts)
            within = poly.intersects(mesh)
            if within.all():
                cluster = int(key)
                break

    if cluster is None:
        # if OUT clusters do not apply corrections
        tau_L2L = C['tau_L2L']
        phi_P2P = C['phi_P2P']
        return correction, tau_L2L, Bp_model, phi_P2P
    else:
        # if IN clusters apply corrections
        # Cluster coefficients
        fname = 'P_model_cluster{:d}.csv'.format(cluster)
        fname = os.path.join(DATA_FOLDER, fname)
        data = np.loadtxt(fname, delimiter=",", skiprows=1)
        # for st.dev.
        fname2 = 'beta_dP2P_cluster{:d}.csv'.format(cluster)
        fname2 = os.path.join(DATA_FOLDER, fname2)
        data2 = np.loadtxt(fname2, delimiter=",", skiprows=1)
        # Compute the coefficients by interpolating at the IMT period
        per = imt.period
        for idx in np.unique(dat.idxs):
            tmp = data[int(idx)]
            correction[dat.idxs == idx] = np.interp(per, dat.PERIODS, tmp[0:5])
        # Adding L2L correction
        label = "dL2L_cluster{:d}".format(cluster)
        correction += C[label]
        # compute st.dev.
        for idx in np.unique(dat.idxs):
            tmp2 = data2[int(idx)]
            Bp_model[dat.idxs == idx] = np.interp(per, dat.PERIODS, tmp2[0:5])
        return correction, tau_L2L, Bp_model, phi_P2P


def _get_distance_term(C, mag, ctx):
    """
    Eq.3 - page 3. Magnitude-dependent geometrical spreading plus
    anelastic attenuation, computed from the Joyner-Boore distance with a
    fixed pseudo-depth.
    """
    term1 = C['c1'] * (mag - C['mref']) + C['c2']
    tmp = np.sqrt(ctx.rjb**2 + CONSTS['PseudoDepth']**2)
    term2 = np.log10(tmp / CONSTS['Rref'])
    term3 = C['c3'] * (tmp - CONSTS['Rref'])
    return term1 * term2 + term3


def _get_magnitude_term(C, mag):
    """
    Eq.2 - page 3. Bilinear magnitude scaling with hinge magnitude Mh.
    """
    return np.where(mag <= CONSTS['Mh'],
                    C['b1'] * (mag - CONSTS['Mh']),
                    C['b2'] * (mag - CONSTS['Mh']))


def _get_site_correction(data, shape, imt):
    """
    Get the site correction and the site-to-site standard deviation
    terms, interpolated at the period of `imt` for the grid nodes in
    ``data.idxs``.

    :returns:
        A tuple ``(correction, Bs_model, phi_S2Sref)``
    """
    correction = np.zeros(shape)
    # stand.dev.
    Bs_model = np.zeros(shape)
    phi_S2Sref = np.zeros(shape)
    per = imt.period
    for idx in np.unique(data.idxs):
        tmp = data.Smodel[int(idx)]
        correction[data.idxs == idx] = np.interp(per, data.PERIODS, tmp[0:5])
        tmp2 = data.betaS2S[int(idx)]
        Bs_model[data.idxs == idx] = np.interp(per, data.PERIODS, tmp2[0:5])
    return correction, Bs_model, phi_S2Sref
# NB: the implementation here is HORRIBLE performance-wise,
# because it is using KDTree and pandas!
class SgobbaEtAl2020(GMPE):
    """
    Implements the GMM proposed by Sgobba et al. (2020).
    Warning: This GMM is not meant for national models where it would be
    too slow, it is meant for scenario calculations.

    :param event_id:
        A string identifying an event amongst the ones comprised in the
        list available in the file `event.csv`
    :param directionality:
        A boolean
    :param cluster:
        If set to 'None', the OQ Engine finds the corresponding cluster
        using the rupture epicentral location. If cluster=0, no cluster
        correction applied. Otherwise, if an integer ID is provided, that
        corresponds to the cluster id (available cluster indexes are 1, 4
        and 5), the corresponding correction is applied.
    """
    #: Supported tectonic region type is 'active shallow crust'
    DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST

    #: Set of :mod:`intensity measure types <openquake.hazardlib.imt>`
    #: this GSIM can calculate. A set should contain classes from module
    #: :mod:`openquake.hazardlib.imt`.
    DEFINED_FOR_INTENSITY_MEASURE_TYPES = {PGA, SA}

    #: Supported intensity measure component is the geometric mean of two
    #: horizontal components
    DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.GEOMETRIC_MEAN

    #: Supported standard deviation types are inter-event, intra-event
    #: and total
    DEFINED_FOR_STANDARD_DEVIATION_TYPES = {
        const.StdDev.TOTAL, const.StdDev.INTER_EVENT, const.StdDev.INTRA_EVENT}

    #: Required site parameters are lon and lat
    REQUIRES_SITES_PARAMETERS = {'lon', 'lat'}

    #: Required rupture parameters is magnitude, hypo_lat, hypo_lon
    REQUIRES_RUPTURE_PARAMETERS = {'mag', 'hypo_lon', 'hypo_lat'}

    #: Required distance measure is Rjb
    REQUIRES_DISTANCES = {'rjb'}

    #: Periods (s) at which the model coefficient tables are defined
    PERIODS = np.array([0, 0.2, 0.50251256281407, 1.0, 2.0])

    def __init__(self, event_id=None, directionality=False, cluster=None,
                 site=False, bedrock=False):
        self.event_id = event_id
        self.directionality = directionality
        self.cluster = cluster
        self.site = site
        self.bedrock = bedrock
        # Get site indexes. They are used for the site correction and the
        # cluster (path) correction.
        # Load the coordinates of the grid
        fname = os.path.join(DATA_FOLDER, 'grid.csv')
        coo = np.fliplr(np.loadtxt(fname, delimiter=";"))
        self.kdt = cKDTree(coo)
        # Load the table with the between-event coefficients
        if event_id is not None:
            self.event_id = event_id
            fname = os.path.join(DATA_FOLDER, 'event.csv')
            # Create dataframe with events
            df = pd.read_csv(fname, dtype={'id': str})
            df.set_index('id', inplace=True)
            self.df = df
            assert event_id in df.index.values
        # Site coefficients
        fname = os.path.join(DATA_FOLDER, "S_model.csv")
        self.Smodel = np.loadtxt(fname, delimiter=",", skiprows=1)
        fname = os.path.join(DATA_FOLDER, "beta_dS2S.csv")
        self.betaS2S = np.loadtxt(fname, delimiter=",", skiprows=1)
[docs]defcompute(self,ctx:np.recarray,imts,mean,sig,tau,phi):""" Eq.1 - page 2 """form,imtinenumerate(imts):# Ergodic coeffsC=self.COEFFS[imt]# between-eventifself.event_idisnotNone:label="dBe_{:s}".format(str(imt))self.be=self.df.loc[self.event_id][label]self.be_std=0.0else:self.be_std=C['tau_ev']self.be=0.0# Site correctionpoints=np.array([ctx.lon,ctx.lat]).T# shape (N, 2)dsts,idxs=self.kdt.query(points)dat=Data(self.Smodel,self.cluster,self.PERIODS,self.betaS2S,idxs)sc=0phi_S2Sref=C['phi_S2S_ref']Bs_model=np.zeros(ctx.sids.shape)ifself.siteandself.bedrockisFalse:sc,Bs_model,phi_S2Sref=_get_site_correction(dat,ctx.sids.shape,imt)cc,tau_L2L,Bp_model,phi_P2P=_get_cluster_correction(dat,C,ctx,imt)# Get meanmean[m]=(C['a']+_get_magnitude_term(C,ctx.mag)+_get_distance_term(C,ctx.mag,ctx)+sc+cc+self.be)# To natural logarithm and fraction of gmean[m]=np.log(10.0**mean[m]/(gravity_acc*100))# Get stdsstd=np.sqrt(C['sigma_0']**2+self.be_std**2+tau_L2L**2+Bs_model+phi_S2Sref**2+Bp_model+phi_P2P**2)sig[m]=np.log(10.0**std)tau[m]=np.log(10.0**self.be_std)phi[m]=np.log(10.0**np.sqrt((std**2-self.be_std**2)))