## R-script for testing the Hasselmann model with estimates of observed forcing and the global mean temperature
## from CMIP5 GCMs and the NCEP/NCAR reanalysis.
## Run this script in RStudio (https://www.rstudio.com/).
## The Hasselmann model is expressed by the following equation:
## C d/dt DT = Q - L DT
## We can evaluate how well it fits real data through the regression equation:
## y = a x1 + b x2 + const
## where 'y' is Q, 'a' is C, 'x1' is d/dt DT, 'b' is L, and 'x2' is DT.
## Rasmus Benestad, 2018-01-20
## The R-package 'esd' is available from GitHub.com/metno/esd - instructions on how to install and
## documentation are available from the wiki-page of the GitHub repository.
library(esd)

## Draw a shaded ellipse marking the 2-sigma error range around a point estimate:
marker <- function(x,y,dx,dy,col=rgb(0.7,0,0,0.05),border=rgb(0.7,0,0,0.3)) {
  wt <- seq(0,2*pi,length=360)
  polygon(x + 2*dx*cos(wt),y + 2*dy*sin(wt),col=col,border=border)
}

## Read the Crowley (2000) forcing estimates and sum the components to get the total forcing Q:
Crowley2000 <- read.table('ftp://ftp.ncdc.noaa.gov/pub/data/paleo/gcmoutput/crowley2000/forc-total-4_12_01.txt',
                          skip=50,header=TRUE)
Q <- zoo(x=rowSums(Crowley2000[,2:5]),order.by=Crowley2000[,1])
## Global mean 2m temperature from the CMIP5 runs (provided with 'esd'):
data("global.t2m.cmip5")

## Make sure all data are anomalies with a common baseline: 1961-1990
Q <- anomaly(Q,ref=1961:1990)
global.t2m.cmip5 <- anomaly(global.t2m.cmip5,ref=1961:1990)

## Solve for C and L through regression for n model runs:
n <- dim(global.t2m.cmip5)[2]
C <- rep(NA,n); L <- C; dC <- C; dL <- L

par(bty='n')
plot(window(Q,start=1900,end=1999),lwd=3,main='Testing the Hasselmann model',ylab='Q (W/m**2)',xlab='',
     sub='Crowley 2000 Total Forcing updated April 2001')
grid()

for (i in 1:n) {
  y <- matchdate(Q,it=global.t2m.cmip5)
  x2 <- matchdate(global.t2m.cmip5[,i],it=Q)
  x1 <- c(diff(x2),NA)
  calibrate <- data.frame(y = coredata(y), x1 = coredata(x1), x2 = coredata(x2))
  ## na.exclude keeps the predictions aligned with the dates in y despite the trailing NA in x1:
  fit <- lm(y ~ x1 + x2, data=calibrate, na.action=na.exclude)
  sumfit <- summary(fit)
  C[i] <- sumfit$coefficients[2]   ## estimate of C (coefficient of x1)
  L[i] <- sumfit$coefficients[3]   ## estimate of L (coefficient of x2)
  dC[i] <- sumfit$coefficients[5]  ## standard error of C
  dL[i] <- sumfit$coefficients[6]  ## standard error of L
  print(c(C[i],L[i],dC[i],dL[i]))
  lines(zoo(predict(fit),order.by=index(y)),col=rgb(0.7,0,0,0.3),lwd=2)
}

## Also get the NCEP/NCAR reanalysis 1:
print('Get NCEP/NCAR reanalysis')
if (!file.exists('air.mon.mean.nc'))
  download.file('https://www.esrl.noaa.gov/psd/repository/entry/get/air.mon.mean.nc?entryid=synth%3Ae570c8f9-ec09-4e89-93b4-babd5651e7a9%3AL25jZXAucmVhbmFseXNpcy5kZXJpdmVkL3N1cmZhY2UvYWlyLm1vbi5tZWFuLm5j',
                dest='air.mon.mean.nc')
ncepncar <- anomaly(aggregate.area(annual(retrieve('air.mon.mean.nc')),FUN='mean'),ref=1961:1990)
y <- matchdate(Q,it=ncepncar)
x2 <- matchdate(ncepncar,it=Q)
x1 <- c(diff(x2),NA)
calibrate <- data.frame(y = coredata(y), x1 = coredata(x1), x2 = coredata(x2))
fit <- lm(y ~ x1 + x2, data=calibrate, na.action=na.exclude)
sumfit <- summary(fit)
lines(zoo(predict(fit),order.by=index(y)),col=rgb(0,0,0.7,0.5),lwd=3)
lines(Q,lwd=3)
text(1930,-3,expression(Q == C*frac(d*Delta*T,d*t) + lambda*Delta*T),cex=1.5,col='grey30')
legend(1970,-3,c('Q from GCMs','Q from reanalysis','Crowley (2000)'),
       col=c(rgb(0.7,0,0,0.3),rgb(0,0,0.7,0.5),'black'),lty=1,lwd=c(2,3,3),bty='n')

## Compare the estimates for C and L in a scatter plot with error estimates:
C <- c(C,sumfit$coefficients[2]); L <- c(L,sumfit$coefficients[3])
dC <- c(dC,sumfit$coefficients[5]); dL <- c(dL,sumfit$coefficients[6])
par(bty='n')
plot(C,L,main='Regression coefficients',xlab='C',ylab=expression(lambda),
     xlim=range(C)+2*max(dC)*c(-1,1),ylim=range(L)+2*max(dL)*c(-1,1))
grid()
for (i in 1:n) marker(C[i],L[i],dC[i],dL[i])
marker(C[n+1],L[n+1],dC[n+1],dL[n+1],col=rgb(0,0,0.7,0.05),border=rgb(0,0,0.7,0.3))
points(C,L,pch=19,cex=1.5,col=c(rep("darkred",n),"darkblue"))
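
## Sanity check (a minimal sketch, not part of the original analysis): integrate the Hasselmann
## model forward with known test values C0 and L0, add a little noise, and confirm that the
## regression set-up above recovers the prescribed coefficients. C0, L0 and the noise level
## are arbitrary choices for illustration.
C0 <- 8; L0 <- 1.2
DT <- rep(0,length(Q))
for (j in 2:length(Q)) DT[j] <- DT[j-1] + (coredata(Q)[j-1] - L0*DT[j-1])/C0  ## forward Euler, dt = 1 yr
x2 <- zoo(DT,order.by=index(Q))
x1 <- c(diff(x2),NA)
testfit <- lm(y ~ x1 + x2, data=data.frame(y = coredata(Q) + rnorm(length(Q),sd=0.05),
                                           x1 = coredata(x1), x2 = coredata(x2)),
              na.action=na.exclude)
print(summary(testfit)$coefficients[2:3])  ## should be close to C0=8 and L0=1.2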
## Estimate the climate sensitivities from C & L:
print(round(c(quantile(L,probs=c(0.05,0.5,0.95)),mean(L)),2))  ## quantiles and mean of lambda
nf <- dim(Crowley2000)[1]
print(paste('Estimated climate sensitivity based on lambda and a doubling of the',Crowley2000[nf,1],'CO2-forcing of',
            Crowley2000[nf,4],'W/m**2 gives',
            'mean for GCMs =',round(mean(Crowley2000[nf,4]/L[1:n]),2),
            'with a 90% confidence interval:',
            round(quantile(Crowley2000[nf,4]/L[1:n],probs=0.05),2),'-',
            round(quantile(Crowley2000[nf,4]/L[1:n],probs=0.95),2),
            'estimate from reanalysis =',round(Crowley2000[nf,4]/L[n+1],2)))

## Check the effect of a varying missing-data mask on the global mean by applying the same mask to a GCM
## with no missing data.
## Download the data file:
if (!file.exists('hadcrut4.nc'))
  download.file('https://crudata.uea.ac.uk/cru/data/temperature/HadCRUT.4.6.0.0.median.nc',dest='hadcrut4.nc')
## Read the data:
obs <- retrieve('hadcrut4.nc',param='temperature_anomaly')
## Estimate the fraction of the area that has valid data:
z <- coredata(obs); z[is.finite(z)] <- 1; coredata(obs) <- z
dataarea <- aggregate.area(obs,FUN='sum')
z[] <- 1; coredata(obs) <- z
allarea <- aggregate.area(obs,FUN='sum')
fraction <- 100*dataarea/allarea
plot(fraction,main='Percentage of area with data in HadCRUT4',ylab='%',xlab='year',lwd=3)
grid()
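
## Sketch of the check proposed above (a minimal outline, not part of the original analysis):
## impose the HadCRUT4 missing-data mask on a gap-free GCM field and compare the global means
## with and without the mask. The file name 'gcm.t2m.nc' is a hypothetical placeholder for a
## GCM surface-temperature field on the same grid as HadCRUT4 (interpolate first if the grids
## differ). Since coredata(obs) was overwritten above, the mask is re-read from file.
if (file.exists('gcm.t2m.nc')) {
  hadcrut <- retrieve('hadcrut4.nc',param='temperature_anomaly')
  gcm <- matchdate(retrieve('gcm.t2m.nc'),it=hadcrut)  ## hypothetical gap-free GCM field
  hadcrut <- matchdate(hadcrut,it=gcm)                 ## keep only the overlapping dates
  gcm.masked <- gcm
  z <- coredata(gcm); z[!is.finite(coredata(hadcrut))] <- NA
  coredata(gcm.masked) <- z                            ## impose the observational mask
  plot(aggregate.area(annual(gcm),FUN='mean'),lwd=3,
       main='Effect of the HadCRUT4 mask on a GCM global mean',ylab='T (deg C)',xlab='year')
  lines(aggregate.area(annual(gcm.masked),FUN='mean'),col='darkred',lwd=2)
  legend('topleft',c('full GCM field','with HadCRUT4 mask'),col=c('black','darkred'),lty=1,bty='n')
}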