\documentclass[12pt,titlepage]{article}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage{amsthm}
\usepackage{mathtools}
\usepackage{graphicx}
\usepackage{color}
\usepackage{ucs}
\usepackage[utf8x]{inputenc}
\usepackage{xparse}
\usepackage{hyperref}
%----Macros----------
%
% Unresolved issues:
%
% \righttoleftarrow
% \lefttorightarrow
%
% \color{} with HTML colorspec
% \bgcolor
% \array with options (without options, it's equivalent to the matrix environment)
% Of the standard HTML named colors, white, black, red, green, blue and yellow
% are predefined in the color package. Here are the rest.
% HTML named colors not predefined by the color package (channel values
% are 8-bit intensities /255: 128 -> 0.502, 192 -> 0.753).
\definecolor{aqua}{rgb}{0, 1.0, 1.0}
% "fuschia" is a misspelling of the HTML color name "fuchsia"; the old
% name is kept for backward compatibility and the correct spelling is
% defined as well.
\definecolor{fuschia}{rgb}{1.0, 0, 1.0}
\definecolor{fuchsia}{rgb}{1.0, 0, 1.0}
\definecolor{gray}{rgb}{0.502, 0.502, 0.502}
\definecolor{lime}{rgb}{0, 1.0, 0}
\definecolor{maroon}{rgb}{0.502, 0, 0}
\definecolor{navy}{rgb}{0, 0, 0.502}
\definecolor{olive}{rgb}{0.502, 0.502, 0}
\definecolor{purple}{rgb}{0.502, 0, 0.502}
\definecolor{silver}{rgb}{0.753, 0.753, 0.753}
\definecolor{teal}{rgb}{0, 0.502, 0.502}
% Because of conflicts, \space and \mathop are converted to
% \itexspace and \operatorname during preprocessing.
% itex: \space{ht}{dp}{wd}
%
% Height and baseline depth measurements are in units of tenths of an ex while
% the width is measured in tenths of an em.
\makeatletter
% \itexspace{ht}{dp}{wd}: an empty box with the given height, depth and
% width.  Arguments are integers: ht and dp in tenths of an ex, wd in
% tenths of an em (see the measurement note above).  Produced by
% preprocessing itex's \space.
\newdimen\itex@wd%
\newdimen\itex@dp%
\newdimen\itex@thd%   % total rule height = height + depth
\def\itexspace#1#2#3{\itex@wd=#3em%
\itex@wd=0.1\itex@wd%   % wd/10 em
\itex@dp=#2ex%
\itex@dp=0.1\itex@dp%   % dp/10 ex
\itex@thd=#1ex%
\itex@thd=0.1\itex@thd%   % ht/10 ex
\advance\itex@thd\the\itex@dp%   % strut runs from -dp up to ht
\makebox[\the\itex@wd]{\rule[-\the\itex@dp]{0cm}{\the\itex@thd}}}
\makeatother
% \tensor and \multiscript
\makeatletter
% Support for \tensor{base}{scripts} and \multiscripts{pre}{base}{post}.
% The script list is scanned two tokens at a time by \mk@scripts;
% superscript tokens are accumulated in the token register \@sups and
% flushed as a single ^{...} group when a subscript marker (_) or the
% end marker (/) is seen, so alternating sub/superscripts keep their
% staggered column positions.
\newif\if@sup
\newtoks\@sups
% Append #1 to \@sups (expand-the-register-once trick via \edef).
\def\append@sup#1{\edef\act{\noexpand\@sups={\the\@sups #1}}\act}%
% Clear the pending-superscript flag and the accumulator.
\def\reset@sup{\@supfalse\@sups={}}%
% Scanner: `/' ends the list (flushing any pending superscripts);
% `_' flushes superscripts and emits the next token as a subscript;
% any other token is accumulated as a superscript.  Tail-recursive.
\def\mk@scripts#1#2{\if #2/ \if@sup ^{\the\@sups}\fi \else%
\ifx #1_ \if@sup ^{\the\@sups}\reset@sup \fi {}_{#2}%
\else \append@sup#2 \@suptrue \fi%
\expandafter\mk@scripts\fi}
% Entry points; `_/' terminates the scan of each script list.
\def\tensor#1#2{\reset@sup#1\mk@scripts#2_/}
\def\multiscripts#1#2#3{\reset@sup{}\mk@scripts#1_/#2%
\reset@sup\mk@scripts#3_/}
\makeatother
% \slash
\makeatletter
% \slash{x}: overstrike x with a slash (Feynman-slash style notation).
\newbox\slashbox \setbox\slashbox=\hbox{$/$}
% Center the slash on the argument by backing up half the sum of the
% two widths, then typesetting the argument on top.
\def\itex@pslash#1{\setbox\@tempboxa=\hbox{$#1$}
\@tempdima=0.5\wd\slashbox \advance\@tempdima 0.5\wd\@tempboxa
\copy\slashbox \kern-\@tempdima \box\@tempboxa}
% NOTE(review): this overrides LaTeX's standard \slash (a breakable `/');
% presumably intentional for itex input.
\def\slash{\protect\itex@pslash}
\makeatother
% math-mode versions of \rlap, etc
% from Alexander Perlis, "A complement to \smash, \llap, and lap"
% http://math.arizona.edu/~aprl/publications/mathclap/
% \clap: center the argument on the current point with zero width
% (the symmetric counterpart of \llap and \rlap).
\def\clap#1{\hbox to 0pt{\hss#1\hss}}
% Math-mode variants: \mathpalette supplies the current math style as #1
% so the overlapped material is set at the correct size; \mathsurround=0pt
% suppresses any extra spacing around the inner math shift.
\def\mathllap{\mathpalette\mathllapinternal}
\def\mathrlap{\mathpalette\mathrlapinternal}
\def\mathclap{\mathpalette\mathclapinternal}
\def\mathllapinternal#1#2{\llap{$\mathsurround=0pt#1{#2}$}}
\def\mathrlapinternal#1#2{\rlap{$\mathsurround=0pt#1{#2}$}}
\def\mathclapinternal#1#2{\clap{$\mathsurround=0pt#1{#2}$}}
% Saves the primitive \root as \oldroot, then redefines \root and \sqrt
% so the index is an ordinary argument: \root{n}{x} and \sqrt[n]{x} both
% expand to \oldroot n \of{x}.  (The previous comment said "\sqrt as
% \oldsqrt", but it is \root that gets saved, as \oldroot.)
\let\oldroot\root
\def\root#1#2{\oldroot #1 \of{#2}}
\renewcommand{\sqrt}[2][]{\oldroot #1 \of{#2}}
% Manually declare the txfonts symbolsC font (extra arrows/relations used
% below) without loading the full txfonts package.
\DeclareSymbolFont{symbolsC}{U}{txsyc}{m}{n}
\SetSymbolFont{symbolsC}{bold}{U}{txsyc}{bx}{n}
% Default substitution for encoding U so a missing txsyc degrades cleanly.
\DeclareFontSubstitution{U}{txsyc}{m}{n}
% Manually declare the stmaryrd font (for \sslash, \bigsqcap, ...).
\DeclareSymbolFont{stmry}{U}{stmry}{m}{n}
\SetSymbolFont{stmry}{bold}{U}{stmry}{b}{n}
% Manually declare the MnSymbolE font (for \llangle, \rrangle, and the
% moustache delimiters declared further down).
\DeclareFontFamily{OMX}{MnSymbolE}{}
\DeclareSymbolFont{mnomx}{OMX}{MnSymbolE}{m}{n}
\SetSymbolFont{mnomx}{bold}{OMX}{MnSymbolE}{b}{n}
% Map each design-size range to the matching MnSymbolE optical size.
\DeclareFontShape{OMX}{MnSymbolE}{m}{n}{
<-6> MnSymbolE5
<6-7> MnSymbolE6
<7-8> MnSymbolE7
<8-9> MnSymbolE8
<9-10> MnSymbolE9
<10-12> MnSymbolE10
<12-> MnSymbolE12}{}
% Declare specific arrows from txfonts without loading the full package
\makeatletter
% Like \DeclareMathSymbol, but first undefines the target command so an
% existing meaning (from the kernel or another package) cannot trigger
% an "already defined" error.
\def\re@DeclareMathSymbol#1#2#3#4{%
\let#1=\undefined
\DeclareMathSymbol{#1}{#2}{#3}{#4}}
% Diagonal double arrows from txsyc slots 116-119, each with a long and
% a short name.
\re@DeclareMathSymbol{\neArrow}{\mathrel}{symbolsC}{116}
\re@DeclareMathSymbol{\neArr}{\mathrel}{symbolsC}{116}
\re@DeclareMathSymbol{\seArrow}{\mathrel}{symbolsC}{117}
\re@DeclareMathSymbol{\seArr}{\mathrel}{symbolsC}{117}
\re@DeclareMathSymbol{\nwArrow}{\mathrel}{symbolsC}{118}
\re@DeclareMathSymbol{\nwArr}{\mathrel}{symbolsC}{118}
\re@DeclareMathSymbol{\swArrow}{\mathrel}{symbolsC}{119}
\re@DeclareMathSymbol{\swArr}{\mathrel}{symbolsC}{119}
% Further txsyc relations; \Perp and \Vbar share slot 121.
\re@DeclareMathSymbol{\nequiv}{\mathrel}{symbolsC}{46}
\re@DeclareMathSymbol{\Perp}{\mathrel}{symbolsC}{121}
\re@DeclareMathSymbol{\Vbar}{\mathrel}{symbolsC}{121}
% Symbols from stmaryrd.
\re@DeclareMathSymbol{\sslash}{\mathrel}{stmry}{12}
\re@DeclareMathSymbol{\bigsqcap}{\mathop}{stmry}{"64}
% NOTE(review): slot "6 looks truncated next to the other stmry slots
% (12, "64) -- confirm \biginterleave's slot against stmaryrd.sty.
\re@DeclareMathSymbol{\biginterleave}{\mathop}{stmry}{"6}
% Linear-logic "par" (inverted ampersand), txsyc slot 77, two names.
\re@DeclareMathSymbol{\invamp}{\mathrel}{symbolsC}{77}
\re@DeclareMathSymbol{\parr}{\mathrel}{symbolsC}{77}
\makeatother
% \llangle, \rrangle, \lmoustache and \rmoustache from MnSymbolE
\makeatletter
% Declare a math delimiter in the mnomx (MnSymbolE) font, undefining the
% command first when it already has a meaning (e.g. the kernel's
% \lmoustache/\rmoustache).  The \if\relax\noexpand#1 test guards the
% \let -- NOTE(review): it appears to detect an existing non-expandable
% meaning; confirm the exact \if semantics before relying on it.
\def\Decl@Mn@Delim#1#2#3#4{%
\if\relax\noexpand#1%
\let#1\undefined
\fi
\DeclareMathDelimiter{#1}{#2}{#3}{#4}{#3}{#4}}
% Convenience wrappers for opening and closing delimiters.
\def\Decl@Mn@Open#1#2#3{\Decl@Mn@Delim{#1}{\mathopen}{#2}{#3}}
\def\Decl@Mn@Close#1#2#3{\Decl@Mn@Delim{#1}{\mathclose}{#2}{#3}}
% Double angle brackets and moustaches (octal slots in MnSymbolE).
\Decl@Mn@Open{\llangle}{mnomx}{'164}
\Decl@Mn@Close{\rrangle}{mnomx}{'171}
\Decl@Mn@Open{\lmoustache}{mnomx}{'245}
\Decl@Mn@Close{\rmoustache}{mnomx}{'244}
\makeatother
% Widecheck
\makeatletter
% \widecheck{x}: a wide check accent.  Strategy: typeset \widehat over an
% invisible skeleton the size of the argument, flip the result vertically
% with \scalebox (graphicx), and overlay it on the argument via \ooalign.
\DeclareRobustCommand\widecheck[1]{{\mathpalette\@widecheck{#1}}}
\def\@widecheck#1#2{%
\setbox\z@\hbox{\m@th$#1#2$}%   % box 0: the argument at current style
\setbox\tw@\hbox{\m@th$#1%      % box 2: a hat over an empty skeleton
\widehat{%
\vrule\@width\z@\@height\ht\z@
\vrule\@height\z@\@width\wd\z@}$}%
% Position the flipped hat: set the depth, then compute the raise amount.
\dp\tw@-\ht\z@
\@tempdima\ht\z@ \advance\@tempdima2\ht\tw@ \divide\@tempdima\thr@@
\setbox\tw@\hbox{%
\raise\@tempdima\hbox{\scalebox{1}[-1]{\lower\@tempdima\box
\tw@}}}%
% Superimpose the (now upside-down) hat and the argument.
{\ooalign{\box\tw@ \cr \box\z@}}}
\makeatother
% \mathraisebox{voffset}[height][depth]{something}
\makeatletter
% \mathraisebox{voffset}[height][depth]{math}: a math-mode \raisebox.
% Select the \raisebox form that matches the optional arguments actually
% supplied, then apply it at the current math style via \mathpalette.
\NewDocumentCommand\mathraisebox{moom}{%
\IfNoValueTF{#2}{\def\@temp##1##2{\raisebox{#1}{$\m@th##1##2$}}}{%
\IfNoValueTF{#3}{\def\@temp##1##2{\raisebox{#1}[#2]{$\m@th##1##2$}}%
}{\def\@temp##1##2{\raisebox{#1}[#2][#3]{$\m@th##1##2$}}}}%
\mathpalette\@temp{#4}}
% Fix: this line previously read \makeatletter (a typo); it must close
% the \makeatletter opened above.
\makeatother
% udots (taken from yhmath)
\makeatletter
% \udots: diagonal dots rising from lower left to upper right (the
% mirror image of \ddots), taken from the yhmath package.
\def\udots{\mathinner{\mkern2mu\raise\p@\hbox{.}
\mkern2mu\raise4\p@\hbox{.}\mkern1mu
\raise7\p@\vbox{\kern7\p@\hbox{.}}\mkern1mu}}
\makeatother
%% Fix array
% \itexarray: itex's \array without options is equivalent to amsmath's
% matrix environment (see the preamble note above).
\newcommand{\itexarray}[1]{\begin{matrix}#1\end{matrix}}
%% \itexnum is a noop: it reproduces its argument unchanged.
\newcommand{\itexnum}[1]{#1}
%% Renaming existing commands
% \underoverset{below}{above}{base}: put #1 under and #2 over #3.
\newcommand{\underoverset}[3]{\underset{#1}{\overset{#2}{#3}}}
\newcommand{\widevec}{\overrightarrow}
% Short arrow aliases used by itex input.
\newcommand{\darr}{\downarrow}
\newcommand{\nearr}{\nearrow}
\newcommand{\nwarr}{\nwarrow}
\newcommand{\searr}{\searrow}
\newcommand{\swarr}{\swarrow}
\newcommand{\curvearrowbotright}{\curvearrowright}
\newcommand{\uparr}{\uparrow}
\newcommand{\downuparrow}{\updownarrow}
\newcommand{\duparr}{\updownarrow}
\newcommand{\updarr}{\updownarrow}
% Entity-style names for comparison operators.
\newcommand{\gt}{>}
\newcommand{\lt}{<}
\newcommand{\map}{\mapsto}
\newcommand{\embedsin}{\hookrightarrow}
% Capital Greek letters that coincide with Latin capitals (plus omicron).
\newcommand{\Alpha}{A}
\newcommand{\Beta}{B}
\newcommand{\Zeta}{Z}
\newcommand{\Eta}{H}
\newcommand{\Iota}{I}
\newcommand{\Kappa}{K}
\newcommand{\Mu}{M}
\newcommand{\Nu}{N}
\newcommand{\Rho}{P}
\newcommand{\Tau}{T}
\newcommand{\Upsi}{\Upsilon}
\newcommand{\omicron}{o}
% Delimiters and big (variable-size) operators.
\newcommand{\lang}{\langle}
\newcommand{\rang}{\rangle}
\newcommand{\Union}{\bigcup}
\newcommand{\Intersection}{\bigcap}
\newcommand{\Oplus}{\bigoplus}
\newcommand{\Otimes}{\bigotimes}
\newcommand{\Wedge}{\bigwedge}
\newcommand{\Vee}{\bigvee}
\newcommand{\coproduct}{\coprod}
\newcommand{\product}{\prod}
\newcommand{\closure}{\overline}
% Integrals and miscellaneous symbol aliases.
\newcommand{\integral}{\int}
\newcommand{\doubleintegral}{\iint}
\newcommand{\tripleintegral}{\iiint}
\newcommand{\quadrupleintegral}{\iiiint}
\newcommand{\conint}{\oint}
\newcommand{\contourintegral}{\oint}
\newcommand{\infinity}{\infty}
\newcommand{\bottom}{\bot}
\newcommand{\minusb}{\boxminus}
\newcommand{\plusb}{\boxplus}
\newcommand{\timesb}{\boxtimes}
\newcommand{\intersection}{\cap}
\newcommand{\union}{\cup}
\newcommand{\Del}{\nabla}
\newcommand{\odash}{\circleddash}
\newcommand{\negspace}{\!}
\newcommand{\widebar}{\overline}
% Size names: map itex size commands to math-style switches.
\newcommand{\textsize}{\normalsize}
% NOTE(review): this replaces the text-size command \scriptsize with the
% math-style switch \scriptstyle -- intentional for itex math input, but
% it breaks \scriptsize in ordinary running text.
\renewcommand{\scriptsize}{\scriptstyle}
\newcommand{\scriptscriptsize}{\scriptscriptstyle}
\newcommand{\mathfr}{\mathfrak}
% Interactive itex constructs: in static output keep only the content
% (the first argument is discarded).
\newcommand{\statusline}[2]{#2}
\newcommand{\tooltip}[2]{#2}
\newcommand{\toggle}[2]{#2}
% Theorem Environments
% amsthm environments: each numbered environment has a starred,
% unnumbered twin whose name is prefixed with `u'.
\theoremstyle{plain}
\newtheorem{theorem}{Theorem}
\newtheorem{lemma}{Lemma}
\newtheorem{prop}{Proposition}
\newtheorem{cor}{Corollary}
\newtheorem*{utheorem}{Theorem}
\newtheorem*{ulemma}{Lemma}
\newtheorem*{uprop}{Proposition}
\newtheorem*{ucor}{Corollary}
\theoremstyle{definition}
\newtheorem{defn}{Definition}
\newtheorem{example}{Example}
\newtheorem*{udefn}{Definition}
\newtheorem*{uexample}{Example}
\theoremstyle{remark}
\newtheorem{remark}{Remark}
\newtheorem{note}{Note}
\newtheorem*{uremark}{Remark}
\newtheorem*{unote}{Note}
%-------------------------------------------------------------------
\begin{document}
%-------------------------------------------------------------------
\section*{Blog - El Niño project (part 3)}
This is a [[Blog articles in progress|blog article in progress]], written by [[John Baez]]. To see discussions of the article as it is being written, visit the \href{http://forum.azimuthproject.org/discussion/1377/blog-el-nino-project-part-3/?Focus=11165#Comment_11165}{Azimuth Forum}.
If you want to write your own article, please read the directions on \href{http://www.azimuthproject.org/azimuth/show/How+to#blog}{How to blog}.
In February, this paper claimed that there's a 75\% chance the next El Niño will arrive by the end of 2014:
$\bullet$ Josef Ludescher, Avi Gozolchiani, Mikhail I. Bogachev, Armin Bunde, Shlomo Havlin, and Hans Joachim Schellnhuber, \href{http://www.climatelinc.eu/fileadmin/UG_ADVANCED/Publications/BIU_-_Avi__Halvin__et_al-Very_early_warning_of_next_El_Nino.pdf}{Very early warning of next El Niño}, \emph{Proceedings of the National Academy of Sciences}, February 2014. (Click title for free version, journal name for official version.)
Since it was published in a reputable journal, it created a big stir! Being able to predict an El Ni\~n{}o more than 6 months in advance would be a big deal. El Ni\~n{}os can cause billions of dollars of damage.
But that's not the only reason we at the Azimuth Project want to analyze, criticize and improve this paper. Another reason is that it uses a \emph{climate network}---and we like network theory.
Very roughly, the idea is this. Draw a big network of dots representing different places in the Pacific Ocean. For each pair of dots, compute a number saying how strongly correlated the temperatures are at those two places. The paper claims that when an El Ni\~n{}o is getting ready to happen, the average of these numbers is big. In other words, temperatures in the Pacific tend to go up and down in synch!
Whether this idea is right or wrong, it's interesting---and it's not very hard for programmers to dive in and study it.
Two Azimuth members have done just that: David Tanzer, a software developer who works for financial firms in New York, and Graham Jones, a self-employed programmer who also works on genomics and Bayesian statistics. These guys have really brought new life to the Azimuth Code Project in the last few weeks, and it's exciting! It's even gotten me to do some programming myself.
Soon I'll start talking about the programs they've written, and how you can help. But today I'll summarize the paper by Ludescher \emph{et al.} Their methodology is also explained here:
$\bullet$ Josef Ludescher, Avi Gozolchiani, Mikhail I. Bogachev, Armin Bunde, Shlomo Havlin, and Hans Joachim Schellnhuber, Improved El Niño forecasting by cooperativity detection, \emph{Proceedings of the National Academy of Sciences}, 30 May 2013.
The basic idea is to use a climate network. There are lots of variants on this idea, but here's a simple one. Start with a bunch of dots representing different places on the Earth. For any pair of dots $i$ and $j$, compute the cross-covariance of temperature histories at those two places. Call some function of this the `link strength' for that pair of dots. Compute the average link strength\ldots{} and get excited when this gets bigger than a certain value.
The papers by Ludescher \emph{et al.} use this strategy to predict El Ni\~n{}os. They build their climate network using correlations between daily temperature data for 14 grid points in the El Niño basin and 193 grid points outside this region, as shown here:
The red dots are the points in the El Ni\~n{}o basin.
Starting from this temperature data, they compute an `average link strength' in a way I'll describe later. When this number is bigger than a certain fixed value, they claim an El Ni\~n{}o is coming.
How do they decide if they're right? How do we tell when an El Ni\~n{}o actually arrives? One way is to use the `Ni\~n{}o 3.4 index'. This is the area-averaged sea surface temperature anomaly in the yellow region here:
`Anomaly' means the temperature minus its average over time: how much hotter it is than usual. When the Ni\~n{}o 3.4 index is over 0.5\textdegree{}C for at least 5 months, Ludescher \emph{et al.} say there's an El Ni\~n{}o.
Here is what they get:
The blue peaks are El Ni\~n{}os: episodes where the Ni\~n{}o 3.4 index is over 0.5\textdegree{}C for at least 5 months.
The red line is their `average link strength'. Whenever this exceeds a certain threshold $\Theta = 2.82$, and the Ni\~n{}o 3.4 index is not over 0.5\textdegree{}C, they predict an El Ni\~n{}o will start in the following calendar year.
The green arrows show their successful predictions. The dashed arrows show their false alarms. A little letter n appears next to each El Ni\~n{}o that they failed to predict.
You're probably wondering where the number $2.82$ came from. They get it from a learning algorithm that finds this threshold by optimizing the predictive power of their model. Chart A here shows the `learning phase' of their calculation. In this phase, they adjusted the threshold $\Theta$ so their procedure would do a good job. Chart B shows the `testing phase'. Here they used the value of $\Theta$ chosen in the learning phase, and checked to see how good a job it did. I'll let you read their paper for more details on how they chose $\Theta$.
But what about their prediction now? That's the green arrow at far right here:
On 17 September 2013, the red line went above the threshold! So, their scheme predicts an El Ni\~n{}o sometime in 2014. The chart at right is a zoomed-in version that shows the red line in August, September, October and November of 2014.
Now I mainly need to explain how they compute their `average link strength'.
Let $i$ stand for any point in this 9 $\times$ 23 grid:
For each day $t$ between June 1948 and November 2013, let $\tilde{T}_i(t)$ be the average surface air temperature at the point $i$ on day $t$.
Let $T_i(t)$ be $\tilde{T}_i(t)$ minus its climatological average. For example, if $t$ is June 1st 1970, we average the temperature at location $i$ over all June 1sts from 1948 to 2013, and subtract that from $\tilde{T}_i(t)$ to get $T_i(t)$. They call $T_i(t)$ the \emph{temperature anomaly}.
(A subtlety here: when we are doing prediction we can't know the future temperatures, so the climatological average is only the average over days meeting the above criteria.)
For any function of time $f$, denote its moving average over the last 365 days by:
\begin{displaymath}
\langle f(t) \rangle = \frac{1}{365} \sum_{d = 0}^{364} f(t - d)
\end{displaymath}
Let $i$ be a point in the El Niño basin, and $j$ be a point outside it. For any time lag $\tau$ between 0 and 200 days, define the \emph{time-delayed cross-covariance} by:
\begin{displaymath}
\langle T_i(t) T_j(t - \tau) \rangle - \langle T_i(t) \rangle \langle T_j(t - \tau) \rangle
\end{displaymath}
Note that this is a way of studying the linear correlation between the temperature anomaly at node $i$ and the temperature anomaly a time $\tau$ earlier at some node $j$. So, it's about how temperature anomalies inside the El Ni\~n{}o basin are correlated to temperature anomalies outside this basin at earlier times.
Ludescher \emph{et al.} then normalize this, defining the \emph{time-delayed cross-correlation} $C_{i,j}^{t}(-\tau)$ to be the time-delayed cross-covariance divided by
\begin{displaymath}
\sqrt{\langle (T_i(t) - \langle T_i(t)\rangle)^2 \rangle} \;
\sqrt{\langle (T_j(t-\tau) - \langle T_j(t-\tau)\rangle)^2 \rangle}
\end{displaymath}
This is something like the standard deviation of $T_i(t)$ times the standard deviation of $T_j(t - \tau)$. Dividing by standard deviations is what people do to turn covariances into correlations. But there are some potential problems here, which I'll discuss later.
They define $C_{i,j}^{t}(\tau)$ in a similar way, by taking
\begin{displaymath}
\langle T_i(t - \tau) T_j(t) \rangle - \langle T_i(t - \tau) \rangle \langle T_j(t) \rangle
\end{displaymath}
and normalizing it. So, this is about how temperature anomalies outside the El Ni\~n{}o basin are correlated to temperature anomalies inside this basin at earlier times.
Next, for nodes $i$ and $j$, and for each time point $t$, they determine the maximum, the mean and the standard deviation of $|C_{i,j}^t(\tau)|$, as $\tau$ ranges from $-200$ to $200$ days.
They define the \emph{link strength} $S_{i j}(t)$ as the difference between the maximum and the mean value, divided by the standard deviation.
Finally, they let $S(t)$ be the \emph{average link strength}, calculated by averaging $S_{i j}(t)$ over all pairs $(i,j)$ where $i$ is a node in the El Niño basin and $j$ is a node outside.
They compute $S(t)$ for every 10th day between January 1950 and November 2013. When $S(t)$ goes over 2.82, and the Ni\~n{}o 3.4 index is not over 0.5\textdegree{}C, they predict an El Ni\~n{}o in the next calendar year.
There's more to say about their methods. We'd like you to help us check their work and improve it. Soon I want to show you Graham Jones' software for replicating their calculations! But right now I just want to conclude by:
$\bullet$ mentioning a potential problem in the math, and
$\bullet$ telling you where to get the data used by Ludescher \emph{et al.}
\hypertarget{Nuances}{}\subsubsection*{{Mathematical nuances}}\label{Nuances}
Ludescher \emph{et al.} normalize the time-delayed cross-covariance in a somewhat odd way. They claim to divide it by
\begin{displaymath}
\sqrt{\langle (T_i(t) - \langle T_i(t)\rangle)^2 \rangle} \;
\sqrt{\langle (T_j(t-\tau) - \langle T_j(t-\tau)\rangle)^2 \rangle}
\end{displaymath}
But this is a strange thing, since it has nested angle brackets, and the angle brackets are defined as a running average over the 365 days. Thus this quantity involves data going back twice as long: 730 days.
Furthermore, the `link strength' involves the above expression where $\tau$ goes up to 200 days. So, taking their definitions at face value, Ludescher \emph{et al.} could not actually compute their `link strength' until 930 days after the surface temperature data first starts at the beginning of 1948. That would be the middle of 1950. But their graph of the link strength starts at the beginning of 1950!
Perhaps they actually normalized the time-delayed cross-covariance by dividing it by this:
\begin{displaymath}
\sqrt{\langle T_i(t)^2\rangle - \langle T_i(t)\rangle^2 } \;
\sqrt{\langle T_j(t-\tau)^2 \rangle - \langle T_j(t-\tau)\rangle^2}
\end{displaymath}
This simpler expression avoids nested angle brackets, and it makes more sense conceptually. It is the standard deviation of $T_i(t)$ over the last 365 days, times the standard deviation of $T_j(t-\tau)$ over the last 365 days.
As noted, the expression written by Ludescher \emph{et al.} does not equal this simpler expression, since:
\begin{displaymath}
\langle T_i(t) \; \langle T_i(t) \rangle \rangle \neq \langle T_i(t) \rangle \; \langle T_i(t) \rangle
\end{displaymath}
The reason is that
\begin{displaymath}
\langle T_i(t) \langle T_i(t) \rangle \rangle = \frac{1}{365} \sum_{d = 0}^{364} T_i(t-d) \langle T_i(t- d) \rangle = \frac{1}{365} \sum_{d = 0}^{364} \frac{1}{365} \sum_{D = 0}^{364} T_i(t-d) T_i(t- d -D)
\end{displaymath}
which is generically different from
\begin{displaymath}
\langle T_i(t) \rangle \langle T_i(t) \rangle = \biggl(\frac{1}{365} \sum_{d = 0}^{364} T_i(t-d)\biggr)\biggl(\frac{1}{365} \sum_{d = 0}^{364} T_i(t-d) \biggr)
\end{displaymath}
Remember that $\tilde{T}_i(t)$ is the average surface air temperature at the grid point $i$ on day $t$. You can get these temperatures from here:
$\bullet$ NCEP/NCAR Reanalysis 1: surface air temperature data, NOAA Earth System Research Laboratory.
More precisely, there's a bunch of files containing worldwide daily average temperatures on a 2.5\textdegree{} latitude $\times$ 2.5\textdegree{} longitude grid (144 $\times$ 73 grid points), from 1948 to 2010. If you go there, the website will help you get data from within a chosen rectangle in a grid, for a chosen time interval. These are NetCDF files, a format we will discuss later, when we get into more details about programming!
\textbf{Ni\~n{}o 3.4} is the area-averaged sea surface temperature anomaly in the region 5\textdegree{}S-5\textdegree{}N and 170\textdegree{}-120\textdegree{}W. You can get Ni\~n{}o3.4 data here:
\begin{itemize}%
\item Monthly Ni\~n{}o 3.4 index, Climate Prediction Center, NOAA.
\end{itemize}
Ni\~n{}o 3.4 is just one of several official regions in the Pacific:
$\bullet$ Niño 1: 80\textdegree{}W-90\textdegree{}W and 5\textdegree{}S-10\textdegree{}S.
$\bullet$ Niño 2: 80\textdegree{}W-90\textdegree{}W and 0\textdegree{}S-5\textdegree{}S
$\bullet$ Niño 3: 90\textdegree{}W-150\textdegree{}W and 5\textdegree{}S-5\textdegree{}N.
$\bullet$ Niño 3.4: 120\textdegree{}W-170\textdegree{}W and 5\textdegree{}S-5\textdegree{}N.
$\bullet$ Niño 4: 160\textdegree{}E-150\textdegree{}W and 5\textdegree{}S-5\textdegree{}N.
For more details, read this:
$\bullet$ Kevin E. Trenberth, The definition of El Ni\~n{}o, \emph{Bulletin of the American Meteorological Society} \textbf{78} (1997), 2771--2777.
category: blog, climate
[[!redirects Blog - El Nino project (part 3)]]
\end{document}