X-Git-Url: http://mj.ucw.cz/gitweb/?a=blobdiff_plain;f=notation.tex;h=e1e424f4ea29e21399375eac188f26167d2424a2;hb=2673d7b09e7049aa35f3f81b6e7b395c934bdfbd;hp=cd7f8dd4101f1641030de2e1d0d66378f0e05b46;hpb=aa8c33c5535bde5f26cec0052d04a5dc91f87952;p=saga.git

diff --git a/notation.tex b/notation.tex
index cd7f8dd..e1e424f 100644
--- a/notation.tex
+++ b/notation.tex
@@ -2,32 +2,109 @@
 \input macros.tex
 \fi
 
-\chapter{Notation}
+\chapter{Notation}\id{notapp}%
+
+\section{Symbols}
 
 {\obeylines\parskip=0pt
-\def\n#1#2{\>\hbox to 6em{#1 \dotfill} #2}
-\def\[#1]{[\thmref{#1}]}
-\n{$T[x,y]$}{the path in a tree~$T$ joining $x$ and $y$ \[heavy]}
+\def\n#1#2{\>\hangindent=6em\hangafter=1 \hbox to 6em{#1 \dotfill~}#2}
+\def\[#1]{~{\it(\ref{#1})}}
+
+\n{$A(x,y)$}{Ackermann's function \[ackerdef]}
+\n{$A(x)$}{diagonal of Ackermann's function \[ackerdef]}
+\n{$\band$}{bitwise conjunction: $(x\band y)[i]=1$ iff $x[i]=1 \land y[i]=1$}
+\n{$C_k$}{cycle on~$k$ vertices}
+\n{${\cal D}(G)$}{optimal MSF decision tree for a~graph~$G$ \[decdef]}
+\n{$D(G)$}{depth of ${\cal D}(G)$ \[decdef]}
+\n{$D(m,n)$}{decision tree complexity of MSF \[decdef]}
+\n{$D_n$}{$n\times n$ matrix with 0's on the main diagonal and 1's elsewhere \[hatrank]}
+\n{$\deg_G(v)$}{degree of vertex~$v$ in graph~$G$; we omit $G$ if it is clear from context}
+\n{$E(G)$}{set of edges of a graph~$G$}
+\n{$E$}{$E(G)$ when the graph~$G$ is clear from context}
+\n{${\E}X$}{expected value of a~random variable~$X$}
+\n{$K_k$}{complete graph on~$k$ vertices}
+\n{$L(\pi,A)$}{lexicographic ranking function for permutations on a~set~$A\subseteq{\bb N}$ \[brackets]}
+\n{$L^{-1}(i,A)$}{lexicographic unranking function, the inverse of~$L$ \[brackets]}
+\n{$\log n$}{the binary logarithm of the number~$n$}
+\n{$\log^* n$}{iterated logarithm: $\log^*n := \min\{i \mid \log^{(i)}n \le 1\}$; the inverse of~$2\tower n$}
+\n{$\<lsb>(x)$}{position of the lowest bit set in~$x$ \[lsbmsb]}
+\n{$\<msb>(x)$}{position of the highest bit set in~$x$ \[lsbmsb]}
+\n{MSF}{minimum spanning forest \[mstdef]}
+\n{$\msf(G)$}{the unique minimum spanning forest of a graph~$G$ \[mstnota]}
+\n{MST}{minimum spanning tree \[mstdef]}
+\n{$\mst(G)$}{the unique minimum spanning tree of a graph~$G$ \[mstnota]}
+\n{$m(G)$}{number of edges of a graph~$G$, that is $\vert E(G)\vert$}
+\n{$m$}{$m(G)$ when the graph~$G$ is clear from context}
+\n{$\bb N$}{set of all natural numbers, including 0}
+\n{${\bb N}^+$}{set of all positive integers}
+\n{$N_0(M)$}{number of permutations satisfying the restrictions~$M$ \[restnota]}
+\n{$n(G)$}{number of vertices of a graph~$G$, that is $\vert V(G)\vert$}
+\n{$n$}{$n(G)$ when the graph~$G$ is clear from context}
+\n{$\bnot$}{bitwise negation: $(\bnot x)[i]=1-x[i]$}
+\n{$\O(g)$}{asymptotic~$O$: $f=\O(g)$ iff $\exists c>0: f(n)\le c\cdot g(n)$ for all~$n\ge n_0$}
+\n{$\widetilde\O(g)$}{$f=\widetilde\O(g)$ iff $f=\O(g\cdot\log^{\O(1)} g)$}
+\n{$\bor$}{bitwise disjunction: $(x\bor y)[i]=1$ iff $x[i]=1 \lor y[i]=1$}
+\n{${\cal P}_A$}{set of all permutations on a~set~$A$ \[restnota]}
+\n{${\cal P}_{A,M}$}{set of all permutations on~$A$ satisfying the restrictions~$M$ \[restnota]}
+\n{$\per M$}{permanent of a~square matrix~$M$}
+\n{$\poly(n)$}{$f=\poly(n)$ iff $f=\O(n^c)$ for some $c$}
+\n{${\rm Pr}[\varphi]$}{probability that a predicate~$\varphi$ is true}
+\n{$\bb R$}{set of all real numbers}
+\n{$R_{C,\prec}(x)$}{rank of~$x$ in a~set~$C$ ordered by~$\prec$ \[rankdef]}
+\n{$R^{-1}_{C,\prec}(i)$}{unrank of~$i$: the $i$-th smallest element of a~set~$C$ ordered by~$\prec$ \[rankdef]}
+\n{$V(G)$}{set of vertices of a graph~$G$}
+\n{$V$}{$V(G)$ when the graph~$G$ is clear from context}
+\n{$W$}{word size of the RAM \[wordsize]}
+\n{$w(e)$}{weight of an edge $e$}
+\n{$\bxor$}{bitwise non-equivalence: $(x\bxor y)[i]=1$ iff $x[i]\ne y[i]$}
+
+\n{$\alpha(n)$}{diagonal inverse of Ackermann's function \[ackerinv]}
+\n{$\alpha(m,n)$}{$\alpha(m,n) := \min\{ x\ge 1 \mid A(x,4\lceil m/n\rceil) > \log n \}$ \[ackerinv]}
+\n{$\beta(m,n)$}{$\beta(m,n) := \min\{i \mid \log^{(i)}n \le m/n \}$ \[itjarthm]}
+\n{$\delta_G(U)$}{all edges connecting $U\subset V(G)$ with $V(G)\setminus U$; we usually omit the~$G$}
+\n{$\delta_G(v)$}{edges of a one-vertex cut, i.e., $\delta_G(\{v\})$}
+\n{$\Theta(g)$}{asymptotic~$\Theta$: $f=\Theta(g)$ iff $f=\O(g)$ and $f=\Omega(g)$}
+\n{$\lambda_i(n)$}{inverse of the $i$-th row of Ackermann's function \[ackerinv]}
+\n{$\varrho({\cal C})$}{edge density of a graph class~$\cal C$ \[density]}
+\n{$\Omega(g)$}{asymptotic~$\Omega$: $f=\Omega(g)$ iff $\exists c>0: f(n)\ge c\cdot g(n)$ for all~$n\ge n_0$}
+
+\n{$T[u,v]$}{the path in a tree~$T$ joining vertices $u$ and $v$ \[heavy]}
 \n{$T[e]$}{the path in a tree~$T$ joining the endpoints of an~edge~$e$ \[heavy]}
 \n{$A\symdiff B$}{symmetric difference of sets: $(A\setminus B) \cup (B\setminus A)$}
 \n{$G-e$}{graph $G$ with edge $e$ removed}
 \n{$G+e$}{graph $G$ with edge $e$ added}
-\n{$w(e)$}{weight of an edge $e$}
-\n{$V(G)$}{set of vertices of a graph~$G$}
-\n{$E(G)$}{set of edges of a graph~$G$}
-\n{$n(G)$}{number of vertices of a graph~$G$, that is $\vert V(G)\vert$}
-\n{$m(G)$}{number of edges of a graph~$G$, that is $\vert E(G)\vert$}
-\n{$V,E,n,m$}{when used without $(G)$, they refer to the input of the current algorithm}
-\n{MST}{minimum spanning tree \[mstdef]}
-\n{MSF}{minimum spanning forest \[mstdef]}
-\n{\mst(G)}{the unique minimum spanning tree of a graph~$G$ \[mstnota]}
-\n{$X \choose k$}{a set of $k$-element subsets of a set~$X$}
+\n{$G[U]$}{subgraph induced by a~set $U\subset V(G)$}
+\n{$X \choose k$}{the set of all $k$-element subsets of a set~$X$}
 \n{$G/e$}{multigraph contraction \[contract]}
-\n{$G.e$}{simple graph contraction \[simpcont]}
-\n{$\alpha(n)$}{the inverse Ackermann's function}
-\n{$f[X]$}{function applied to a set: $f[X]:=\{ f(x) ; x\in X \}$.}
+\n{$G\sgc e$}{simple graph contraction \[simpcont]}
+\n{$G/X$, $G\sgc X$}{contraction by a~set $X$ of vertices or edges \[setcont]}
+\n{$f[X]$}{function applied to a set: $f[X]:=\{ f(x) \mid x\in X \}$}
+\n{$f[e]$}{as edges are two-element sets, $f[e]$ is the image of both endpoints of an edge~$e$}
+\n{$f^{(i)}$}{function~$f$ iterated $i$~times: $f^{(0)}(x):=x$, $f^{(i+1)}(x):=f(f^{(i)}(x))$}
+\n{$2\tower n$}{the tower function (iterated exponential): $2\tower 0:=1$, $2\tower (n+1):=2^{2\tower n}$}
+\n{$\(x)$}{number~$x\in{\bb N}$ written in binary \[bitnota]}
+\n{$\(x)_b$}{$\(x)$ zero-padded to exactly $b$ bits \[bitnota]}
+\n{$x[i]$}{when $x\in{\bb N}$: the value of the $i$-th bit of~$x$ \[bitnota]}
+\n{$x[B]$}{when $x\in{\bb N}$: the values of the bits at positions in the set~$B$ \[qhnota]}
+\n{$\pi[i]$}{when $\pi$ is a~sequence: the $i$-th element of~$\pi$, starting with $\pi[1]$ \[brackets]}
+\n{$\pi[i\ldots j]$}{the subsequence $\pi[i], \pi[i+1], \ldots, \pi[j]$}
+\n{$\sigma^k$}{the string~$\sigma$ repeated $k$~times \[bitnota]}
+\n{$\0$, $\1$}{bits in a~bit string \[bitnota]}
+\n{$\equiv$}{congruence modulo a~given number}
+\n{$\bf x$}{a~vector with elements $x_1,\ldots,x_d$; $x$ is its bitwise encoding \[vecnota]}
+\n{$x \shl n$}{bitwise shift of~$x$ by $n$~positions to the left: $x\shl n = x\cdot 2^n$}
+\n{$x \shr n$}{bitwise shift of~$x$ by $n$~positions to the right: $x\shr n = \lfloor x/2^n \rfloor$}
+\n{$[n]$}{the set $\{1,2,\ldots,n\}$ \[pranksect]}
+\n{$n^{\underline k}$}{the $k$-th falling factorial power: $n\cdot(n-1)\cdot\ldots\cdot(n-k+1)$ \[kpranksect]}
+\n{$H\minorof G$}{$H$ is a~minor of~$G$ \[minordef]}
+\n{$G\crpt R$}{graph~$G$ with edges in~$R$ corrupted \[corrnota]}
+\n{$R^C$}{$R^C = R\cap \delta(C)$ \[corrnota]}
+\n{$M^{i,j}$}{the matrix $M$ with $i$-th row and $j$-th column deleted \[restnota]}
+
 }
+%--------------------------------------------------------------------------------
+
 \section{Multigraphs and contractions}
 
 Since the formalism of multigraphs is not fixed in the literature, we will
@@ -35,7 +112,7 @@ better define it carefully, following \cite{diestel:gt}:
 \defn
 A~\df{multigraph} is an ordered triple $(V,E,M)$, where $V$~is the
 set of vertices, $E$~is the set of edges, taken as abstract objects disjoint
-with the vertices, and $M$ is a mapping $E\mapsto V \cup {V \choose 2}$
+with the vertices, and $M$ is a mapping $E\rightarrow V \cup {V \choose 2}$
 which assigns to each edge either a pair of vertices or a single vertex
 (if the edge is a loop).
 
@@ -46,26 +123,131 @@ shorthand for $\exists e\in E(G)$ such that $M(G)(e) = \{x,y\}$. Also, we
 consider multigraphs with no multiple edges nor loops and simple graphs
 to be the same objects, although they formally differ.
 
-\defn\thmid{contract}%
-Let $G=(V,E,M)$ be a multigraph and $e=xy$ its edge. \df{(Multigraph) contraction of~$G$ along~$e$}
+\defn\id{contract}%
+Let $G=(V,E,M)$ be a multigraph and $e=xy$ an arbitrary edge of~$G$.
+The \df{(multigraph) contraction of~$e$ in~$G$}
 produces a multigraph $G/e=(V',E',M')$ such that:
 $$\eqalign{
 V' &= (V(G) \setminus \{x,y\}) \cup \{v_e\},\quad\hbox{where $v_e$ is a new vertex,}\cr
 E' &= E(G) - \{e\},\cr
-M'(f) &= \{ m(v) ; v\in M(f) \} \quad\hbox{for every $f=\in E'$, and}\cr
+M'(f) &= \{ m(v) \mid v\in M(f) \} \quad\hbox{for every $f\in E'$, and}\cr
 m(v) &= \cases{v_e & \hbox{for $v=x,y,$}\cr v & \hbox{otherwise.}} \cr
 }$$
 
-Sometimes we need contraction for simple graphs as well. It corresponds to performing
-the multigraph contraction, unifying parallel edges and deleting loops.
+Sometimes we need contraction for simple graphs as well. It is equivalent to performing
+the multigraph contraction and then unifying parallel edges and deleting loops.
 
-\defn\thmid{simpcont}%
-Let $G=(V,E)$ a simple graph and $e=xy$ its edge. \df{(Simple graph) contraction of~$G$ along~$e$}
-produces a graph $G.e=(V',E')$ such that:
+\defn\id{simpcont}%
+Let $G=(V,E)$ be a simple graph and $e=xy$ an arbitrary edge of~$G$.
+The \df{(simple graph) contraction of~$e$ in~$G$}
+produces a graph $G\sgc e=(V',E')$ such that:
 $$\eqalign{
 V' &= (V(G) \setminus \{x,y\}) \cup \{v_e\},\quad\hbox{where $v_e$ is a new vertex,}\cr
-E' &= \{ \{m(x),m(y)\} ; xy\in E \land m(x)\ne m(y) \},\cr
-m(x) &= \cases{v_e & \hbox{for $v=x,y,$}\cr v & \hbox{otherwise.}} \cr
+E' &= \{ \{m(x),m(y)\} \mid xy\in E \land m(x)\ne m(y) \},\cr
+m(v) &= \cases{v_e & \hbox{for $v=x,y,$}\cr \noalign{\vskip5pt} v & \hbox{otherwise.}} \cr
 }$$
+\defn\id{setcont}%
+We can also extend the above definitions to contractions of a~set of vertices or edges.
+For $F\subseteq E(G)$, the graph $G/F$ is defined as $(G/f_1)/f_2/\ldots/f_k$ where
+$f_1,\ldots,f_k$ are the elements of~$F$ (observe that the result
+does not depend on the order of the edges). For $U\subseteq V(G)$, we define $G/U$
+as the graph with all vertices of~$U$ merged into a~single vertex, that is $(G\cup U^*)/U^*$,
+where $U^*$ is the complete graph on~$U$. Similarly for $G\sgc F$ and $G\sgc U$.
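+
+\example
+To illustrate the difference between the two kinds of contraction, consider the
+triangle~$G$ with vertices $x,y,z$ and edges $e=xy$, $f=yz$, $g=xz$. The multigraph
+contraction $G/e$ has vertices $v_e$ and~$z$, and both $f$ and~$g$ survive as parallel
+edges joining $v_e$ with~$z$. The simple graph contraction $G\sgc e$ keeps only
+a~single edge between $v_e$ and~$z$: contracting an~edge of a~multigraph removes
+nothing but the edge~$e$ itself, while in a~simple graph the parallel edges and
+loops which would arise are merged and deleted as described above.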
+
+%--------------------------------------------------------------------------------
+
+\section{Ackermann's function and its inverses}\id{ackersec}%
+
+Ackermann's function is an~extremely fast-growing function, introduced
+by Ackermann \cite{ackermann:function} in the context of
+computability theory. Its original purpose was to demonstrate that not every recursive
+function is also primitive recursive. At first sight, it does not
+seem related to efficient algorithms at all. Its various inverses, however, occur
+surprisingly often in the analysis of algorithms and mathematical structures:
+We meet them in Section \ref{classalg} in the time complexity of the Disjoint Set Union
+data structure and also in the best known upper bound on the decision tree
+complexity of minimum spanning trees in Section \ref{optalgsect}. Another
+important application is in the complexity of Davenport-Schinzel sequences (see
+Klazar's survey \cite{klazar:gdss}), but as far as we know, these are not otherwise
+related to the topic of our study.
+
+Various sources differ in the exact definition of both Ackermann's
+function and its inverse, but most of the differences are in factors that
+are negligible in the light of the giant asymptotic growth of the function.\foot{%
+To quote Pettie \cite{pettie:onlineverify}: ``In the field of algorithms \& complexity,
+Ackermann's function is rarely defined the same way twice. We would not presume to buck
+such a~well-established precedent. Here is a~slight variant.''}
+We will use the definition by double recursion given by Tarjan \cite{tarjan:setunion},
+which is predominant in the literature on graph algorithms:
+
+\defn\id{ackerdef}%
+\df{Ackermann's function} $A(x,y)$ is a~function on non-negative integers defined as follows:
+$$\eqalign{
+A(0,y) &:= 2y, \cr
+A(x,0) &:= 0, \cr
+A(x,1) &:= 2 \quad \hbox{for $x\ge 1$}, \cr
+A(x,y) &:= A(x-1, A(x,y-1)) \quad \hbox{for $x\ge 1$, $y\ge 2$}. \cr
+}$$
+The functions $A(x,\cdot)$ are called the \df{rows} of $A(x,y)$, similarly $A(\cdot,y)$ are
+its \df{columns.}
+
+Sometimes, a~single-parameter version of this function is also used. It is defined
+as the diagonal of the previous function, i.e., $A(x):=A(x,x)$.
+
+\example
+We can try evaluating $A(x,y)$ at some points:
+$$\eqalign{
+A(x,2) &= A(x-1, A(x,1)) = A(x-1,2) = \ldots = A(0,2) = 4, \cr
+A(1,y) &= A(0, A(1,y-1)) = 2A(1,y-1) = 2^{y-1}A(1,1) = 2^y, \cr
+A(2,y) &= A(1, A(2,y-1)) = 2^{A(2,y-1)} = 2\tower y \hbox{~~(the tower of exponentials),} \cr
+A(3,y) &= \hbox{the tower function iterated $y$~times,} \cr
+A(4,3) &= A(3,A(4,2)) = A(3,4) = A(2,A(3,3)) = A(2,A(2,A(3,2))) = \cr
+ &= A(2,A(2,4)) = 2\tower(2\tower 4) = 2\tower 65536. \cr
+}$$
+
+\para
+Three functions related to the inverse of the function~$A$ are usually considered:
+
+\defn\id{ackerinv}%
+The \df{$i$-th row inverse} $\lambda_i(n)$ of Ackermann's function is defined by:
+$$
+\lambda_i(n) := \min\{ y \mid A(i,y) > \log n \}.
+$$
+The \df{diagonal inverse} $\alpha(n)$ is defined by:
+$$
+\alpha(n) := \min\{ x \mid A(x) > \log n \}.
+$$
+The two-parameter \df{alpha function} $\alpha(m,n)$ is defined for $m\ge n$ by:
+$$
+\alpha(m,n) := \min\{ x\ge 1 \mid A(x,4\lceil m/n\rceil) > \log n \}.
+$$
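+
+\example
+To see how these definitions unfold, we can work out the first row inverse
+explicitly: by the previous example we have $A(1,y)=2^y$, hence for every $n\ge 2$
+$$
+\lambda_1(n) = \min\{ y \mid 2^y > \log n \} = \lfloor \log\log n \rfloor + 1,
+$$
+which is the source of the estimate $\lambda_1(n)=\O(\log\log n)$ below.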
+
+\example
+$\lambda_1(n) = \O(\log\log n)$, $\lambda_2(n) = \O(\log^* n)$, $\lambda_3(n)$ grows even
+slower, and $\alpha(n)$ is asymptotically smaller than $\lambda_i(n)$ for any fixed~$i$.
+
+\obs
+It is easy to verify that all the rows are strictly increasing and so are all
+columns, except the first three columns, which are constant. Therefore for a~fixed~$n$,
+$\alpha(m,n)$ is maximized at $m=n$. So $\alpha(m,n) \le 3$ when $\log n < A(3,4)$,
+which covers all values of~$n$ that are likely to occur in practice.
+
+\lemma
+$\alpha(m,n) \le \alpha(n)+1$.
+
+\proof
+We know that
+$A(x,4\lceil m/n\rceil) \ge A(x,4) = A(x-1,A(x,3)) \ge A(x-1,x-1)$ (the last inequality
+holds because $A(x,3)\ge x-1$ and the rows are increasing), so $A(x,4\lceil m/n\rceil)$
+rises above $\log n$ no later than $A(x-1,x-1)$ does.
+\qed
+
+\lemma\id{alphaconst}%
+When $i$~is a~fixed constant and $m\ge n\cdot \lambda_i(n)$, then $\alpha(m,n) \le i$.
+
+\proof
+The choice of~$m$ guarantees that $A(x,4\lceil m/n\rceil) \ge A(x,\lambda_i(n))$, which
+is greater than $\log n$ for all $x \ge i$.
+\qed
+
 \endpart