% (Removed: scraped web-page header and line-number gutter accidentally captured before the document source.)
\documentclass{article}
% preamble
\def\npart{III}
\def\nyear{2023}
\def\nterm{Michaelmas}
\def\nlecturer{Dr Julian Sahasrabudhe}
\def\ncourse{Ramsey Theory on Graphs}
\def\draft{Incomplete}
\input{header}
\setcounter{section}{-1}
% and here we go!
\begin{document}
\maketitle
\tableofcontents
\clearpage
\section{Introduction}
\newlec
\begin{notation}
We write
\begin{itemize}
\item $[n] = \{1, \dots, n\}$
\item $K_n$ for the complete graph on $n$ vertices.
\item For $X$ a set, $r \in \N$, $X^{(r)} = \{S \subseteq X | |S| = r\}$
\item $\chi$ for a $k$-coloring of the edges of $K_n$
\begin{align*}
\chi : E(K_n) & \to [k] \\
\chi : E(K_n) & \to \{\text{red}, \text{blue}\} & (\text{if } k = 2)
\end{align*}
\end{itemize}
\end{notation}
Ramsey theory is usually concerned with the following question:
\begin{quotation}
\textit{Can we find some order in enough disorder?}
\end{quotation}
In this course, we will specialise this question to graphs. We are thus interested in the following:
\begin{quotation}
\textit{What can we say about the structure of an arbitrary $2$-coloring of the edges of $K_n$?}
\end{quotation}
\begin{dfn}
Define the {\bf Ramsey number} $R(\ell, k)$ to be the least $n$ for which every $2$-edge coloring contains either a blue $K_\ell$ or a red $K_k$, and the {\bf diagonal Ramsey number} $R(k) = R(k, k)$ to be the least $n$ for which every $2$-edge coloring contains a monochromatic $K_k$.
\end{dfn}
It is unclear that such an $n$ even exists! We shall prove it in due course.
$R(\ell, k) = R(k, \ell)$. By convention, we will usually assume $\ell \le k$.
\begin{eg}
$R(3) = 6$ because
\begin{itemize}
\item The following coloring shows that $R(3) > 5$. TODO: add picture
\item If we have $6$ vertices, we can pick a vertex $v$. By pigeonhole, three of the neighbors of $v$ are connected to $v$ via the same color, say red. Now either two of those neighbors are connected with a red edge, in which case they form a red triangle with $v$, or they are connected with blue edges to each other, in which case they form a blue triangle. As a way to remember this proof, we encourage you to watch the following music video: \href{https://youtu.be/vE7MW2lk55E}{Everybody's looking for Ramsey}
\end{itemize}
\end{eg}
\section{Old bounds on \texorpdfstring{$R(\ell, k)$}{R(l, k)}}
\begin{thm}[Erd\H os-Szekeres, 1935]
$$R(\ell, k) \le \binom{k + \ell - 2}{\ell - 1}$$
In particular, $R(\ell, k)$ is well-defined.
\end{thm}
\begin{lem}
For all $k, \ell \ge 3$,
$$R(\ell, k) \le \underbrace{R(\ell - 1, k)}_a + \underbrace{R(\ell, k - 1)}_b$$
\end{lem}
\begin{proof}
Let $n = a + b$. Pick a vertex $v$. By pigeonhole, either
\begin{itemize}
\item $v$ has at least $a$ red neighbors. Either these neighbors contain a red $K_{\ell - 1}$ (in which case we chuck $v$ in), or contain a blue $K_k$ (in which case we already won).
\item $v$ has at least $b$ blue neighbors. Either these neighbors contain a blue $K_{k - 1}$ (in which case we chuck $v$ in), or contain a red $K_\ell$ (in which case we already won).
\end{itemize}
\end{proof}
\begin{proof}[Proof of Erd\H os-Szekeres]
Use that $R(\ell, 2) = \ell$ and induct on $k$ and $\ell$.
\end{proof}
\begin{cor}
$$R(k) \le \binom{2k} k \le C\frac{4^k}{\sqrt k}$$
for some constant $C$.
\end{cor}
\subsection{Lower bounds}
Can we find edge colorings on many vertices without a monochromatic $K_k$?
Certainly, we can at least do so on $(k - 1)^2$ vertices.
TODO: Insert figure
This polynomial lower bound is eons away from our exponential upper bound. For quite some time (in the 1930s), people thought that the lower bound was closer to the truth than the upper bound. Surprisingly, it is possible to show an exponential lower bound without actually exhibiting such a coloring!
\begin{thm}[Erd\H os, 1948]
$$R(k) \ge \frac{k - 1}{e\sqrt 2}2^{\frac k 2}$$
\end{thm}
\begin{fact}
$$\left(\frac n k\right)^k \le \binom n k \le \left(\frac{en}k\right)^k$$
\end{fact}
\begin{proof}
Let $n = \ceil{\frac{k - 1}{e\sqrt 2}2^{\frac k 2}}$ and $\chi$ be a random red/blue edge coloring of $K_n$ (each edge is independently colored red or blue with probability $\frac 1 2$). We see that
\begin{align*}
\P(\chi \text{ contains a monochromatic } K_k)
& = \P\left(\bigcup_{S \in [n]^{(k)}} \{S \text{ monochromatic}\}\right) \\
& \le \binom n k \P([k] \text{ monochromatic}) \\
& = \binom n k 2^{-\binom k 2 + 1} \\
& \le 2 \left(\frac{en}k\right)^k 2^{-\frac{k(k - 1)}2} \\
& = 2 \left(\frac{en}k 2^{-\frac{k - 1}2}\right)^k \\
& \le 2\left(1 - \frac 1 k\right)^k \\
& < 1
\end{align*}
Hence
$$2^{\frac k 2} \le R(k) \le 4^k$$
\end{proof}
This proof is remarkable by the fact that it proves that the probability of some object existing is high, without actually constructing such an object. In fact it is still an important open problem to explicitly construct a $K_k$-free edge-coloring of $K_n$ with $n$ exponential in $k$. In other words, {\it even though $K_k$-free edge-colorings are abundant, we don't know how to write down a single one}.
\begin{rmk}
The use of ``constructive'' here is quite different to that in other areas of mathematics. We do not mean that the proof requires the Law of Excluded Middle or the Axiom of Choice, nor that we do not provide an algorithm to find a graph without monochromatic $K_k$. \\
Since there are only finitely many red/blue edge-colorings of $K_n$ for a fixed $n$, there trivially is an algorithm to find such a coloring: enumerate them all and try them one by one. Less obviously, there is a procedure to systematically remove any use of the axiom of choice from the proofs of most of the results in this course. Excluded Middle is also redundant since the case splits we consider can be decided in finite time (again, everything is finite). \\
A more careful definition of ``constructive'' here is about complexity of the description of the object: Erd\H os' lower bound does not provide any better {\it deterministic} algorithm than "Try all edge-colorings", and this has complexity $\Omega\left(2^{\binom n 2}\right)$ (without even accounting for the time it takes to check whether a coloring contains a monochromatic $K_k$). In contrast, we would expect a constructive lower bound to yield an edge-coloring in a polynomial number of operations in $n$.
\end{rmk}
\begin{question}
What's the base of the exponent here? Is there even such a base?
\end{question}
\newlec
We know
$$R(3, k) \le \binom{k + 1}2 \le (k + 1)^2$$
\begin{dfn}
An {\bf independent set} in a graph is a set of vertices that does not contain an edge. The {\bf independence number} $\alpha(G)$ is the maximum size of an independent set of $G$.
\end{dfn}
\begin{dfn}[Binomial Random Graph]
For $n \in \N, 0 \le p \le 1$, we define $G(n, p)$ the probability space of graphs where each edge is independently present with probability $p$.
\end{dfn}
\begin{thm}[Erd\H os]
$$R(3, k) \ge c\left(\frac k{\log k}\right)^{\frac 32}$$
for some constant $c > 0$.
\end{thm}
\begin{idea}
We will look at a binomial random graph and choose the parameters so that there are very few red $K_k$ and the number of blue $K_3$ is at most some fixed proportion of $n$. Then we will remove one vertex from each red $K_k$ and one vertex from each blue $K_3$. The resulting graph will have neither and most likely will still contain a fixed proportion of the vertices we started with.
\end{idea}
\begin{proof}
Change the language. Discuss the blue graph. We are now looking for the maximum number of edges of a graph with no triangles and no independent set of size $k$. \\
Take $n = \left(\frac k{\log k}\right)^{\frac 32}, p = n^{-\frac 23} = \frac{\log k}k$. Now sample $G \sim G(n, p)$ and define $\tilde G$ to be $G$ with one vertex removed from each triangle and independent set of size $k$. By construction, $K_3 \not\subseteq \tilde G$ and $\alpha(\tilde G) < k$. We will show $\bbE |\tilde G| \ge \frac n2$ using
$$|\tilde G| \ge n - \#\text{triangles in } G - \#\text{independent sets of size $k$ in } G$$
First,
$$\bbE \#\text{triangles in } G
= \sum_{T \in [n]^{(3)}} \P(T \text{ triangle in } G)
= \binom n3 p^3 \le \frac{(np)^3}6 = \frac n6$$
Second,
\begin{align*}
\bbE \#\text{independent sets of size $k$ in } G
& = \binom nk (1 - p)^{\binom k2} \\
& \le \left(\frac{en}k\right)^k e^{-p\binom k2} \\
& \sim \left(\frac{en}k e^{-\frac{pk}2}\right)^k \\
& = \left(\frac{ek^{\frac 32}}{k\log^{\frac 32} k} e^{-\frac{\log k}2}\right)^k \\
& = \left(\frac e{\log^{\frac 32} k}\right)^k \longrightarrow 0
\end{align*}
Hence, for large enough $k$,
$$\bbE |\tilde G| \ge n - \frac n6 - 1 \ge \frac n2 = \frac 12 \left(\frac k{\log k}\right)^{\frac 32}$$
By adjusting $c > 0$, we have proved the theorem.
\end{proof}
\begin{rmk}
The values of $n$ and $p$ come from the constraints
$$n^3p^3 \ll n, \quad \frac{\log n}k \ll p$$
\end{rmk}
We are being wasteful here. Why throw an entire vertex away when we could get away with removing a single edge? Because we might accidentally create an independent set of size $k$. But we can be smarter...
\begin{idea}
Take a maximal collection of edge-disjoint triangles in $G \sim G(n, p)$ and remove all edges from these triangles.
\end{idea}
\begin{thm}[Erd\H os]
$$R(3, k) \ge c\left(\frac k{\log k}\right)^2$$
for some constant $c > 0$.
\end{thm}
\begin{lem}
Let $\mathcal F = \{A_1, \dots, A_m\}$ be a family of events in a probability space. Let $\Eps_t$ be the event that $t$ \emph{independent} events from $\mathcal F$ occur. Then
$$\P(\Eps_t) \le \frac 1{t!}\left(\sum_{i = 1}^m \P(A_i)\right)^t$$
\end{lem}
\begin{proof}
Note that
$$1_{\Eps_t} \le \frac 1{t!} \sum_{\substack{i \in [m]^t \\ A_{i_1}, \dots, A_{i_t}\text{ independent}}} 1_{A_{i_1}} \dots 1_{A_{i_t}}$$
So
\begin{align*}
\P(\Eps_t)
& \le \frac 1{t!} \sum_{\substack{i \in [m]^t \\ A_{i_1}, \dots, A_{i_t}\text{ independent}}} \P(A_{i_1}) \dots \P(A_{i_t}) \\
& \le \frac 1{t!} \sum_{i \in [m]^t} \P(A_{i_1}) \dots \P(A_{i_t}) \\
& = \frac 1{t!} \left(\sum_{i = 1}^m \P(A_i)\right)^t
\end{align*}
\end{proof}
\newlec
\begin{idea}
Instead of killing vertices, we will kill edges. We only need to kill edges from a maximal set of edge-disjoint triangles. For this killing to create an independent set $I$ of size $k$, we must have killed all edges in $I$. But with high probability all sets of size $k$ contain a fixed fraction of the expectation $p\binom k2$ of the number of edges, and having so many edge-disjoint triangles each with two vertices among $k$ fixed vertices is unlikely.
\end{idea}
\begin{lem}
Let $n, k \in \N, p \in [0, 1]$ be such that $pk \ge 16\log n$. Then with high probability every subset of size $k$ of $G \sim G(n, p)$ contains at least $\frac{pk^2}8$ edges.
\end{lem}
\begin{proof}[Proof of Erd\H os' bound]
We fix $n = \left(\frac{c_1 k}{\log k}\right)^2, p = c_2n^{-\frac 12} = \frac{c_2}{c_1}\frac{\log k}k$. Let $G \sim G(n, p), \mathcal T$ a maximal collection of edge-disjoint triangles in $G$, $\tilde G$ be $G$ with all edges of $\mathcal T$ removed. Note, $\tilde G$ contains no triangle. We show
$$\P(\alpha(\tilde G) \ge k) < 1$$
Let $Q$ be the event that every set of $k$ vertices of $G$ contains $\ge \frac{pk^2}8$ edges. Setting $\frac{c_2}{c_1} = 48$, we get
$$pk = \frac{c_2}{c_1}\frac{\log k}k k = 48\log k > 16\log n$$
so that $\P(Q) = 1 - o(1)$ by the lemma. Now note that
$$\P(\alpha(\tilde G) \ge k) \le \P(\alpha(\tilde G) \ge k, Q) + \cancelto 0{\P(Q^c)}$$
So we focus on $\P(\alpha(\tilde G) \ge k, Q)$. Observe that if $\tilde G$ contains an independent set $I$ of size $k$ and $Q$ holds, then $I$ contains $\ge \frac{pk^2}8$ edges of $G$ by assumption. But $I$ is an independent set in $\tilde G$, so all $\frac{pk^2}8$ edges must have belonged to some triangle $T \in \mathcal T$ and been removed. Therefore
\begin{align*}
\P(\alpha(\tilde G) \ge k, Q)
& \le \P\left(\exists S \in [n]^{(k)}, \mathcal T \text{ meets $S$ in $\ge \frac{pk^2}8$ edges}\right) \\
& \le \binom n k \P\left(\underbrace{\substack{\text{at least $t$ triangles of $\mathcal T$} \\ \text{meet $[k]$ in at least two vertices}}}_B\right)
\end{align*}
where $t = \frac{pk^2}{24}$. Let $\{T_i\}$ be the collection of triangles in $K_n$ that meet $[k]$ in at least two vertices. Let $A_i = \{T_i \subseteq G\}$. Note that if $T_{i_1}, \dots, T_{i_k}$ are edge-disjoint, then $A_{i_1}, \dots, A_{i_k}$ are independent. So
\begin{align*}
\P(B)
& \le \P(\Eps_t) \\
& \le \frac 1{t!}\left(\sum_{\substack{T_i \subseteq K_n \text{ intersects } [k] \\ \text{ in at least two vertices}}} \P(T_i \subseteq G)\right)^t \\
& \le \frac 1{t!} (k^2np^3)^t \\
& \le \left(\frac{ek^2np^3}t\right)^t \\
& = (24enp^2)^t = (24ec_2^2)^t = e^{-t}
\end{align*}
by choosing $c_2 = \frac 1{\sqrt{24} e}$. To finish, observe that
$$t = \frac{pk^2}{24} = 2k\log k \ge k\log n$$
Hence
$$\binom n k \P(B) \le \binom n k e^{-t} \le \left(\frac{en}k e^{-\log n}\right)^k = \left(\frac ek\right)^k \to 0$$
\end{proof}
\subsection{Large deviation inequalities}
Let $Z$ be a gaussian random variable.
$$\P(Z - \bbE Z \ge t) \le e^{-\frac{t^2}{2\Var Z}}$$
Let $X_1, \dots, X_n$ be iid Bernoulli random variables. We denote this $X_i \sim \Ber(p)$. Write $S_n = X_1 + \dots + X_n$. Note $\bbE S_n = np, \Var(S_n) = np(1 - p)$.
\begin{idea}
Often, the tail of $S_n$ looks like a gaussian tail.
\end{idea}
\begin{thm}[Chernoff inequality]
Let $X_1, \dots, X_n \sim \Ber(p)$. Then
$$\P\left(\abs{S_n - pn} \ge t\right) \le 2\exp\left(\underbrace{-\frac{t^2}{2pn}}_{\text{meat}} + \underbrace{\frac{t^3}{(pn)^2}}_{\text{error term}}\right)$$
\end{thm}
\newlec
\begin{proof}[Proof of the $\frac{pk^2}8$ lemma]
Using Chernoff on $e(G[[k]])$, namely with $p := p, n := \binom k2, t := \frac{pk^2}4$, we get
\begin{align*}
\P(G \text{ fails the statement})
& = \P\left(\exists S \in [n]^{(k)}, e(G[S]) < \frac{pk^2}8\right) \\
& \le \binom nk \P\left(e(G[[k]]) < \frac{pk^2}8\right) \\
& \le \binom nk \P\left(\frac{pk^2}4 < \abs{e(G[[k]]) - p\binom k2}\right) \\
& \le 2\left(\frac{en}k\right)^k \exp\left(-\frac{pk^2}{16} + \frac 18\right) \\
& \ll \left(\frac{en}k\right)^k \exp\left(-k\log n\right) \\
& = \left(\frac ek\right)^k
\end{align*}
which tends to 0 as $k$ tends to infinity.
\end{proof}
\subsection{The Local Lemma}
The probabilistic method is like finding the hay in the hay stack. What if we want to find the needle?
\begin{dfn}
Let $\mathcal F = \{A_1, \dots, A_n\}$ be a family of events in a probability space. A {\bf dependency graph} $\Gamma$ is a graph with vertices $\mathcal F$ such that the event $A_i$ is independent of $\sigma(A_j \mid j \not\sim i)$ for all $i \in [n]$.
\end{dfn}
\begin{rmks}~
\begin{itemize}
\item A dependency graph is not unique.
\item The complete graph is always a dependency graph.
\item The empty graph is a dependency graph iff the $A_i$ are globally independent.
\end{itemize}
\end{rmks}
\begin{thm}[The Local Lemma, symmetric version]
Let $\mathcal F = \{A_1, \dots, A_n\}$ be a family of events in a probability space, let $\Gamma$ be a dependency graph for $\mathcal F$ with maximum degree $\Delta$. If $\P(A_i) \le \frac 1{e(\Delta + 1)}$ for all $i$, then
$$\P\left(\Inter_i A_i^c\right) > 0$$
\end{thm}
\begin{thm}[Spencer]
$$R(k) \ge (1 - o(1)) \frac{\sqrt 2k}e 2^{\frac k2}$$
\end{thm}
\begin{proof}
Let $n = (1 - \eps)\frac{\sqrt 2k}e2^{\frac k2}$ for some $\eps > 0$. Let $\chi$ be a random edge-coloring of $K_n$ uniformly over all colorings. Define, for $S \in [n]^{(k)}$, the event
$$A_S = \{S \text{ is monochromatic under } \chi\}$$
Note we want $\P\left(\Inter_{S \in [n]^{(k)}} A_S^c\right) > 0$. Define the dependency graph $\Gamma$ by
$$S \sim T \iff 1 < \abs{S \inter T} < k$$
The maximum degree of $\Gamma$ is
$$\Delta = \sum_{t = 2}^{k - 1}\binom kt\binom{n - k}{k - t} = \binom nk - k\binom{n - k}{k - 1} - \binom{n - k}k - 1$$
To apply the Local Lemma, we just check
$$\P(A_S) = 2^{-\binom k2 + 1} \le \frac 1{e(\Delta + 1)}$$
\end{proof}
\begin{thm}[Lopsided Local Lemma]
Let $\mathcal F = \{A_1, \dots, A_n\}$ be a family of events on a probability space, $\Gamma$ a dependency graph for $\mathcal F$, $0 \le x_1, \dots, x_n < 1$ satisfying
$$\P(A_i) \le x_i \prod_{j \sim i}(1 - x_j)$$
Then
$$\P\left(\Inter_i A_i^c\right) \ge \prod_i (1 - x_i) > 0$$
\end{thm}
\begin{thm}[Erd\H os, 1961]
$$R(3, k) \ge c\left(\frac k{\log k}\right)^2$$
for some $c > 0$.
\end{thm}
\begin{proof}
Let $n = \eps^4\left(\frac k{\log k}\right)^2, p = \frac\eps{\sqrt n} = \frac{\log k}{\eps k}, G \sim G(n, p)$. For all $T \in [n]^{(3)}$ and $I \in [n]^{(k)}$, define $A_T = \{T \subseteq G\}$ and $B_I = \{I \subseteq G^c\}$. We want
$$\P\left(\Inter_{T \in [n]^{(3)}} A_T^c \inter \Inter_{I \in [n]^{(k)}} B_I^c\right) > 0$$
Define the dependency graph $\Gamma$ by
\begin{align*}
T \sim T' & \iff \abs{T \inter T'} = 2 \\
T \sim I & \iff 2 \le \abs{I \inter T} \\
I \sim I' & \iff 2 \le \abs{I \inter I'} < k
\end{align*}
We see that
\begin{align*}
\#\{T' \in [n]^{(3)} \mid T' \sim T\} \le 3n & , \#\{I \in [n]^{(k)} \mid I \sim T\} \le 3n^{k - 2} \\
\#\{T \in [n]^{(3)} \mid T \sim I\} \le k^2n & , \#\{I' \in [n]^{(k)} \mid I' \sim I\} \le k^2n^{k - 2}
\end{align*}
We therefore check that
\begin{enumerate}
\item $\P(A_T) \le x_T \prod_{T' \sim T} (1 - x_{T'}) \prod_{I \sim T} (1 - x_I)$. Indeed,
\begin{align*}
\lhs & = p^3 \\
\rhs & \ge 3p^3 (1 - 3p^3)^{3n}(1 - n^{-k})^{3n^{k - 2}}
\end{align*}
and, using $1 - x \ge e^{-2x}$ for small enough $x$,
$$(1 - 3p^3)^{3n}(1 - n^{-k})^{3n^{k - 2}} \ge \exp(-18p^3n - 6n^{-2}) = \exp(-18\eps^2 p - 6n^{-2}) \to 1$$
\newlec
\item $\P(B_I) \le x_I \prod_{T \sim I}(1 - x_T) \prod_{I' \sim I} (1 - x_{I'})$. Indeed,
\begin{align*}
\lhs & = (1 - p)^{\binom k2} \\
\rhs & = n^{-k}(1 - n^{-k})^{k^2n^{k - 2}}(1 - 3p^3)^{k^2n}
\end{align*}
and, using $1 - x \ge e^{-2x}$ for small enough $x$,
\begin{align*}
\log\rhs
& \ge -k\log n - 2k^2n^{-2} - 6k^2p^3n \\
& \ge -2k\log k - 6k^2p\eps^2 + o(1) \\
& \ge -\frac{k\log k}{4\eps} + o(1) \\
& \ge -p\binom k2 \\
& \ge \log\lhs
\end{align*}
\end{enumerate}
\end{proof}
\begin{proof}[Proof of the Local Lemma]
Applying $\P(A \inter B) = \P(A) \P(B \mid A)$ repeatedly, write
\begin{align*}
\P\left(\Inter_{i = 1}^n A_i^c\right)
& = \prod_{i = 1}^n \P(A_i^c \mid A_1^c \inter \dots \inter A_{i - 1}^c) \\
& = \prod_{i = 1}^n (1 - \P(A_i \mid A_1^c \inter \dots \inter A_{i - 1}^c))
\end{align*}
It is enough to show that $\P(A_i \mid A_1^c \inter \dots \inter A_{i - 1}^c) \le x_i$. We prove
$$\P(A_i \mid \Inter_{j \in S} A_j^c) \le x_i$$
for all $S \subseteq [n]$ by induction:
\begin{itemize}
\item $S = \emptyset$. Done by assumption.
\item Write
$$I = \Inter_{\substack{j \in S \\ j \not\sim i}} A_j^c, D = \Inter_{\substack{j \in S \\ j \sim i}} A_j^c$$
So
$$\P(A_i \mid I \inter D) = \frac{\P(A_i \inter I \inter D)}{\P(I \inter D)} \le \frac{\P(A_i \inter I)}{\P(I \inter D)} = \frac{\P(A_i)\P(I)}{\P(I \inter D)} = \frac{\P(A_i)}{\P(D \mid I)}$$
Now, write $D = A_{i_1}^c \inter \dots \inter A_{i_m}^c$ and
$$\P(D \mid I) = \prod_{j = 1}^m (1 - \P(A_{i_j} \mid I \inter A_{i_1}^c \inter \dots \inter A_{i_{j - 1}}^c)) \ge \prod_{j = 1}^m (1 - x_{i_j}) \ge \prod_{j \sim i} (1 - x_j)$$
\end{itemize}
\end{proof}
\begin{thm}[Lovasz Local Lemma, symmetric version]
For $\mathcal F = \{A_1, \dots, A_n\}$, $\Gamma$ a dependency graph with maximum degree $\Delta$, if $\P(A_i) \le \frac 1{e(\Delta + 1)}$ for all $i$, then
$$\P\left(\Inter_i A_i^c\right) \ge \left(1 - \frac 1{\Delta + 1}\right)^n > 0$$
\end{thm}
\begin{proof}
We use the Lopsided Local Lemma with $x_i = \frac 1{\Delta + 1}$. Note
$$x_i \prod_{j \sim i} (1 - x_j) \ge \frac 1{\Delta + 1}\left(1 - \frac 1{\Delta + 1}\right)^\Delta \ge \frac 1{e(\Delta + 1)} \ge \P(A_i)$$
So Lopsided Local Lemma applies.
\end{proof}
We now know
$$\frac{ck^2}{(\log k)^2} \le R(3, k) \le (k + 1)^2$$
\begin{thm}[State of the art on $R(3, k)$]
$$\underbrace{\left(\frac 14 + o(1)\right)\frac{k^2}{\log k}}_{\substack{\text{Fiz Pontiveros} \\ \text{Griffiths} \\ \text{Morris}} + \substack{\text{Bohman} \\ \text{Keevash}}} \le R(3, k) \le \underbrace{(1 + o(1))\frac{k^2}{\log k}}_{\substack{\text{Ajtai} \\ \text{Komlós} \\ \text{Szemerédi}} + \text{Shearer}}$$
\end{thm}
\clearpage
\subsection{Upper bounds on \texorpdfstring{$R(3, k)$}{R(3, k)}}
\newlec
\begin{thm}[Ajtai, Komlós, Szemerédi]
$$R(3, k) \le c \frac{k^2}{\log k}$$
for some $c > 0$. In fact, we will see $c = 1 + o(1)$.
\end{thm}
\begin{thm}[Ajtai, Komlós, Szemerédi]
Let $G$ be a triangle-free graph on $n$ vertices with maximum degree $\Delta$. Then
$$\alpha(G) \ge c \frac n\Delta \log\Delta$$
for some absolute constant $c > 0$.
\end{thm}
\begin{rmk}
For general graphs, we know $\alpha(G) \ge \frac n{\chi(G)} \ge \frac n{\Delta + 1}$ by the naïve greedy algorithm and this is basically best possible. The extra $\log \Delta$ factor will come from tracking how sparse our graph is becoming as we remove vertices from it.
\end{rmk}
We apply a random greedy algorithm to prove the following theorem due to Shearer.
Define
$$f(x) = \frac{x \log x - x + 1}{(x - 1)^2}$$
extending continuously at $x = 0$ and $x = 1$ by $f(0) = 1, f(1) = \frac 12$. We remark that
\begin{itemize}
\item $f$ is continuous and differentiable.
\item $f$ is antitone and convex.
\item $0 < f(x) < 1$
\item $(x + 1)f(x) = 1 + (x - x^2)f'(x)$
\end{itemize}
\begin{thm}[Shearer]
Let $G$ be a triangle-free graph on $n$ vertices with average degree $d > 0$.
Then
$$\alpha(G) \ge nf(d) \ge (1 - o(1)) \frac n\Delta \log\Delta$$
\end{thm}
\begin{proof}
Induction on $n$.
Let $x$ a vertex to be chosen later and $G' := G - x - N(x)$. Writing $d'$ the average degree of $G'$ and applying induction to $G'$, we get
\begin{align*}
\alpha(G)
& \ge 1 + \alpha(G') \\
& \ge 1 + (n - \deg x - 1)f(d') \\
& \ge 1 + (n - \deg x - 1)(f(d) + (d' - d)f'(d)) \\
& = nf(d) + 1 - (\deg x + 1)f(d) + (d\deg x + d + n(d' - d) - d'\deg x - d')f'(d) \\
& = nf(d) + 1 - (\deg x + 1)f(d) + \left(d\deg x + d - 2\sum_{y \sim x} \deg y\right)f'(d)
\end{align*}
where we used that $N(x)$ is an independent set to get that
\begin{align*}
n(d - d')
& = n \left(\frac{2e(G)}n - \frac{2e(G')}{n - \deg x - 1}\right) \\
& = 2(e(G) - e(G')) - (\deg x + 1)d' \\
& = 2\sum_{y \sim x} \deg y - (\deg x + 1)d'
\end{align*}
So we want to choose $x$ such that
$$(\deg x + 1)f(d) \le 1 + (d\deg x + d - 2\sum_{y \sim x} \deg y)f'(d)$$
We average over $x$:
\begin{align*}
\E_x \lhs & = (d + 1)f(d) \\
\E_x \rhs & = 1 + (d^2 + d - 2\E_x\sum_{y \sim x} \deg y)f'(d)
\end{align*}
Notice that
$$\E_x\sum_{y \sim x} \deg y = \frac 1n \sum_x\sum_{y \sim x} \deg y = \frac 1n \sum_x (\deg x)^2 \ge \left(\frac 1n\sum_x \deg x\right)^2$$
Since $f'(d) \le 0$, we get
$$\E_x \rhs \ge 1 + (d^2 + d - 2d^2)f'(d) = 1 + (d - d^2)f'(d) = (d + 1)f(d) = \E_x \lhs$$
So such $x$ exists.
\end{proof}
\begin{proof}[Proof of the AKS bound using Shearer]
Let $G$ be a graph on $n$ vertices with neither a triangle nor an independent set of size $k$. We have
$$d \le \Delta \le \alpha(G) < k$$
where the middle inequality holds by triangle freeness. Hence Shearer says
$$k > \alpha(G) \ge \frac nd \log d \ge \frac nk \log k$$
And $n \le \frac{k^2}{\log k}$ as wanted.
\end{proof}
\newlec
\begin{proof}[Second proof of AKS]
Let $I$ be an independent set in $G$ sampled uniformly among all independent sets of $G$. We will show
$$\bbE\abs I \ge c\frac nd\log d$$
Let $v$ be a vertex. We define the random variable
$$X_v = d1_{v \in I} + \abs{N(v) \inter I}$$
For any independent set $I$,
$$\sum_v X_v \le 2d\abs I$$
So
$$\sum_v \E_I X_v \le 2d \E_I \abs I$$
So we want to show that $\E_I X_v \ge c \log d$ for all $v$. \\
Let $G' = G - v - N(v)$, find $J$ an independent set in $G'$ minimising $\E_I[X_v \mid I \setminus (N(v) \union \{v\}) = J]$ and let $F = \{w \in N(v) \mid N(w) \inter J = \emptyset\}$ and $t = \abs F$. Note carefully that $I \inter (F \union \{v\})$ is uniform over all independent sets in $G[F \union \{v\}]$, and that the independent sets of $G[F \union \{v\}]$ are exactly $\{v\}$ and the $2^t$ subsets of $F$. Hence
$$\E_I X_v \ge \E_{I \setminus (N(v) \union \{v\}) = J} X_v = \E_{I \subseteq F \union \{v\}} X_v = \frac 1{2^t + 1}d + \frac{2^t}{2^t + 1}\frac t2 \ge c \log d$$
for some $c > 0$ by optimising over $t$.
\end{proof}
\begin{thm}[Ajtai-Komlós-Szemerédi]
Let $\ell \in \N$. Then for sufficiently large $k \in \N$ we have
$$R(\ell, k) \le \left(\frac 4{\log k}\right)^{\ell - 2}k^{\ell - 1}$$
\end{thm}
We have seen that the power of $k$ is correct for $\ell = 3$. As of recently, we also know that it is correct for $\ell = 4$.
\clearpage
\section{3-uniform hypergraph Ramsey numbers}
We define $K_n^{(r)}$ to be the {\bf complete $r$-uniform hypergraph} on $n$ vertices. The {\bf $r$-uniform hypergraph Ramsey number} $R^{(r)}(\ell, k)$ is the minimal $n$ such that every red/blue edge coloring of $K_n^{(r)}$ contains either a blue $K_\ell^{(r)}$ or a red $K_k^{(r)}$. As before, the {\bf hypergraph diagonal Ramsey number} is $R^{(r)}(k) = R^{(r)}(k, k)$.
\begin{thm}[Erd\H os-Rado]
$$R^{(3)}(\ell, k) \le 2^{\binom{R(\ell - 1, k - 1)}2}$$
Eg $R^{(3)}(k) \le 2^{16^k}$.
\end{thm}
\begin{proof}
Let $t = R(\ell - 1, k - 1)$ and $n = 2^{\binom t2}$. Let $\chi$ be a red/blue edge coloring of $K_n^{(3)}$. Let $v_1, v_2 \in [n]$. Define
$$A_{1, 2} = \{w \in [n] \mid \chi(\{v_1, v_2, w\}) = c_{1, 2}\}$$
where $c_{1, 2}$ is the {\bf majority color}, chosen so that
$$\abs{A_{1, 2}} \ge \frac n2 - 1$$
Let $v_3 \in A_{1, 2}$. Define
$$A_{1, 3} = \{w \in A_{1, 2} \mid \chi(\{v_1, v_3, w\}) = c_{1, 3}\}$$
where $c_{1, 3}$ is the majority color. Now define
$$A_{2, 3} = \{w \in A_{1, 3} \mid \chi(\{v_2, v_3, w\}) = c_{2, 3}\}$$
where $c_{2, 3}$ is the majority color, and so on... \\
After $t$ steps, our world has size $\abs{A_{t - 1, t}} \ge n2^{-\binom t2} \ge 1$. We thus have $\{v_1, \dots, v_t\}$ such that $\chi(\{v_i, v_j, v_k\}) = c_{i, j}$ if $k > i, j$. $c$ is a coloring of $\{v_1, \dots, v_t\}^{(2)}$. By definition of Ramsey, we're done.
\end{proof}
\clearpage
\subsection{Off-diagonal}
\newlec
Erd\H os-Rado gives
$$R^{(3)}(4, k) \le 2^{ck^4}$$
\begin{thm}[Conlon, Fox, Sudakov, 2010]
$$R^{(3)}(4, k) \le k^{ck^2}$$
\end{thm}
Erd\H os-Rado makes us shrink our world by a factor of $2$ at every query. Can we ask fewer questions? This suggests the following game.
\begin{dfn}[The Ramsey game]
At each question, we expose a new vertex $v_i$ and get to ask our adversary to expose the color of a collection of edges $v_j v_i$ where $j < i$. Our goal is to force a blue $K_3$ or a red $K_k$ with as few queries as possible.
\end{dfn}
\begin{lem}
In the Ramsey game, we can force a blue $K_3$ or a red $K_k$ in at most $2k^3$ queries whose answer is ``red'' and $k^2$ queries whose answer is ``blue''.
\end{lem}
\begin{proof}
As we expose vertices, we sort them into {\bf levels} $1, 2, 3, \dots$ The first vertex at level $i$ is called the {\bf root} of level $i$ and denoted $r_i$.
We start by putting $v_1$ into level $1$ and setting $r_1 := v_1$. When we expose vertex $v_i$, we ask for the color of $v_ir_1, v_ir_2, \dots$ (running through the current roots in order) until we get replied ``blue''.
\begin{itemize}
\item If we get a blue response to $v_ir_j$ for some $j$, stick $v_i$ in level $j$ and expose all edges to previous vertices of level $j$.
\item If all $v_ir_j$ get replied ``red'', make $v_i$ a new root.
\end{itemize}
TODO: Insert picture
Assuming we have not encountered a blue $K_3$ or red $K_k$, every level contains at most $k$ vertices and there are at most $k$ levels. We have exposed at most $k^2$ red edges and $k$ blue edges in each level, and $k^3$ red edges between levels, so in total at most $2k^3$ red edges and $k^2$ blue edges.
\end{proof}
The idea now is that our adversary wants to reply ``red'' most of the time, so we're willing to shrink our world much more when they reply ``blue''.
\begin{proof}[Proof that $R^{(3)}(4, k) \le k^{ck^2}$]
The proof follows the proof of Erd\H os-Rado but we now only refine our world based on the pairs that we query in the Ramsey game. We also have a different rule about when to refine our world to be blue vs red.
Start with $A_0 = [n]$ where $n = k^{ck^2}$ and define
$$A_0 \supseteq A_1 \supseteq A_2 \supseteq \dots \supseteq A_m$$
where $e_j$ is the edge coming from the Ramsey game and
\begin{align*}
A_j^B & = \{x \in A_j \mid \chi(e_{j + 1} \union \{x\}) = \text{blue}\} \\
A_j^R & = \{x \in A_j \mid \chi(e_{j + 1} \union \{x\}) = \text{red}\} \\
A_{j + 1} & =
\begin{cases}
A_j^B & \text{ if } \abs{A_j^B} \ge \frac 1k \abs{A_j} \\
A_j^R & \text{ if } \abs{A_j^R} \ge \left(1 - \frac 1k\right) \abs{A_j}
\end{cases}
\end{align*}
At the end of time,
$$\abs{A_m} \ge n\left(\frac 1k\right)^{k^2}\left(1 - \frac 1k\right)^{2k^3} \ge k^{(c - 1)k^2}e^{-4k^2} \ge 1$$
if we pick $c$ large enough.
\end{proof}
What about lower bounds? Let's try the probabilistic method.
Color triples blue with probability $p$.
$$\bbE\left[\#\text{red }K_k^{(3)}\right] = \binom nk (1 - p)^{\binom k3} \le \left(\frac{en}k\right)^k e^{-p\binom k3} = \left(\frac{en}k e^{-\frac{pk^2}6}\right)^k$$
This is nontrivial if $p \gg \frac 1{k^2}$. Then
$$\bbE\left[\#\text{blue }K_4^{(3)}\right] = \binom n4 p^4 \ge \left(\frac{np}4\right)^4 \gg \left(\frac n{k^2}\right)^4$$
So the (naïve) probabilistic approach looks useless for anything better than polynomial in $k$.
\newlec
\begin{thm}
$$R^{(3)}(4, k) \ge 2^{\frac{k - 1}2}$$
\end{thm}
\begin{proof}
Let $n = 2^{\frac{k - 1}2} - 1$ and $T$ a random tournament on $[n]$. For $x, y, z \in [n]$ distinct, define
$$\chi(\{x, y, z\}) =
\begin{cases}
\text{blue} & \text{if $x, y, z$ is cyclically oriented} \\
\text{red} & \text{if $x, y, z$ is transitive}
\end{cases}$$
TODO: Insert oriented and acyclic pictures
\begin{obs}
A tournament on $4$ points has at least one transitive triple.
\end{obs}
This immediately implies there is no blue $K_4^{(3)}$.
\begin{obs}
If $K$ is a tournament where every triple is transitive, then $K$ itself is transitive.
\end{obs}
Hence
$$\bbE \#\text{red } K_k^{(3)} = \bbE \#\text{transitive } K_k = \binom nk k! 2^{-\binom k2} \le n^k 2^{-\binom k2} = \left(n2^{-\frac{k - 1}2}\right)^k < 1$$
Hence there exists a tournament with no red $K_k^{(3)}$.
\end{proof}
\begin{thm}[Conlon, Fox, Sudakov, 2010]
$$R^{(3)}(4, k) \ge k^{\frac k5}$$
for sufficiently large $k$.
\end{thm}
\begin{proof}[Proof (Stepping up)]
Set $n = k^{\frac k5}, r = R(3, \frac k4) - 1$. Let $\theta$ be an edge coloring of $K_r$ with no blue $K_3$ or red $K_{\frac k4}$. Now let $\sigma$ be a uniformly random edge coloring of $K_n$ in $r$ colors. For $x < y < z$, define
$$\chi(\{x, y, z\}) =
\begin{cases}
\theta(\{\sigma(xy), \sigma(xz)\}) & \text{ if } \sigma(xy) \ne \sigma(xz) \\
\text{ red} & \text{ if } \sigma(xy) = \sigma(xz)
\end{cases}$$
\begin{idea}
The fact that $\theta$ has no blue triangle will imply that $\chi$ has no blue $K_4^{(3)}$. The fact that $\theta$ has no red $K_{\frac k4}$ will imply that $\chi$ has no red $K_k^{(3)}$ with high probability.
\end{idea}
Assume that $x < y_1 < y_2 < y_3$ form a blue $K_4^{(3)}$. Note that $\sigma(xy_1)$, $\sigma(xy_2)$, $\sigma(xy_3)$ are distinct. So $\sigma(xy_1)\sigma(xy_2), \sigma(xy_2)\sigma(xy_3), \sigma(xy_3)\sigma(xy_1)$ are the edges of a triangle in $K_r$ which is blue in $\theta$. Contradiction.
TODO: Insert figure
Now assume that $K = \{x_1, \dots, x_k\}$ is a red $K_k^{(3)}$ in $\chi$. For each $i \in [k]$,
$$\abs{\{\sigma(x_ix_j) \mid j > i\}} < \frac k4$$
as otherwise one can find $i < j_1 < \dots < j_{\frac k4}$ such that $\sigma(x_ix_{j_1}), \dots, \sigma(x_ix_{j_{\frac k4}})$ are all distinct, meaning that they are vertices of a red $K_{\frac k4}$ in $\theta$.
TODO: Insert figure
Call such a $k$-set $K \subseteq [n]$ {\bf sad}. We consider
\begin{align*}
\bbE \#\text{ sad sets}
& \le n^k \prod_{i = 1}^k \binom r{\frac k4} \left(\frac k{4r}\right)^{k - i} \\
& = n^k \binom r{\frac k4}^k \left(\frac k{4r}\right)^{\sum_{i = 1}^k k - i} \\
& \le n^k \left(\frac{4er}k\right)^{\frac{k^2}4} \left(\frac k{4r}\right)^{\frac{k^2}4} \\
& = \left(n\left(\frac e4 \frac kr\right)^{\frac k4}\right)^k \\
& \le \left(nk^{-\frac k4 + o(k)}\right)^k \text{ since } r > k^{2 - o(1)} \\
& < \left(nk^{-\frac k5}\right)^k \\
& = 1 \text{ since } n = k^{\frac k5}
\end{align*}
Hence find some $\sigma$ such that no set is sad. We're done.
\end{proof}
\subsection{Diagonal}
\newlec
The state of the art for $R^{(3)}(k)$ is
$$2^{ck^2} \le R^{(3)}(k) \le 2^{2^{2k + 1}}$$
for some $c > 0$.
\begin{conj}[Erd\H os, Hajnal, Rado, 1965]
$$R^{(3)}(k) \ge 2^{2^{ck}}$$
for some $c > 0$.
\end{conj}
\begin{thm}[Erd\H os, Hajnal, 1980]
$$\underbrace{R_4^{(3)}(k)}_{4\text{ colors}} \ge 2^{2^{ck}}$$
for some $c > 0$.
\end{thm}
This is an example of ``stepping up''.
\begin{lem}
For all $k$,
$$R_4^{(3)}(k) \ge 2^{R(k - 1) - 1}$$
\end{lem}
\begin{proof}
Let $r = R(k - 1) - 1$ and let $\theta$ be a red-blue edge coloring of $K_r$ with no monochromatic $K_{k - 1}$. We now color triples on the ground set $\{0, 1\}^r$. For $x, y \in \{0, 1\}^r, x \ne y$, define
$$f(x, y) = \max \{i \mid x_i \ne y_i\}$$
so that
$$x < y \iff x_{f(x, y)} < y_{f(x, y)}$$
This is the reverse lexicographic order. Note that $f(x, y) \ne f(y, z)$ if $x < y < z$. We now define our coloring $\xi$ of $\{x, y, z\}, x < y < z$ to be one of four colors depending on which of the following hold:
$$f(x, y) < f(y, z), \quad \theta(f(x, y), f(y, z)) = \text{red}$$
Let $x_1 < \dots < x_k$ be the vertices of a monochromatic $K_k^{(3)}$, WLOG of the color where both conditions hold. We claim that $f(x_1, x_2) < f(x_2, x_3) < \dots < f(x_{k - 1}, x_k)$ are the vertices of a monochromatic, in fact red, $K_{k - 1}$ in $\theta$. Indeed, since the $x_i$ are increasing,
$$f(x_{i + 1}, x_{j + 1}) = \max_{i < \ell \le j} f(x_\ell, x_{\ell + 1}) = f(x_j, x_{j + 1})$$
So
$$\theta(f(x_i, x_{i + 1}), f(x_j, x_{j + 1})) = \theta(f(x_i, x_{i + 1}), f(x_{i + 1}, x_{j + 1})) = \text{red}$$
\end{proof}
\begin{rmk}
We know a version of this stepping up (due to Erd\H os-Hajnal) for 2-colorings when the uniformity is $\ge 4$.
\end{rmk}
\begin{thm}[Conlon, Fox, Sudakov, 2010]
$$R^{(4)}(2k + 1) \ge 2^{R^{(3)}(k - 1) - 1}$$
and for $s \ge 4$ we have
$$R^{(s + 1)}(k + 1) \ge 2^{R^{(s)}(k) - 1}$$
\end{thm}
The state of the art for $R^{(s)}(k)$ is
$$\underbrace{2^{\dots^{2^{c_0 k}}}}_{s - 2} \le R^{(s)}(k) \le \underbrace{2^{\dots^{2^{c_1 k}}}}_{s - 1}$$
\section{The Szemerédi Regularity Lemma}
{\bf Informal Statement} \\
For all $\eps > 0$, there exist numbers $\ell(\eps), L(\eps)$ such that every graph can be partitioned into $V_1, \dots, V_k$, where $\ell(\eps) \le k \le L(\eps)$, $\abs{\abs{V_i} - \abs{V_j}} \le 1$ such that, for all but $\eps\binom k2$ pairs $(i, j)$, the graph between $V_i$ and $V_j$ is ``random-like'' up to some coarseness $\eps$.
\begin{dfn}
In a graph $G$, let $X, Y$ be disjoint sets of vertices. We say that $(X, Y)$ is {\bf $\eps$-uniform} (aka {\bf $\eps$-regular}) if for all $X' \subseteq X, Y' \subseteq Y$ such that $\abs{X'} \ge \eps\abs X, \abs{Y'} \ge \eps\abs Y$ we have
$$\abs{d(X', Y') - d(X, Y)} < \eps$$
where
$$d(X', Y') = \frac{e(X', Y')}{\abs{X'}\abs{Y'}}$$
is the {\bf density} of edges.
\end{dfn}
\begin{rmk}
We need some lower bound on $\abs{X'}, \abs{Y'}$ in the definition, else uniformity will basically never hold.
\end{rmk}
\newlec
\begin{prop}
For $\eps > 0$, let $(X, Y)$ be an $\eps$-regular pair in a graph $G$. Let $p = d(X, Y)$. Then
\begin{align*}
\#\{x \in X \mid \abs{N(x) \inter Y} < (p - \eps)\abs Y\} & < \eps\abs X \\
\#\{x \in X \mid \abs{N(x) \inter Y} > (p + \eps)\abs Y\} & < \eps\abs X
\end{align*}
\end{prop}
\begin{proof}
Let $X' = \{x \in X \mid \abs{N(x) \inter Y} < (p - \eps)\abs Y\}$. By definition of $X'$,
$$d(X', Y) = \frac{e(X', Y)}{\abs{X'}\abs Y} < \frac{(p - \eps)\abs Y\abs{X'}}{\abs{X'}\abs Y} = p - \eps$$
So $\abs{X'} < \eps\abs X$ by definition of $\eps$-uniformity.
\end{proof}
\begin{lem}[Embedding lemma for triangles]
Let $\eps > 0$ and $p \ge 2\eps$. Let $G$ be a graph on $V = V_1 \union V_2 \union V_3$ where $V_1, V_2, V_3$ are disjoint of size $m \ge 1$, $(V_i, V_j)$ are $\eps$-uniform for $i \ne j$ and $d(V_i, V_j) \ge p \ge 2\eps$. Then there are at least
$$(1 - 2\eps)(p - \eps)^3m^3$$
triangles in $G$.
\end{lem}
\begin{proof}
We look at triangles with one vertex in each $V_i$. The number of $x \in V_1$ such that
$$\abs{N(x) \inter V_2} \ge (p - \eps)m, \quad \abs{N(x) \inter V_3} \ge (p - \eps)m$$
is at least $(1 - 2\eps)m$. For each such $x$, the number of $V_1, V_2, V_3$ triangles containing $x$ is at least
$$e(N(x) \inter V_2, N(x) \inter V_3) \ge (p - \eps)\abs{N(x) \inter V_2} \abs{N(x) \inter V_3} \ge (p - \eps)^3m^2$$
since $\abs{N(x) \inter V_2}, \abs{N(x) \inter V_3} \ge (p - \eps)m \ge \eps m$. So we get $(1 - 2\eps)(p - \eps)^3m^3$ triangles in total.
TODO: Insert figure
\end{proof}
\begin{dfn}
We say a partition $V = V_1 \union \dots \union V_k$ is an {\bf equipartition} if $\abs{\abs{V_i} - \abs{V_j}} \le 1$ for all $i, j$. Such a partition is {\bf $\eps$-uniform} if $(V_i, V_j)$ is $\eps$-uniform for all but $\eps\binom k2$ pairs $\{i, j\}$.
\end{dfn}
\begin{thm}[Szemerédi Regularity Lemma]
For all $\eps > 0, \ell \in \N$, there exists $L \in \N$ such that every graph has an $\eps$-uniform equipartition in $k \in [\ell, L]$ parts.
\end{thm}
\begin{rmks}~
\begin{itemize}
\item It is really important that $L$ does not depend on $\abs G$.
\item This does not directly say anything about graphs with $e(G) = o(n^2)$
\item The proof of the regularity lemma gives
$$L \le \underbrace{2^{\dots^2}}_{\eps^{-5}}$$
\item Gowers showed that
$$L \ge \underbrace{2^{\dots^2}}_{\eps^{-1/16}}$$
is needed.
\end{itemize}
\end{rmks}
\begin{lem}[Triangle removal lemma]
For all $\eps > 0$, there exists $\delta > 0$ such that every graph with at most $\delta n^3$ triangles contains a set of at most $\eps n^2$ edges that together kill all triangles.
\end{lem}
\begin{proof}
Let $\eps' = \eps/4, \ell = 10\eps^{-1}, L = L(\eps', \ell), \delta = 2^{-16}\eps^4L^{-3}$. By Szemerédi Regularity Lemma, find an $\eps'$-uniform equipartition
$$V = V_1 \union \dots \union V_k$$
where $\ell \le k \le L$. We say $(i, j)$ is {\bf bad} if any of the following holds:
\begin{enumerate}
\item $d(V_i, V_j) \le \eps/2$
\item $(V_i, V_j)$ is not $\eps'$-uniform
\item $i = j$
\end{enumerate}
Now define
$$T = \{xy \in E(G) \mid x \in V_i, y \in V_j, (i, j) \text{ bad}\}$$
Then
\begin{align*}
\abs T \le
& \#\text{edges between pairs with } d(V_i, V_j) \le \eps/2 \\
& + \#\text{edges between $\eps'$-non-uniform pairs} \\
& + \sum_i \#\text{edges inside } V_i \\
\le{} & \frac\eps 2 \binom k2 \left(\frac nk\right)^2 + \frac\eps 4 \left(\frac nk\right)^2 \binom k2 + k\left(\frac nk\right)^2 \\
\le{} & \frac\eps 4 n^2 + \frac\eps 8 n^2 + \frac{n^2}k \\
\le{} & \left(\frac 14 + \frac 18 + \frac 1{10}\right)\eps n^2 \\
\le{} & \eps n^2
\end{align*}
Now, $G - T$ is triangle-free. Indeed if $x, y, z$ is a triangle then there must be $a, b, c$ distinct such that $(V_a, V_b), (V_b, V_c), (V_c, V_a)$ are $\eps/4$-uniform and
$$d(V_a, V_b), d(V_b, V_c), d(V_c, V_a) \ge \frac\eps 2$$
So the triangle embedding lemma tells us that there are at least
$$\left(1 - 2 \frac\eps 4\right) \left(\frac\eps 4\right)^3 \left(\frac nk\right)^3 \ge \left(1 - \frac\eps 2\right)2^{-6}L^{-3}\eps^3n^3 > \delta n^3$$
triangles. Contradiction.
\end{proof}
\newlec
\begin{thm}[Roth's theorem]
For $\eps > 0$, there exists $N$ such that for all $n \ge N$ any $A \subseteq [n]$ of density at least $\eps$ contains a non-trivial arithmetic progression of length three.
\end{thm}
\begin{proof}
Assume $\eps > 0$. Let $\delta$ be the $\delta$ from the triangle removal lemma applied to $3\eps$, and set $N = 3\delta^{-1}$. Define a tripartite graph on $[3n], [3n], [3n]$ by declaring that $(x, x + a, x + 2a)$ are the edges of a triangle for each $x \in [3n]$ and $a \in A$. We call such a triangle {\bf explicit}. There are $3n\abs A \ge 3\eps n^2$ explicit triangles in our graph and they are edge-disjoint. Therefore triangle removal tells us that there are at least $\delta n^3 = 3N^{-1}n^3 > 3n^2 \ge 3n\abs A$ triangles in our graph. Hence there must be some triangle that's not explicit. But a non-explicit triangle has vertices $x, x + a, x + a + b = x + 2c$ with $a, b, c \in A$, $a + b = 2c$ and $a \ne c$; then $a, c, b$ is a non-trivial arithmetic progression of length three in $A$.
\end{proof}
\begin{thm}[Tur\' an]
Let $G$ be a $K_{r + 1}$-free graph. Then
$$e(G) \le \left(1 - \frac 1r\right)\frac{n^2}2$$
and this bound is sharp for the Tur\' an graph.
\end{thm}
But the Tur\' an graph is a bit pathological in the sense that it has huge independence number $\alpha = \frac nr$. What if we require $\alpha(G) = o(n)$?
For $r = 2$, the neighborhood of any vertex is an independent set, so $\deg v = o(n)$ and $e(G) = o(n^2)$. For $r = 3$, we get an interesting problem.
\begin{thm}[Szemerédi]\label{thm:k4-free-small-indep}
For $\eps > 0$, there exists $\delta > 0$ such that any $K_4$-free graph $G$ on $n$ vertices with $\alpha(G) < \delta n$ has
$$e(G) \le \frac{n^2}8 + \eps n^2$$
In other words, if $G$ is $K_4$-free and $\alpha(G) = o(n)$, then $e(G) \le \frac{n^2}8 + o(n^2)$.
\end{thm}
\begin{rmks}
The $\frac 18$ is sharp!
\end{rmks}
\begin{dfn}
For $0 < \alpha < 1, \eps > 0$ and a partition
$$V(G) = V_1 \union \dots \union V_k$$
the {\bf reduced graph} $R_{\alpha, \eps}$ is the graph whose vertices are $[k]$ and
$$i \sim j \iff (V_i, V_j) \text{ is $\eps$-uniform and } d(V_i, V_j) \ge \alpha$$
\end{dfn}
\begin{proof}[Proof of Theorem \ref{thm:k4-free-small-indep}]
Let $\eps > 0$. We may assume $\eps < 1/4$. Let $L = L(\eps/4, 10\eps^{-1}), \delta = \eps^3L^{-1}/10$. Assume $G$ is $K_4$-free and $\alpha(G) < \delta n$. We want
$$e(G) \le \frac{n^2}8 + \eps n^2$$
Regularity with $\eps/4$ and $\ell = 10/\eps$ gives a partition $V = V_1 \union \dots \union V_k$ where $\ell \le k \le L$. Let $R = R_{\eps, \eps/4}(V_1, \dots, V_k)$ be the corresponding reduced graph and $m = n/k$.
\begin{claim}
$R$ is triangle-free.
\end{claim}
\begin{proof}
Assume $V_a, V_b, V_c$ form a triangle in $R$. Since the pairs are $\eps/4$-uniform and have density at least $\eps$, the triangle embedding lemma tells us that there are at least
$$\left(1 - \frac\eps 2\right)\left(\frac{3\eps}4\right)^3m^3 \ge \frac{\eps^3m^3}3$$
triangles on $V_a, V_b, V_c$, where $m = \frac nk$. Since at most $m^2$ pairs $x \in V_a, y \in V_b$ are adjacent, we can therefore find $x \in V_a, y \in V_b$ such that $x \sim y$ and the set $Z = \{z \in V_c \mid x, y, z\text{ is a triangle}\}$ satisfies
$$\abs Z \ge \frac{\eps^3m}3 \ge \frac{\eps^3}3 \frac nL > \delta n$$
Hence $Z$ can't be an independent set and there are some $u, v \in Z$ such that $u \sim v$. But then $x, y, u, v$ is a $K_4$. Contradiction.
\end{proof}
\newlec
\begin{claim}
All $\eps/4$-uniform pairs $(V_i, V_j)$ have $d(V_i, V_j) \le 1/2 + \eps$.
\end{claim}
\begin{proof}
Assume $(V_i, V_j)$ contradicts the claim. Then let
$$U = \{v \in V_i \mid \abs{N(v) \inter V_j} \ge \left(\frac 12 + \frac\eps 2\right)\abs{V_j}\}$$
so that $\abs U \ge (1 - \eps/4)m > \delta n$ by uniformity. By assumption, there therefore exist $x, y \in U$ such that $x \sim y$. Now note that
$$\abs{N(x) \inter N(y) \inter V_j} \ge \eps\abs{V_j} = \eps m \ge \delta n$$
So there exist $u, v \in N(x) \inter N(y) \inter V_j$ such that $u \sim v$. But now $x, y, u, v$ is a $K_4$. Contradiction.
\end{proof}
The first claim gives us a bound on the number of edges in the reduced graph, the second claim gives us a bound on the density of those edges, and all other edges have negligible density. So we are morally done. Formally, by splitting the edges of $G$ according to which kind of pair of the partition it belongs to,
\begin{align*}
e(G)
& \le \underbrace{e(R_{\eps, \eps/4})\left(\frac 12 + \eps\right)m^2}_{\text{in the reduced graph}}
+ \underbrace{\frac\eps 4 \binom k2 m^2}_{\text{not $\eps/4$-uniform}}
+ \underbrace{\eps \binom k2 m^2}_{\text{low density}}
+ \underbrace{k m^2}_{\text{within a part}} \\
& \le \frac 12 e(R_{\eps, \eps/4}) m^2 + C\eps n^2 \\
& \le \frac{n^2}8 + C'\eps n^2 \text{ by Tur\' an}
\end{align*}
\end{proof}
\begin{thm}[Chv\' atal-Rödl-Szemerédi-Trotter, 1983]
$$r(H) \le C_d \abs H$$
for some constant $C_d$ where $H$ has max degree $d$.
\end{thm}
\begin{rmk}
The naïve Ramsey bound gives
$$r(H) \le R(\abs H) \le 4^{\abs H}$$
\end{rmk}
\begin{lem}[Embedding lemma]
Let $H$ be a $r$-partite graph with max degree $d$ and $r$-partition
$$V(H) = W_1 \union \dots \union W_r$$
where $\abs{W_i} \le s$. Now let $m \in \N, \eps, \lambda \in ]0, 1[$ be such that
$$\eps(d + 1) < (\lambda - \eps)^d, \quad s \le \eps m$$
and let $G$ be a graph on disjoint vertex sets $V_1, \dots, V_r$ with $\abs{V_i} = m$, such that each pair $(V_i, V_j)$ is $\eps$-uniform and $d(V_i, V_j) \ge \lambda$. Then $H \subseteq G$.
\end{lem}
\begin{proof}
We define an algorithm to embed $H$ into $G$. In each step of the algorithm, we will choose a new vertex in $V(H)$ that has not been embedded yet and find some vertex in $V(G)$ to map it to.
At step $t$, for each vertex $u \in V(H)$, define
$$E_t = \text{ already embedded vertices}, \quad \mathcal C_t(u) = V_\ell \inter \Inter_{\substack{w \in E_t \\ w \sim_H u}} N_G(w) \text{ where } u \in W_\ell$$
At each step of the algorithm, ensure the following:
\begin{enumerate}
\item If $x, y \in E_t$ and $x \sim_H y$, then $x \sim_G y$.
\item For all $u \nin E_t$,
$$\abs{\mathcal C_t(u)} \ge m(\lambda - \eps)^{\abs{N(u) \inter E_t}}$$
\end{enumerate}
At step $t + 1$, let $v \nin E_t$. We want to find a vertex in $\mathcal C_t(v)$ to map $v$ to. This ensures condition 1. Now, for $u \nin E_t \union \{v\}$,
$$\mathcal C_{t + 1}(u) = \begin{cases}
\mathcal C_t(u) \inter N_G(v) & \text{ if } u \sim_H v \\
\mathcal C_t(u) & \text{ if } u \not\sim_H v
\end{cases}$$
So we just need to exclude the vertices $w \in \mathcal C_t(v)$ that
\begin{enumerate}
\item satisfy $\abs{\mathcal C_t(u) \inter N_G(w)} < (\lambda - \eps)\abs{\mathcal C_t(u)}$ for some not-yet-embedded $H$-neighbor $u$ of $v$, or
\item are already identified with vertices in $E_t$.
\end{enumerate}
By uniformity, each of the at most $d$ such neighbors $u$ rules out at most $\eps m$ vertices, so there are at most $d\eps m$ vertices of the first type. Since $\abs{W_\ell} \le s \le \eps m$, there are at most $\eps m$ vertices of the second type. Thus there are at least
$$\abs{\mathcal C_t(v)} - \eps(d + 1)m \ge (\lambda - \eps)^d m - \eps(d + 1)m > 0$$
good choices for the image of $v$. Pick one. Done.
\end{proof}
\newlec
\begin{lem}
Let $H$ be a $n$-vertex graph with max degree $d$. Then there exists a partition
$$V(H) = V_1 \union \dots \union V_{10d^2}$$
so that all edges are between parts and
$$\abs{V_i} \le \frac{100n}d$$
\end{lem}
\begin{proof}
$H$ is $(d + 1)$-colorable, say with parts $W_1, \dots, W_{d + 1}$. Split each $W_i$ into at most $\ceil{d/100}$ parts of size at most $100n/d$. This gives at most
$$(d + 1)\ceil{\frac d{100}} \le \frac{d^2}{25} \le 10d^2$$
parts for big enough $d$.
\end{proof}
\begin{proof}[Proof of the theorem]
Let $r = 10d^2, t = R(10d^2)$. Let $\eps < \frac 1{t + 1}$ be such that
$$(d + 1)\eps < \left(\frac 12 - \eps\right)^d$$
Let $\ell > t + 1$ and $L(\ell, \eps)$ be the Regularity Lemma constant. Writing $\abs H = n$. We will show that
$$r(H) \le \frac{Ln}\eps$$
Let $N > \max(100/d, 1) Ln/\eps$ and $\chi$ be a red/blue edge coloring of $K_N$. Let $G$ be the red graph. Apply the Regularity Lemma with parameters $\eps, \ell$ to get a partition $V_1, \dots, V_k$ with $\ell \le k \le L$.
{\bf Step 1} \\
There are at least $(1 - \eps)\binom k2 > (1 - \frac 1{t + 1})\binom k2$ $\eps$-uniform pairs, so we can find a $K_t$ in the graph of $\eps$-uniform pairs. WLOG $V_1, \dots, V_t$ is that $K_t$.
{\bf Step 2} \\
Color each pair $(V_i, V_j)$ with the majority color between $V_i$ and $V_j$ in $G$. Apply Ramsey to find a monochromatic $K_{10d^2}$. WLOG $V_1, \dots, V_{10d^2}$ are all majority red. Partition $H$ into parts $W_1, \dots, W_{10d^2}$ of size at most $100n/d$. Since
$$\abs{W_i} \le \frac{100n}d \le \frac{\eps N}L \le \frac{\eps N}k$$
we can apply the Embedding Lemma with $\lambda = 1/2, m = N/L, s = \eps N/L$ to finish.
\end{proof}
\section{Dependent Random Choice}
\begin{dfn}
Let $G$ be a graph. We say $R \subseteq V(G)$ is {\bf $(s, k)$-rich} if, for all $x_1, \dots, x_s \in R$,
$$k \le \abs{N(x_1) \inter \dots \inter N(x_s)}$$
\end{dfn}
TODO: Insert picture
\begin{thm}
Let $G$ be a graph on $n$ vertices with $m$ edges. Let $t, s, r, k > 0$ satisfy
$$\frac{(2m)^t}{n^{2t - 1}} - \binom ns \left(\frac kn\right)^t \ge r$$
Then $G$ contains a $(s, k)$-rich set of size at least $r$.
\end{thm}
\begin{rmk}
$t$ is a free parameter in the statement, so we get to optimise over $t$ in applications.
\end{rmk}
\begin{idea}
Choose $v_1, \dots, v_t \in V(G)$ uniformly at random and consider $R = N(v_1) \inter \dots \inter N(v_t)$. Vertices of high degree are likely to lie in $R$, so $R$ is likely to be rich.
\end{idea}
\begin{proof}
Let $v_1, \dots, v_t \in V(G)$ be chosen uniformly at random. First consider
\begin{align*}
\E_{v_1, \dots, v_t} \abs{N(v_1) \inter \dots \inter N(v_t)}
& = \E_{v_1, \dots, v_t} \sum_y 1_{y \sim v_1, \dots, v_t} \\
& = \sum_y \P(y \sim v_1, \dots, v_t) \\
& = \sum_y \P(y \sim v_1) \dots \P(y \sim v_t) \\
& = \sum_y \left(\frac{d(y)}n\right)^t \\
& \ge n \left(\frac{2m}{n^2}\right)^t \\
& = \frac{(2m)^t}{n^{2t - 1}}
\end{align*}
Let
$$Y = \{(y_1, \dots, y_s) \in (N(v_1) \inter \dots \inter N(v_t))^s \mid \abs{N(y_1) \inter \dots \inter N(y_s)} < k\}$$
We compute
\begin{align*}
\E_{v_1, \dots, v_t} \abs Y
& = \E_{v_1, \dots, v_t} \sum_{\abs{N(y_1) \inter \dots \inter N(y_s)} < k} 1_{y_1, \dots, y_s \in N(v_1) \inter \dots \inter N(v_t)} \\
& = \sum_{\abs{N(y_1) \inter \dots \inter N(y_s)} < k} \P(y_1, \dots, y_s \in N(v_1) \inter \dots \inter N(v_t)) \\
& \le \binom ns \left(\frac kn\right)^t
\end{align*}