-df = pd.read_csv("FILE_PATH_TO_metadata.csv")
+df = pd.read_csv(
+    "https://static-1300131294.cos.ap-shanghai.myqcloud.com/data/data-science/metadata.csv"
+)
df.head()
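
Note: the pattern this hunk applies recurs throughout the diff: pd.read_csv accepts an HTTP(S) URL as well as a local path, so the hosted copy replaces the FILE_PATH_TO_ placeholder. A minimal sketch of the updated loading cell:

    import pandas as pd

    # pandas streams the CSV over HTTPS; no local download step is needed
    df = pd.read_csv(
        "https://static-1300131294.cos.ap-shanghai.myqcloud.com/data/data-science/metadata.csv"
    )
    df.head()
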
diff --git a/assignments/data-science/analyzing-data.html b/assignments/data-science/analyzing-data.html
index df347ea5f1..32f4602e14 100644
--- a/assignments/data-science/analyzing-data.html
+++ b/assignments/data-science/analyzing-data.html
@@ -1620,8 +1620,8 @@ 40.27. Analyzing data
import pandas as pd
import glob
-#Loading the dataset
-path = '../../data/emails.csv'
+# Loading the dataset
+path = "https://static-1300131294.cos.ap-shanghai.myqcloud.com/data/data-science/emails.csv"
email_df = pd.read_csv(path)
-<seaborn.axisgrid.FacetGrid at 0x7fe404455940>
+<seaborn.axisgrid.FacetGrid at 0x7ff0c05b58b0>
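
Note: most of the churn below is mechanical. Unseeded NumPy draws produce fresh numbers on every rebuild, and object reprs embed run-specific memory addresses and kernel PIDs. A sketch of how the numeric outputs could be pinned (repr addresses would still vary):

    import numpy as np

    np.random.seed(42)               # pins the legacy global RNG behind np.random.*
    rng = np.random.default_rng(42)  # or seed a dedicated Generator
    print(rng.normal(size=3))        # identical on every rebuild
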
diff --git a/data-science/introduction/introduction-to-statistics-and-probability.html b/data-science/introduction/introduction-to-statistics-and-probability.html
index d8eff0f8cc..8e7d83f702 100644
--- a/data-science/introduction/introduction-to-statistics-and-probability.html
+++ b/data-science/introduction/introduction-to-statistics-and-probability.html
@@ -1912,10 +1912,10 @@ 4.4.6. Normal distribution
-array([11.28811917, 9.17295329, 6.56371529, 9.84241983, 7.27205865,
- 8.01697559, 12.43056531, 9.00755935, 10.91226239, 8.97359615,
- 10.02707036, 10.79299852, 11.5942073 , 7.83968173, 7.2937034 ,
- 12.07886138, 13.57946497, 10.92063479, 8.77085797, 10.20937492])
+array([10.24742603, 9.18504561, 6.94582554, 12.90321193, 10.31306506,
+ 13.29698784, 10.99021735, 7.11176853, 9.49297535, 10.57585147,
+ 12.36956393, 10.47596834, 12.64111873, 9.62806819, 11.70137462,
+ 7.26499903, 9.51998705, 6.31077483, 10.7870969 , 11.51963014])
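
Note: both the old and new arrays cluster around 10 with a spread of roughly 2, so this cell presumably draws 20 samples from a normal distribution; the exact loc/scale are assumptions inferred from the values, not shown in the hunk. A seeded sketch:

    import numpy as np

    rng = np.random.default_rng(0)
    samples = rng.normal(loc=10, scale=2, size=20)  # assumed parameters
    print(samples)
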
diff --git a/data-science/working-with-data/numpy.html b/data-science/working-with-data/numpy.html
index 2225a65060..bcf4566e04 100644
--- a/data-science/working-with-data/numpy.html
+++ b/data-science/working-with-data/numpy.html
@@ -2670,7 +2670,7 @@ 5.3.1.3. NumPy array attributes
-<memory at 0x7f806c85fd40>
+<memory at 0x7f12d2318d40>
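
Note: <memory at 0x...> is the repr of the memoryview returned by an array's .data attribute; the hex address is assigned per process, so it differs on every rebuild even when the array is identical. For example:

    import numpy as np

    arr = np.arange(6, dtype=np.float64)
    print(arr.shape, arr.dtype, arr.itemsize)  # deterministic attributes
    print(arr.data)                            # <memory at 0x...>, address varies per run
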
diff --git a/data-science/working-with-data/pandas/data-selection.html b/data-science/working-with-data/pandas/data-selection.html
index bd10b561c9..cd6dc9bc74 100644
--- a/data-science/working-with-data/pandas/data-selection.html
+++ b/data-science/working-with-data/pandas/data-selection.html
@@ -1834,24 +1834,24 @@ 5.4.2.2. Selection by label
-c 0.038589
-d -0.197195
-e -0.130250
-f 0.067106
+c 1.284624
+d -0.508137
+e 1.510474
+f 0.757249
dtype: float64
@@ -1978,7 +1978,7 @@ 5.4.2.2. Selection by label
--0.9518306057687721
+1.64675837211757
@@ -2035,8 +2035,8 @@ 5.4.2.2. Selection by label
-a 0.004982
-b -0.951831
+a -0.579541
+b 1.646758
c 0.000000
d 0.000000
e 0.000000
@@ -2123,24 +2123,24 @@ 5.4.2.2. Selection by label
-A 1.699244
-B -1.280140
-C 0.683763
-D -0.078884
+A -0.261255
+B 0.563448
+C -0.587615
+D 0.125141
Name: a, dtype: float64
@@ -2307,10 +2307,10 @@ 5.4.2.2. Selection by label
-A True
-B False
-C True
-D False
+A False
+B True
+C False
+D True
Name: a, dtype: bool
@@ -2380,40 +2380,40 @@ 5.4.2.2. Selection by label
- A
- C
+ B
+ D
a
- 1.699244
- 0.683763
+ 0.563448
+ 0.125141
b
- 0.210853
- -0.302834
+ -1.116184
+ 1.314941
c
- -2.148546
- -0.813316
+ 1.297326
+ -0.802559
d
- -0.639539
- 1.754627
+ -2.411222
+ 0.860725
e
- 2.519738
- 0.338291
+ -1.138838
+ 0.100947
f
- 1.627156
- -0.872900
+ -0.202684
+ 0.818771
@@ -2512,17 +2512,17 @@ 5.4.2.2. Selection by label
-1.699243786401637
+-0.2612545763775011
@@ -2903,9 +2903,9 @@ 5.4.2.4. Selection by position
-0 0.608692
-2 1.039676
-4 0.704550
+0 0.547894
+2 -1.356373
+4 2.255724
dtype: float64
@@ -2957,7 +2957,7 @@ 5.4.2.4. Selection by position
-1.6168817892052105
+0.8768241394770284
@@ -3013,8 +3013,8 @@ 5.4.2.4. Selection by position
0 0.000000
2 0.000000
4 0.000000
-6 1.616882
-8 -0.946230
+6 0.876824
+8 0.568045
dtype: float64
@@ -3098,24 +3098,24 @@ 5.4.2.4. Selection by position
-1.495843419947373
+2.9734688264008327
@@ -3558,10 +3558,10 @@ 5.4.2.4. Selection by position
-0 0.336735
-2 1.495843
-4 -0.695248
-6 0.940683
+0 1.270549
+2 2.973469
+4 3.005324
+6 2.231575
Name: 2, dtype: float64
@@ -3853,23 +3853,23 @@ 5.4.2.5. Selection by callable
-a -0.248610
-b 0.920658
-c 0.945481
-d 0.259017
-e 2.184927
-f 0.153795
+a 1.026795
+b 1.602264
+c 0.648559
+d -1.229180
+e 0.171712
+f -0.419989
Name: A, dtype: float64
@@ -4518,11 +4511,10 @@ 5.4.2.5. Selection by callable
-b 0.920658
-c 0.945481
-d 0.259017
-e 2.184927
-f 0.153795
+a 1.026795
+b 1.602264
+c 0.648559
+e 0.171712
Name: A, dtype: float64
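
Note: every hunk in this file swaps one unseeded standard-normal draw for another; the selection semantics being taught are unchanged. A seeded sketch of the three access styles the section covers (index labels a-f and columns A-D mirror the outputs above):

    import numpy as np
    import pandas as pd

    rng = np.random.default_rng(0)
    df = pd.DataFrame(rng.standard_normal((6, 4)),
                      index=list("abcdef"), columns=list("ABCD"))

    print(df.loc["c":"f", "A"])                 # by label; endpoints inclusive
    print(df.iloc[0, 0])                        # by position
    print(df.loc[:, lambda d: d.loc["a"] > 0])  # by callable: columns positive in row 'a'
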
diff --git a/data-science/working-with-data/pandas/introduction-and-data-structures.html b/data-science/working-with-data/pandas/introduction-and-data-structures.html
index 4e0909872b..cef6c56810 100644
--- a/data-science/working-with-data/pandas/introduction-and-data-structures.html
+++ b/data-science/working-with-data/pandas/introduction-and-data-structures.html
@@ -2011,11 +2011,11 @@ 5.4.1.2.1.1.1. From ndarray
-a -0.471148
-b -0.093864
-c -0.240233
-d 1.084377
-e 0.740195
+a -0.473693
+b -2.010741
+c 0.446526
+d 0.111314
+e -1.301965
dtype: float64
@@ -2040,11 +2040,11 @@ 5.4.1.2.1.1.1. From ndarray
-0 0.718215
-1 0.451950
-2 -2.147534
-3 0.749949
-4 0.606131
+0 0.534743
+1 -0.772644
+2 -0.598612
+3 -0.158080
+4 0.605635
dtype: float64
@@ -2156,7 +2156,7 @@ 5.4.1.2.1.2. Series is ndarray-like
-
-a -0.471148
-b -0.093864
-c -0.240233
+a -0.473693
+b -2.010741
+c 0.446526
dtype: float64
@@ -2183,8 +2183,8 @@ 5.4.1.2.1.2. Series is ndarray-like
-d 1.084377
-e 0.740195
+c 0.446526
+d 0.111314
dtype: float64
@@ -2197,9 +2197,9 @@ 5.4.1.2.1.2. Series is ndarray-like
-e 0.740195
-d 1.084377
-b -0.093864
+e -1.301965
+d 0.111314
+b -2.010741
dtype: float64
@@ -2212,11 +2212,11 @@ 5.4.1.2.1.2. Series is ndarray-like
-a 0.624285
-b 0.910406
-c 0.786445
-d 2.957596
-e 2.096344
+a 0.622698
+b 0.133889
+c 1.562874
+d 1.117746
+e 0.271997
dtype: float64
@@ -2244,8 +2244,8 @@ 5.4.1.2.1.2. Series is ndarray-like
<PandasArray>
-[-0.47114789842073596, -0.09386425367761429, -0.24023274525915267,
- 1.0843766433680067, 0.7401950570540115]
+[-0.4736933598313704, -2.010740516507576, 0.44652644441558775,
+ 0.11131448850192575, -1.3019652543697393]
Length: 5, dtype: float64
@@ -2259,7 +2259,7 @@ 5.4.1.2.1.2. Series is ndarray-like
-array([-0.4711479 , -0.09386425, -0.24023275, 1.08437664, 0.74019506])
+array([-0.47369336, -2.01074052, 0.44652644, 0.11131449, -1.30196525])
@@ -2276,7 +2276,7 @@ 5.4.1.2.1.3. Series is dict-like
-
-a -0.471148
-b -0.093864
-c -0.240233
-d 1.084377
+a -0.473693
+b -2.010741
+c 0.446526
+d 0.111314
e 12.000000
dtype: float64
@@ -2422,10 +2422,10 @@ 5.4.1.2.1.4. Vectorized operations and l
-a -0.942296
-b -0.187729
-c -0.480465
-d 2.168753
+a -0.947387
+b -4.021481
+c 0.893053
+d 0.222629
e 24.000000
dtype: float64
@@ -2439,10 +2439,10 @@ 5.4.1.2.1.4. Vectorized operations and l
-a -0.942296
-b -0.187729
-c -0.480465
-d 2.168753
+a -0.947387
+b -4.021481
+c 0.893053
+d 0.222629
e 24.000000
dtype: float64
@@ -2456,10 +2456,10 @@ 5.4.1.2.1.4. Vectorized operations and l
-a 0.624285
-b 0.910406
-c 0.786445
-d 2.957596
+a 0.622698
+b 0.133889
+c 1.562874
+d 1.117746
e 162754.791419
dtype: float64
@@ -2475,9 +2475,9 @@ 5.4.1.2.1.4. Vectorized operations and l
a NaN
-b -0.187729
-c -0.480465
-d 2.168753
+b -4.021481
+c 0.893053
+d 0.222629
e NaN
dtype: float64
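
Note: the NaN rows at 'a' and 'e' come from label alignment: s[1:] drops 'a' and s[:-1] drops 'e', so their sum is defined only on the overlapping labels. Reproduced with a fixed seed:

    import numpy as np
    import pandas as pd

    s = pd.Series(np.random.default_rng(0).standard_normal(5), index=list("abcde"))
    print(s[1:] + s[:-1])  # NaN at 'a' and 'e', finite values on b/c/d
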
@@ -2507,11 +2507,11 @@ 5.4.1.2.1.5. Name attribute
-0 0.857799
-1 0.804311
-2 0.105946
-3 1.011575
-4 0.002346
+0 -1.062093
+1 -0.909160
+2 -0.156057
+3 -1.000891
+4 0.789003
Name: something, dtype: float64
@@ -4523,51 +4523,51 @@ 5.4.1.2.2.5. Data alignment and arithmet
0
- -2.976357
- -1.316627
- 0.672159
+ -0.065012
+ 2.049261
+ -1.751186
NaN
1
- 0.477964
- 0.339977
- 1.442367
+ 0.237794
+ -1.335222
+ -1.459176
NaN
2
- 0.554947
- -0.146307
- 1.304767
+ -0.350036
+ -0.347083
+ -1.255875
NaN
3
- 0.947704
- 0.868173
- 2.125977
+ 1.327497
+ -0.523681
+ 0.255007
NaN
4
- 1.597975
- -0.998993
- -1.187223
+ 2.552969
+ 0.157565
+ 0.546745
NaN
5
- -1.066379
- 1.095593
- -0.988821
+ -1.351615
+ -0.333879
+ -0.272417
NaN
6
- 0.067644
- -0.587502
- 1.405716
+ 0.825050
+ -0.179525
+ -2.036190
NaN
@@ -4637,66 +4637,66 @@ 5.4.1.2.2.5. Data alignment and arithmet
1
- 1.884459
- -0.153521
- 0.043441
- -0.381469
+ 0.209956
+ -1.067719
+ 0.171564
+ -0.940577
2
- 3.101853
- 0.780966
- 1.009441
- -3.495847
+ -0.058460
+ -1.393320
+ -0.737118
+ -4.419007
3
- 1.830482
- 0.956365
- -0.295293
- -0.735077
+ 1.059072
+ -1.374886
+ 2.448379
+ -1.033772
4
- 2.716588
- 1.019025
- -1.171295
- 0.352544
+ 1.760605
+ -0.961366
+ 0.416068
+ -3.815386
5
- 1.565483
- 2.371978
- -1.698923
- -1.818532
+ -0.295168
+ -1.773900
+ 0.350974
+ 0.619974
6
- 0.744948
- -0.013414
- -0.221765
- -1.769200
+ 0.910916
+ -2.385331
+ 0.800347
+ -1.478140
7
- 0.312613
- -0.304193
- 0.816620
- -1.095031
+ 0.557560
+ -0.757185
+ 0.558972
+ -2.264130
8
- 1.756536
- 0.536441
- 1.088142
- -0.657246
+ 1.292969
+ -0.249907
+ 0.502733
+ -0.763507
9
- 2.103343
- -0.251893
- -2.375203
- -1.608417
+ -0.515080
+ 0.039775
+ 1.113023
+ -1.913248
@@ -4737,73 +4737,73 @@ 5.4.1.2.2.5. Data alignment and arithmet
0
- -6.492551
- -0.114228
- 5.049975
- 4.893690
+ 1.447580
+ 8.755727
+ -1.095820
+ 11.971909
1
- 2.929745
- -0.881832
- 5.267180
- 2.986346
+ 2.497361
+ 3.417134
+ -0.238001
+ 7.269023
2
- 9.016714
- 3.790604
- 10.097182
- -12.585544
+ 1.155280
+ 1.789129
+ -4.781408
+ -10.123126
3
- 2.659859
- 4.667597
- 3.573509
- 1.218305
+ 6.742939
+ 1.881295
+ 11.146076
+ 6.803049
4
- 7.090390
- 4.980895
- -0.806500
- 6.656411
+ 10.250605
+ 3.948896
+ 0.984520
+ -7.105023
5
- 1.334864
- 11.745660
- -3.444640
- -4.198972
+ -0.028263
+ -0.113775
+ 0.659050
+ 15.071779
6
- -2.767812
- -0.181299
- 3.941152
- -3.952308
+ 6.002159
+ -3.170930
+ 2.905917
+ 4.581208
7
- -4.929484
- -1.635194
- 9.133073
- -0.581465
+ 4.235380
+ 4.969804
+ 1.699039
+ 0.651259
8
- 2.290130
- 2.567976
- 10.490684
- 1.607458
+ 7.912423
+ 7.506190
+ 1.417844
+ 8.154376
9
- 4.024164
- -1.373693
- -6.826041
- -3.148394
+ -1.127819
+ 8.954603
+ 4.469294
+ 2.405671
@@ -4843,73 +4843,73 @@ 5.4.1.2.2.5. Data alignment and arithmet
0
- -0.588751
- -2.364930
- 1.639358
- 1.727897
+ -9.051079
+ 0.740113
+ -1.615081
+ 0.501409
1
- 5.377818
- -1.735008
- 1.530372
- 5.069215
+ 10.053063
+ 3.528249
+ -2.234136
+ 0.948943
2
- 0.712584
- 2.792353
- 0.617499
- -0.342805
+ -5.919119
+ -23.711214
+ -0.737310
+ -0.412435
3
- 7.577377
- 1.874346
- 3.177611
- -6.396355
+ 1.054199
+ -42.121185
+ 0.546683
+ 1.041005
4
- 0.982243
- 1.677348
- -1.781578
- 1.073788
+ 0.606016
+ 2.565555
+ -4.923781
+ -0.549147
5
- -7.517254
- 0.513049
- -0.918334
- -0.806585
+ -2.465164
+ -2.365436
+ -3.728700
+ 0.382503
6
- -1.048699
- -2.292212
- 2.575790
- -0.840010
+ 1.249326
+ -0.966944
+ 5.519267
+ 1.937077
7
- -0.721554
- -1.375442
- 0.700960
- -1.936885
+ 2.236756
+ 1.683613
+ -16.613452
+ -3.707162
8
- 17.233678
- 8.803186
- 0.588881
- -12.737492
+ 0.845677
+ 0.908069
+ -8.588758
+ 0.812430
9
- 2.470156
- -1.482055
- -0.566505
- -0.971177
+ -1.598558
+ 0.718948
+ 2.024870
+ 12.325266
@@ -4949,73 +4949,73 @@ 5.4.1.2.2.5. Data alignment and arithmet
0
- 8.322860
- 0.031969
- 0.138454
- 0.112183
+ 0.000149
+ 3.332793e+00
+ 0.146968
+ 15.820974
1
- 0.001196
- 0.110356
- 0.182311
- 0.001514
+ 0.000098
+ 6.453023e-03
+ 0.040139
+ 1.233219
2
- 3.878422
- 0.016448
- 6.877897
- 72.411995
+ 0.000815
+ 3.163624e-06
+ 3.383760
+ 34.560378
3
- 0.000303
- 0.081022
- 0.009808
- 0.000597
+ 0.809674
+ 3.176857e-07
+ 11.195903
+ 0.851507
4
- 1.074297
- 0.126330
- 0.099261
- 0.752186
+ 7.414182
+ 2.308205e-02
+ 0.001701
+ 10.996238
5
- 0.000313
- 14.433281
- 1.406037
- 2.362645
+ 0.027078
+ 3.194147e-02
+ 0.005173
+ 46.715265
6
- 0.826793
- 0.036223
- 0.022717
- 2.008453
+ 0.410485
+ 1.143917e+00
+ 0.001078
+ 0.071025
7
- 3.689128
- 0.279403
- 4.142158
- 0.071053
+ 0.039951
+ 1.244604e-01
+ 0.000013
+ 0.005295
8
- 0.000011
- 0.000167
- 8.315544
- 0.000038
+ 1.955158
+ 1.470702e+00
+ 0.000184
+ 2.295388
9
- 0.026860
- 0.207273
- 9.709205
- 1.124105
+ 0.153139
+ 3.742910e+00
+ 0.059486
+ 0.000043
@@ -5271,35 +5271,35 @@ 5.4.1.3.2. Basics
2000-01-01
- 0.801076
- 1.465342
- -0.166177
- -0.788715
+ 0.266217
+ 1.228785
+ 1.031277
+ -0.095949
2000-01-02
- 0.348102
- -1.058147
- -1.531875
- -1.316167
+ 1.488999
+ 0.037730
+ -0.392898
+ 0.215211
2000-01-03
- 1.736169
- -0.839949
- 1.481805
- -0.264777
+ 0.038978
+ -2.140597
+ 0.568707
+ 1.023702
2000-01-04
- -0.435348
- 0.694244
- 1.922322
- -0.349232
+ -0.079610
+ -0.033805
+ 0.101702
+ 0.303200
2000-01-05
- -0.745863
- -0.692943
- -0.828683
- 2.427089
+ -2.045461
+ -0.369304
+ -0.452429
+ 2.246294
2000-01-06
- 0.738183
- 0.224374
- -0.953017
- -0.067219
+ 0.301222
+ -0.225712
+ 1.916028
+ 0.982501
2000-01-07
- 0.387631
- -0.026705
- -0.647102
- -0.489077
+ -0.566633
+ 0.042583
+ -2.632710
+ 0.073967
2000-01-08
- -1.247095
- -0.649027
- -0.993315
- 0.407961
+ -0.859050
+ -0.604785
+ -1.571164
+ -1.762992
@@ -5494,7 +5494,7 @@ 5.4.1.3.2. Basics
-0.7381831630668674
+0.30122197945169477
@@ -5534,59 +5534,59 @@ 5.4.1.3.2. Basics
2000-01-01
- 0.801076
- 1.465342
- -0.166177
- -0.788715
+ 0.266217
+ 1.228785
+ 1.031277
+ -0.095949
2000-01-02
- 0.348102
- -1.058147
- -1.531875
- -1.316167
+ 1.488999
+ 0.037730
+ -0.392898
+ 0.215211
2000-01-03
- 1.736169
- -0.839949
- 1.481805
- -0.264777
+ 0.038978
+ -2.140597
+ 0.568707
+ 1.023702
2000-01-04
- -0.435348
- 0.694244
- 1.922322
- -0.349232
+ -0.079610
+ -0.033805
+ 0.101702
+ 0.303200
2000-01-05
- -0.745863
- -0.692943
- -0.828683
- 2.427089
+ -2.045461
+ -0.369304
+ -0.452429
+ 2.246294
2000-01-06
- 0.738183
- 0.224374
- -0.953017
- -0.067219
+ 0.301222
+ -0.225712
+ 1.916028
+ 0.982501
2000-01-07
- 0.387631
- -0.026705
- -0.647102
- -0.489077
+ -0.566633
+ 0.042583
+ -2.632710
+ 0.073967
2000-01-08
- -1.247095
- -0.649027
- -0.993315
- 0.407961
+ -0.859050
+ -0.604785
+ -1.571164
+ -1.762992
@@ -5627,59 +5627,59 @@ 5.4.1.3.2. Basics
2000-01-01
- 1.465342
- 0.801076
- -0.166177
- -0.788715
+ 1.228785
+ 0.266217
+ 1.031277
+ -0.095949
2000-01-02
- -1.058147
- 0.348102
- -1.531875
- -1.316167
+ 0.037730
+ 1.488999
+ -0.392898
+ 0.215211
2000-01-03
- -0.839949
- 1.736169
- 1.481805
- -0.264777
+ -2.140597
+ 0.038978
+ 0.568707
+ 1.023702
2000-01-04
- 0.694244
- -0.435348
- 1.922322
- -0.349232
+ -0.033805
+ -0.079610
+ 0.101702
+ 0.303200
2000-01-05
- -0.692943
- -0.745863
- -0.828683
- 2.427089
+ -0.369304
+ -2.045461
+ -0.452429
+ 2.246294
2000-01-06
- 0.224374
- 0.738183
- -0.953017
- -0.067219
+ -0.225712
+ 0.301222
+ 1.916028
+ 0.982501
2000-01-07
- -0.026705
- 0.387631
- -0.647102
- -0.489077
+ 0.042583
+ -0.566633
+ -2.632710
+ 0.073967
2000-01-08
- -0.649027
- -1.247095
- -0.993315
- 0.407961
+ -0.604785
+ -0.859050
+ -1.571164
+ -1.762992
@@ -5720,43 +5720,43 @@ 5.4.1.3.2. Basics
2000-01-01
- 1.465342
- 0.801076
+ 1.228785
+ 0.266217
2000-01-02
- -1.058147
- 0.348102
+ 0.037730
+ 1.488999
2000-01-03
- -0.839949
- 1.736169
+ -2.140597
+ 0.038978
2000-01-04
- 0.694244
- -0.435348
+ -0.033805
+ -0.079610
2000-01-05
- -0.692943
- -0.745863
+ -0.369304
+ -2.045461
2000-01-06
- 0.224374
- 0.738183
+ -0.225712
+ 0.301222
2000-01-07
- -0.026705
- 0.387631
+ 0.042583
+ -0.566633
2000-01-08
- -0.649027
- -1.247095
+ -0.604785
+ -0.859050
@@ -5795,43 +5795,43 @@ 5.4.1.3.2. Basics
2000-01-01
- 1.465342
- 0.801076
+ 1.228785
+ 0.266217
2000-01-02
- -1.058147
- 0.348102
+ 0.037730
+ 1.488999
2000-01-03
- -0.839949
- 1.736169
+ -2.140597
+ 0.038978
2000-01-04
- 0.694244
- -0.435348
+ -0.033805
+ -0.079610
2000-01-05
- -0.692943
- -0.745863
+ -0.369304
+ -2.045461
2000-01-06
- 0.224374
- 0.738183
+ -0.225712
+ 0.301222
2000-01-07
- -0.026705
- 0.387631
+ 0.042583
+ -0.566633
2000-01-08
- -0.649027
- -1.247095
+ -0.604785
+ -0.859050
@@ -5871,43 +5871,43 @@ 5.4.1.3.2. Basics
2000-01-01
- 0.801076
- 1.465342
+ 0.266217
+ 1.228785
2000-01-02
- 0.348102
- -1.058147
+ 1.488999
+ 0.037730
2000-01-03
- 1.736169
- -0.839949
+ 0.038978
+ -2.140597
2000-01-04
- -0.435348
- 0.694244
+ -0.079610
+ -0.033805
2000-01-05
- -0.745863
- -0.692943
+ -2.045461
+ -0.369304
2000-01-06
- 0.738183
- 0.224374
+ 0.301222
+ -0.225712
2000-01-07
- 0.387631
- -0.026705
+ -0.566633
+ 0.042583
2000-01-08
- -1.247095
- -0.649027
+ -0.859050
+ -0.604785
@@ -5944,14 +5944,14 @@ 5.4.1.3.3. Attribute access
-2000-01-01 0.801076
-2000-01-02 0.348102
-2000-01-03 1.736169
-2000-01-04 -0.435348
-2000-01-05 -0.745863
-2000-01-06 0.738183
-2000-01-07 0.387631
-2000-01-08 -1.247095
+2000-01-01 0.266217
+2000-01-02 1.488999
+2000-01-03 0.038978
+2000-01-04 -0.079610
+2000-01-05 -2.045461
+2000-01-06 0.301222
+2000-01-07 -0.566633
+2000-01-08 -0.859050
Freq: D, Name: A, dtype: float64
@@ -6009,58 +6009,58 @@ 5.4.1.3.3. Attribute access
-/tmp/ipykernel_3082/269534380.py:2: UserWarning: Pandas doesn't allow columns to be created via a new attribute name - see https://pandas.pydata.org/pandas-docs/stable/indexing.html#attribute-access
+/tmp/ipykernel_3005/269534380.py:2: UserWarning: Pandas doesn't allow columns to be created via a new attribute name - see https://pandas.pydata.org/pandas-docs/stable/indexing.html#attribute-access
df.two = [4, 5, 6]
@@ -6290,14 +6290,14 @@ 5.4.1.3.4. Slicing ranges
-2000-01-01 0.801076
-2000-01-02 0.348102
-2000-01-03 1.736169
-2000-01-04 -0.435348
-2000-01-05 -0.745863
-2000-01-06 0.738183
-2000-01-07 0.387631
-2000-01-08 -1.247095
+2000-01-01 0.266217
+2000-01-02 1.488999
+2000-01-03 0.038978
+2000-01-04 -0.079610
+2000-01-05 -2.045461
+2000-01-06 0.301222
+2000-01-07 -0.566633
+2000-01-08 -0.859050
Freq: D, Name: A, dtype: float64
@@ -6310,11 +6310,11 @@ 5.4.1.3.4. Slicing ranges
-2000-01-01 0.801076
-2000-01-02 0.348102
-2000-01-03 1.736169
-2000-01-04 -0.435348
-2000-01-05 -0.745863
+2000-01-01 0.266217
+2000-01-02 1.488999
+2000-01-03 0.038978
+2000-01-04 -0.079610
+2000-01-05 -2.045461
Freq: D, Name: A, dtype: float64
@@ -6327,10 +6327,10 @@ 5.4.1.3.4. Slicing ranges
-2000-01-01 0.801076
-2000-01-03 1.736169
-2000-01-05 -0.745863
-2000-01-07 0.387631
+2000-01-01 0.266217
+2000-01-03 0.038978
+2000-01-05 -2.045461
+2000-01-07 -0.566633
Freq: 2D, Name: A, dtype: float64
@@ -6343,14 +6343,14 @@ 5.4.1.3.4. Slicing ranges
-2000-01-08 -1.247095
-2000-01-07 0.387631
-2000-01-06 0.738183
-2000-01-05 -0.745863
-2000-01-04 -0.435348
-2000-01-03 1.736169
-2000-01-02 0.348102
-2000-01-01 0.801076
+2000-01-08 -0.859050
+2000-01-07 -0.566633
+2000-01-06 0.301222
+2000-01-05 -2.045461
+2000-01-04 -0.079610
+2000-01-03 0.038978
+2000-01-02 1.488999
+2000-01-01 0.266217
Freq: -1D, Name: A, dtype: float64
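
Note: the slicing hunks above all reuse one unseeded 8x4 frame on a daily DatetimeIndex. A seeded sketch of the operations whose outputs changed:

    import numpy as np
    import pandas as pd

    dates = pd.date_range("2000-01-01", periods=8, freq="D")
    df = pd.DataFrame(np.random.default_rng(0).standard_normal((8, 4)),
                      index=dates, columns=list("ABCD"))

    print(df.A)      # attribute access, equivalent to df["A"]
    print(df[:5])    # first five rows
    print(df[::2])   # every second row (Freq: 2D)
    print(df[::-1])  # reversed (Freq: -1D)
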
@@ -6371,9 +6371,9 @@ 5.4.1.3.4. Slicing ranges
17.1.2. Bootstrapping
-<matplotlib.legend.Legend at 0x7f6aa00320d0>
+<matplotlib.legend.Legend at 0x7ff34ab6c190>
diff --git a/ml-advanced/gradient-boosting/xgboost.html b/ml-advanced/gradient-boosting/xgboost.html
index c0d50b23c8..27dc6bcede 100644
--- a/ml-advanced/gradient-boosting/xgboost.html
+++ b/ml-advanced/gradient-boosting/xgboost.html
@@ -1829,7 +1829,7 @@ 18.3.2. Example
-Mean Absolute Error : 19278.74181292808
+Mean Absolute Error : 19574.44138484589
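
Note: the MAE moves between rebuilds because the run wasn't fully seeded; pinning random_state in both the split and the booster makes it stable. A sketch on synthetic data (the chapter's own dataset isn't reproduced here):

    from sklearn.datasets import make_regression
    from sklearn.metrics import mean_absolute_error
    from sklearn.model_selection import train_test_split
    from xgboost import XGBRegressor

    X, y = make_regression(n_samples=500, n_features=8, noise=10.0, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    model = XGBRegressor(random_state=0)
    model.fit(X_train, y_train)
    pred = model.predict(X_test)
    print("Mean Absolute Error :", mean_absolute_error(y_test, pred))
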
diff --git a/ml-advanced/kernel-method.html b/ml-advanced/kernel-method.html
index 3e5484c2bd..8c78ed4578 100644
--- a/ml-advanced/kernel-method.html
+++ b/ml-advanced/kernel-method.html
@@ -1857,7 +1857,7 @@ 20.1. Motivating Support Vector Machines
-
-Accuracy is 0.8065054211843202
+Accuracy is 0.8023352793994996
@@ -2243,10 +2243,10 @@ 14.2.4. Exercise - apply logistic regres
-ingredients: Index(['barley', 'bean', 'buckwheat', 'carrot', 'katsuobushi', 'scallion',
- 'seaweed', 'soy_sauce', 'soybean'],
+ingredients: Index(['bell_pepper', 'buckwheat', 'cane_molasses', 'carrot', 'cilantro',
+ 'lime', 'peanut', 'sesame_oil', 'soy_sauce', 'vinegar'],
dtype='object')
-cuisine: japanese
+cuisine: thai
@@ -2296,24 +2296,24 @@ 14.2.4. Exercise - apply logistic regres
- japanese
- 0.991627
-
-
- korean
- 0.004940
+ thai
+ 0.442010
chinese
- 0.002909
+ 0.312328
- thai
- 0.000457
+ japanese
+ 0.240609
indian
- 0.000067
+ 0.004452
+
+
+ korean
+ 0.000601
@@ -2334,15 +2334,15 @@ 14.2.4. Exercise - apply logistic regres
precision recall f1-score support
- chinese 0.76 0.75 0.76 236
- indian 0.88 0.87 0.87 233
- japanese 0.74 0.74 0.74 227
- korean 0.84 0.83 0.83 259
- thai 0.80 0.84 0.82 244
+ chinese 0.71 0.69 0.70 238
+ indian 0.92 0.91 0.91 216
+ japanese 0.79 0.78 0.79 261
+ korean 0.83 0.79 0.81 247
+ thai 0.77 0.86 0.81 237
- accuracy 0.81 1199
- macro avg 0.81 0.81 0.81 1199
-weighted avg 0.81 0.81 0.81 1199
+ accuracy 0.80 1199
+ macro avg 0.81 0.80 0.80 1199
+weighted avg 0.80 0.80 0.80 1199
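
Note: the sampled test recipe changed (thai instead of japanese), so the ranked predict_proba table and the report shift with it. A self-contained sketch of the same workflow on synthetic 5-class data (a stand-in for the cuisine/ingredient matrix):

    import pandas as pd
    from sklearn.datasets import make_classification
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import classification_report
    from sklearn.model_selection import train_test_split

    X, y = make_classification(n_samples=2000, n_features=40, n_informative=10,
                               n_classes=5, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    model = LogisticRegression(max_iter=1000).fit(X_train, y_train)

    # rank class probabilities for one row, as the notebook does for one recipe
    proba = pd.Series(model.predict_proba(X_test[:1])[0], index=model.classes_)
    print(proba.sort_values(ascending=False))
    print(classification_report(y_test, model.predict(X_test)))
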
diff --git a/ml-fundamentals/classification/yet-other-classifiers.html b/ml-fundamentals/classification/yet-other-classifiers.html
index 5d4f35d018..9481700872 100644
--- a/ml-fundamentals/classification/yet-other-classifiers.html
+++ b/ml-fundamentals/classification/yet-other-classifiers.html
@@ -1875,18 +1875,18 @@ 14.3.4.1. Exercise - apply a linear SVC<
-Accuracy (train) for Linear SVC: 79.9%
+Accuracy (train) for Linear SVC: 80.1%
precision recall f1-score support
- chinese 0.69 0.75 0.72 230
- indian 0.91 0.91 0.91 252
- japanese 0.77 0.77 0.77 222
- korean 0.84 0.74 0.78 244
- thai 0.79 0.82 0.81 251
+ chinese 0.66 0.74 0.70 229
+ indian 0.88 0.92 0.90 232
+ japanese 0.83 0.78 0.81 253
+ korean 0.87 0.76 0.81 236
+ thai 0.78 0.80 0.79 249
accuracy 0.80 1199
macro avg 0.80 0.80 0.80 1199
-weighted avg 0.80 0.80 0.80 1199
+weighted avg 0.81 0.80 0.80 1199
@@ -1910,31 +1910,33 @@ 14.3.5.1. Exercise - apply the K-Neighbo
-Accuracy (train) for Linear SVC: 79.9%
+Accuracy (train) for Linear SVC: 80.1%
precision recall f1-score support
- chinese 0.69 0.75 0.72 230
- indian 0.91 0.91 0.91 252
- japanese 0.77 0.77 0.77 222
- korean 0.84 0.74 0.78 244
- thai 0.79 0.82 0.81 251
+ chinese 0.66 0.74 0.70 229
+ indian 0.88 0.92 0.90 232
+ japanese 0.83 0.78 0.81 253
+ korean 0.87 0.76 0.81 236
+ thai 0.78 0.80 0.79 249
accuracy 0.80 1199
macro avg 0.80 0.80 0.80 1199
-weighted avg 0.80 0.80 0.80 1199
+weighted avg 0.81 0.80 0.80 1199
-Accuracy (train) for KNN classifier: 72.6%
- precision recall f1-score support
+Accuracy (train) for KNN classifier: 74.6%
+
+
+ precision recall f1-score support
- chinese 0.63 0.71 0.67 230
- indian 0.87 0.79 0.83 252
- japanese 0.63 0.86 0.73 222
- korean 0.92 0.54 0.68 244
- thai 0.70 0.73 0.72 251
+ chinese 0.67 0.73 0.70 229
+ indian 0.85 0.81 0.83 232
+ japanese 0.66 0.85 0.74 253
+ korean 0.89 0.58 0.71 236
+ thai 0.74 0.75 0.74 249
- accuracy 0.73 1199
- macro avg 0.75 0.73 0.72 1199
-weighted avg 0.75 0.73 0.73 1199
+ accuracy 0.75 1199
+ macro avg 0.76 0.74 0.75 1199
+weighted avg 0.76 0.75 0.75 1199
@@ -1962,41 +1964,41 @@ 14.3.6.1. Exercise - apply a Support Vec
-Accuracy (train) for Linear SVC: 79.9%
+Accuracy (train) for Linear SVC: 80.1%
precision recall f1-score support
- chinese 0.69 0.75 0.72 230
- indian 0.91 0.91 0.91 252
- japanese 0.77 0.77 0.77 222
- korean 0.84 0.74 0.78 244
- thai 0.79 0.82 0.81 251
+ chinese 0.66 0.74 0.70 229
+ indian 0.88 0.92 0.90 232
+ japanese 0.83 0.78 0.81 253
+ korean 0.87 0.76 0.81 236
+ thai 0.78 0.80 0.79 249
accuracy 0.80 1199
macro avg 0.80 0.80 0.80 1199
-weighted avg 0.80 0.80 0.80 1199
+weighted avg 0.81 0.80 0.80 1199
-Accuracy (train) for KNN classifier: 72.6%
+Accuracy (train) for KNN classifier: 74.6%
precision recall f1-score support
- chinese 0.63 0.71 0.67 230
- indian 0.87 0.79 0.83 252
- japanese 0.63 0.86 0.73 222
- korean 0.92 0.54 0.68 244
- thai 0.70 0.73 0.72 251
+ chinese 0.67 0.73 0.70 229
+ indian 0.85 0.81 0.83 232
+ japanese 0.66 0.85 0.74 253
+ korean 0.89 0.58 0.71 236
+ thai 0.74 0.75 0.74 249
- accuracy 0.73 1199
- macro avg 0.75 0.73 0.72 1199
-weighted avg 0.75 0.73 0.73 1199
+ accuracy 0.75 1199
+ macro avg 0.76 0.74 0.75 1199
+weighted avg 0.76 0.75 0.75 1199
-Accuracy (train) for SVC: 83.9%
+Accuracy (train) for SVC: 83.8%
precision recall f1-score support
- chinese 0.76 0.79 0.78 230
- indian 0.91 0.91 0.91 252
- japanese 0.83 0.82 0.83 222
- korean 0.91 0.80 0.85 244
- thai 0.79 0.87 0.83 251
+ chinese 0.74 0.76 0.75 229
+ indian 0.91 0.93 0.92 232
+ japanese 0.86 0.82 0.84 253
+ korean 0.90 0.81 0.85 236
+ thai 0.79 0.87 0.83 249
accuracy 0.84 1199
macro avg 0.84 0.84 0.84 1199
@@ -2025,73 +2027,73 @@ 14.3.7. Ensemble Classifiers
-Accuracy (train) for Linear SVC: 79.9%
+Accuracy (train) for Linear SVC: 80.1%
precision recall f1-score support
- chinese 0.69 0.75 0.72 230
- indian 0.91 0.91 0.91 252
- japanese 0.77 0.77 0.77 222
- korean 0.84 0.74 0.78 244
- thai 0.79 0.82 0.81 251
+ chinese 0.66 0.74 0.70 229
+ indian 0.88 0.92 0.90 232
+ japanese 0.83 0.78 0.81 253
+ korean 0.87 0.76 0.81 236
+ thai 0.78 0.80 0.79 249
accuracy 0.80 1199
macro avg 0.80 0.80 0.80 1199
-weighted avg 0.80 0.80 0.80 1199
+weighted avg 0.81 0.80 0.80 1199
-Accuracy (train) for KNN classifier: 72.6%
+Accuracy (train) for KNN classifier: 74.6%
precision recall f1-score support
- chinese 0.63 0.71 0.67 230
- indian 0.87 0.79 0.83 252
- japanese 0.63 0.86 0.73 222
- korean 0.92 0.54 0.68 244
- thai 0.70 0.73 0.72 251
+ chinese 0.67 0.73 0.70 229
+ indian 0.85 0.81 0.83 232
+ japanese 0.66 0.85 0.74 253
+ korean 0.89 0.58 0.71 236
+ thai 0.74 0.75 0.74 249
- accuracy 0.73 1199
- macro avg 0.75 0.73 0.72 1199
-weighted avg 0.75 0.73 0.73 1199
+ accuracy 0.75 1199
+ macro avg 0.76 0.74 0.75 1199
+weighted avg 0.76 0.75 0.75 1199
-Accuracy (train) for SVC: 83.9%
+Accuracy (train) for SVC: 83.8%
precision recall f1-score support
- chinese 0.76 0.79 0.78 230
- indian 0.91 0.91 0.91 252
- japanese 0.83 0.82 0.83 222
- korean 0.91 0.80 0.85 244
- thai 0.79 0.87 0.83 251
+ chinese 0.74 0.76 0.75 229
+ indian 0.91 0.93 0.92 232
+ japanese 0.86 0.82 0.84 253
+ korean 0.90 0.81 0.85 236
+ thai 0.79 0.87 0.83 249
accuracy 0.84 1199
macro avg 0.84 0.84 0.84 1199
weighted avg 0.84 0.84 0.84 1199
-Accuracy (train) for RFST: 85.2%
+Accuracy (train) for RFST: 85.9%
precision recall f1-score support
- chinese 0.80 0.82 0.81 230
- indian 0.93 0.90 0.92 252
- japanese 0.85 0.83 0.84 222
- korean 0.87 0.81 0.84 244
- thai 0.81 0.89 0.85 251
+ chinese 0.82 0.80 0.81 229
+ indian 0.92 0.94 0.93 232
+ japanese 0.85 0.86 0.85 253
+ korean 0.89 0.82 0.86 236
+ thai 0.82 0.87 0.84 249
- accuracy 0.85 1199
- macro avg 0.85 0.85 0.85 1199
-weighted avg 0.85 0.85 0.85 1199
+ accuracy 0.86 1199
+ macro avg 0.86 0.86 0.86 1199
+weighted avg 0.86 0.86 0.86 1199
-Accuracy (train) for ADA: 71.5%
+Accuracy (train) for ADA: 70.2%
precision recall f1-score support
- chinese 0.67 0.49 0.56 230
- indian 0.89 0.81 0.85 252
- japanese 0.64 0.68 0.66 222
- korean 0.66 0.79 0.72 244
- thai 0.72 0.78 0.75 251
+ chinese 0.60 0.54 0.57 229
+ indian 0.87 0.87 0.87 232
+ japanese 0.68 0.57 0.62 253
+ korean 0.66 0.76 0.70 236
+ thai 0.70 0.78 0.74 249
- accuracy 0.71 1199
- macro avg 0.72 0.71 0.71 1199
-weighted avg 0.72 0.71 0.71 1199
+ accuracy 0.70 1199
+ macro avg 0.70 0.70 0.70 1199
+weighted avg 0.70 0.70 0.70 1199
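
Note: the repeated report blocks above come from one loop over a dict of classifiers; only the unseeded train/test split moved between rebuilds. A sketch of such a loop under assumed hyperparameters (the labels Linear SVC / KNN classifier / SVC / RFST / ADA are taken from the printed output; the data here is synthetic):

    from sklearn.datasets import make_classification
    from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
    from sklearn.metrics import accuracy_score, classification_report
    from sklearn.model_selection import train_test_split
    from sklearn.neighbors import KNeighborsClassifier
    from sklearn.svm import SVC, LinearSVC

    X, y = make_classification(n_samples=2000, n_features=40, n_informative=10,
                               n_classes=5, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    classifiers = {
        "Linear SVC": LinearSVC(random_state=0),
        "KNN classifier": KNeighborsClassifier(n_neighbors=10),
        "SVC": SVC(random_state=0),
        "RFST": RandomForestClassifier(n_estimators=100, random_state=0),
        "ADA": AdaBoostClassifier(n_estimators=100, random_state=0),
    }
    for name, clf in classifiers.items():
        clf.fit(X_train, y_train)
        y_pred = clf.predict(X_test)
        # label mirrors the notebook's "(train)" wording; the score is on the held-out split
        print(f"Accuracy (train) for {name}: {accuracy_score(y_test, y_pred) * 100:.1f}%")
        print(classification_report(y_test, y_pred))
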
diff --git a/ml-fundamentals/parameter-optimization/gradient-descent.html b/ml-fundamentals/parameter-optimization/gradient-descent.html
index 749b1c3705..16d24a04e2 100644
--- a/ml-fundamentals/parameter-optimization/gradient-descent.html
+++ b/ml-fundamentals/parameter-optimization/gradient-descent.html
@@ -2065,19 +2065,19 @@ 15.2.5.1. Linear regression With gradien
-{'lr': 0.0003, 'n_iters': 5000, 'weights': array([ 0.45351552, 0.21147914, 0.01485353, 0.08785205, 0.14878446,
- 0.12828472, 0.12230661, -0.00201684]), 'bias': 0.02192744125483242}
-0 -1.952323
-1 -1.731284
-2 -2.137262
-3 -1.628383
-4 -2.072147
+{'lr': 0.0003, 'n_iters': 5000, 'weights': array([ 4.52878699e-01, 2.15398482e-01, -2.58152276e-03, 9.64105213e-02,
+ 1.33677087e-01, 1.29817624e-01, 1.19847164e-01, -4.44435098e-04]), 'bias': 0.020373585739686062}
+0 -1.061494
+1 -0.703395
+2 -0.856630
+3 -0.600482
+4 -0.973848
...
-92 0.318140
-93 0.371237
-94 1.132971
-95 1.159795
-96 0.760089
+92 1.439704
+93 1.105060
+94 2.054375
+95 2.224784
+96 1.926531
Name: lpsa, Length: 97, dtype: float64
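
Note: the printed dict is the fitted state of a from-scratch gradient-descent linear regressor (lr=0.0003, n_iters=5000, eight weights plus a bias, matching the prostate data's eight predictors). A minimal sketch of such a class, run on synthetic stand-in data rather than the lpsa target:

    import numpy as np

    class LinearRegressionGD:
        def __init__(self, lr=0.0003, n_iters=5000):
            self.lr, self.n_iters = lr, n_iters

        def fit(self, X, y):
            n_samples, n_features = X.shape
            self.weights = np.zeros(n_features)
            self.bias = 0.0
            for _ in range(self.n_iters):
                error = X @ self.weights + self.bias - y             # residuals
                self.weights -= self.lr * (X.T @ error) / n_samples  # dMSE/dw
                self.bias -= self.lr * error.mean()                  # dMSE/db
            return self

        def predict(self, X):
            return X @ self.weights + self.bias

    rng = np.random.default_rng(0)
    X = rng.standard_normal((97, 8))   # 97 rows, like the lpsa series above
    y = X @ rng.standard_normal(8) + 0.02
    model = LinearRegressionGD().fit(X, y)
    print(vars(model))                 # {'lr': ..., 'n_iters': ..., 'weights': ..., 'bias': ...}
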
diff --git a/ml-fundamentals/regression/linear-and-polynomial-regression.html b/ml-fundamentals/regression/linear-and-polynomial-regression.html
index b058901c84..f8d4246adb 100644
--- a/ml-fundamentals/regression/linear-and-polynomial-regression.html
+++ b/ml-fundamentals/regression/linear-and-polynomial-regression.html
@@ -2262,7 +2262,7 @@ 12.3.3. Correlation
-<matplotlib.collections.PathCollection at 0x7f740b095b80>
+<matplotlib.collections.PathCollection at 0x7fc3d7815b50>
@@ -2448,7 +2448,7 @@ 12.3.5. Simple Linear Regression
-[<matplotlib.lines.Line2D at 0x7f73fa378c70>]
+[<matplotlib.lines.Line2D at 0x7fc3c6b07e20>]
@@ -2694,12 +2694,7 @@ 12.3.8. Putting it all together
Mean error: 2.23 (8.28%)
-
-
-
-
-
-Model determination: 0.9652659492293556
+Model determination: 0.9652659492293556
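
Note: "Model determination" is the R^2 score; it is deterministic for fixed inputs, which is why the value survives the rebuild and only stray blank lines were removed. For example:

    import numpy as np
    from sklearn.linear_model import LinearRegression
    from sklearn.metrics import r2_score

    X = np.arange(20, dtype=float).reshape(-1, 1)
    y = 2.23 * X.ravel() + 1.0                    # noiseless toy data
    model = LinearRegression().fit(X, y)
    print("Model determination:", r2_score(y, model.predict(X)))  # 1.0 here
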
diff --git a/ml-fundamentals/regression/logistic-regression.html b/ml-fundamentals/regression/logistic-regression.html
index ba7990a36d..fd812b6dfa 100644
--- a/ml-fundamentals/regression/logistic-regression.html
+++ b/ml-fundamentals/regression/logistic-regression.html
@@ -2141,7 +2141,7 @@ 12.4.5.1. Visualization - side-by-side g