adding all work done so far (lessons 1 - 5)

This commit is contained in:
2019-07-10 19:58:53 +01:00
parent 8085149a49
commit b982957daf
37 changed files with 19407 additions and 0 deletions

View File

@@ -0,0 +1,13 @@
import numpy as np


def two_group_ent(first, tot):
    """Return the entropy (in bits) of splitting `tot` items into two
    groups of sizes `first` and `tot - first`.

    A pure group (first == 0 or first == tot) has zero entropy; the raw
    formula would evaluate 0 * log2(0) = NaN, so guard that case explicitly.
    """
    if first == 0 or first == tot:
        return 0.0
    p = first / tot
    return -(p * np.log2(p) + (1 - p) * np.log2(1 - p))


# Entropy of the full 24-item sample (split 10 vs 14).
tot_ent = two_group_ent(10, 24)
# Weighted entropy after the candidate split: a 15-item group (11 vs 4)
# and a 9-item group (6 vs 3).
g17_ent = 15 / 24 * two_group_ent(11, 15) + 9 / 24 * two_group_ent(6, 9)
# Information gain of the split.
answer = tot_ent - g17_ent
print(answer)

View File

@@ -0,0 +1,25 @@
Species,Color,Length (mm)
Mobug,Brown,11.6
Mobug,Blue,16.3
Lobug,Blue,15.1
Lobug,Green,23.7
Lobug,Blue,18.4
Lobug,Brown,17.1
Mobug,Brown,15.7
Lobug,Green,18.6
Lobug,Blue,22.9
Lobug,Blue,21.0
Lobug,Blue,20.5
Mobug,Green,21.2
Mobug,Brown,13.8
Lobug,Blue,14.5
Lobug,Green,24.8
Mobug,Brown,18.2
Lobug,Green,17.9
Lobug,Green,22.7
Mobug,Green,19.9
Mobug,Blue,14.6
Mobug,Blue,19.2
Lobug,Brown,14.1
Lobug,Green,18.8
Mobug,Blue,13.1
1 Species Color Length (mm)
2 Mobug Brown 11.6
3 Mobug Blue 16.3
4 Lobug Blue 15.1
5 Lobug Green 23.7
6 Lobug Blue 18.4
7 Lobug Brown 17.1
8 Mobug Brown 15.7
9 Lobug Green 18.6
10 Lobug Blue 22.9
11 Lobug Blue 21.0
12 Lobug Blue 20.5
13 Mobug Green 21.2
14 Mobug Brown 13.8
15 Lobug Blue 14.5
16 Lobug Green 24.8
17 Mobug Brown 18.2
18 Lobug Green 17.9
19 Lobug Green 22.7
20 Mobug Green 19.9
21 Mobug Blue 14.6
22 Mobug Blue 19.2
23 Lobug Brown 14.1
24 Lobug Green 18.8
25 Mobug Blue 13.1

View File

@@ -0,0 +1,96 @@
0.24539,0.81725,0
0.21774,0.76462,0
0.20161,0.69737,0
0.20161,0.58041,0
0.2477,0.49561,0
0.32834,0.44883,0
0.39516,0.48099,0
0.39286,0.57164,0
0.33525,0.62135,0
0.33986,0.71199,0
0.34447,0.81433,0
0.28226,0.82602,0
0.26613,0.75,0
0.26613,0.63596,0
0.32604,0.54825,0
0.28917,0.65643,0
0.80069,0.71491,0
0.80069,0.64181,0
0.80069,0.50146,0
0.79839,0.36988,0
0.73157,0.25,0
0.63249,0.18275,0
0.60023,0.27047,0
0.66014,0.34649,0
0.70161,0.42251,0
0.70853,0.53947,0
0.71544,0.63304,0
0.74309,0.72076,0
0.75,0.63596,0
0.75,0.46345,0
0.72235,0.35526,0
0.66935,0.28509,0
0.20622,0.94298,1
0.26613,0.8962,1
0.38134,0.8962,1
0.42051,0.94591,1
0.49885,0.86404,1
0.31452,0.93421,1
0.53111,0.72076,1
0.45276,0.74415,1
0.53571,0.6038,1
0.60484,0.71491,1
0.60945,0.58333,1
0.51267,0.47807,1
0.50806,0.59211,1
0.46198,0.30556,1
0.5288,0.41082,1
0.38594,0.35819,1
0.31682,0.31433,1
0.29608,0.20906,1
0.36982,0.27632,1
0.42972,0.18275,1
0.51498,0.10965,1
0.53111,0.20906,1
0.59793,0.095029,1
0.73848,0.086257,1
0.83065,0.18275,1
0.8629,0.10965,1
0.88364,0.27924,1
0.93433,0.30848,1
0.93433,0.19444,1
0.92512,0.43421,1
0.87903,0.43421,1
0.87903,0.58626,1
0.9182,0.71491,1
0.85138,0.8348,1
0.85599,0.94006,1
0.70853,0.94298,1
0.70853,0.87281,1
0.59793,0.93129,1
0.61175,0.83187,1
0.78226,0.82895,1
0.78917,0.8962,1
0.90668,0.89912,1
0.14862,0.92251,1
0.15092,0.85819,1
0.097926,0.85819,1
0.079493,0.91374,1
0.079493,0.77632,1
0.10945,0.79678,1
0.12327,0.67982,1
0.077189,0.6886,1
0.081797,0.58626,1
0.14862,0.58041,1
0.14862,0.5307,1
0.14171,0.41959,1
0.08871,0.49269,1
0.095622,0.36696,1
0.24539,0.3962,1
0.1947,0.29678,1
0.16935,0.22368,1
0.15553,0.13596,1
0.23848,0.12427,1
0.33065,0.12427,1
0.095622,0.2617,1
0.091014,0.20322,1
1 0.24539 0.81725 0
2 0.21774 0.76462 0
3 0.20161 0.69737 0
4 0.20161 0.58041 0
5 0.2477 0.49561 0
6 0.32834 0.44883 0
7 0.39516 0.48099 0
8 0.39286 0.57164 0
9 0.33525 0.62135 0
10 0.33986 0.71199 0
11 0.34447 0.81433 0
12 0.28226 0.82602 0
13 0.26613 0.75 0
14 0.26613 0.63596 0
15 0.32604 0.54825 0
16 0.28917 0.65643 0
17 0.80069 0.71491 0
18 0.80069 0.64181 0
19 0.80069 0.50146 0
20 0.79839 0.36988 0
21 0.73157 0.25 0
22 0.63249 0.18275 0
23 0.60023 0.27047 0
24 0.66014 0.34649 0
25 0.70161 0.42251 0
26 0.70853 0.53947 0
27 0.71544 0.63304 0
28 0.74309 0.72076 0
29 0.75 0.63596 0
30 0.75 0.46345 0
31 0.72235 0.35526 0
32 0.66935 0.28509 0
33 0.20622 0.94298 1
34 0.26613 0.8962 1
35 0.38134 0.8962 1
36 0.42051 0.94591 1
37 0.49885 0.86404 1
38 0.31452 0.93421 1
39 0.53111 0.72076 1
40 0.45276 0.74415 1
41 0.53571 0.6038 1
42 0.60484 0.71491 1
43 0.60945 0.58333 1
44 0.51267 0.47807 1
45 0.50806 0.59211 1
46 0.46198 0.30556 1
47 0.5288 0.41082 1
48 0.38594 0.35819 1
49 0.31682 0.31433 1
50 0.29608 0.20906 1
51 0.36982 0.27632 1
52 0.42972 0.18275 1
53 0.51498 0.10965 1
54 0.53111 0.20906 1
55 0.59793 0.095029 1
56 0.73848 0.086257 1
57 0.83065 0.18275 1
58 0.8629 0.10965 1
59 0.88364 0.27924 1
60 0.93433 0.30848 1
61 0.93433 0.19444 1
62 0.92512 0.43421 1
63 0.87903 0.43421 1
64 0.87903 0.58626 1
65 0.9182 0.71491 1
66 0.85138 0.8348 1
67 0.85599 0.94006 1
68 0.70853 0.94298 1
69 0.70853 0.87281 1
70 0.59793 0.93129 1
71 0.61175 0.83187 1
72 0.78226 0.82895 1
73 0.78917 0.8962 1
74 0.90668 0.89912 1
75 0.14862 0.92251 1
76 0.15092 0.85819 1
77 0.097926 0.85819 1
78 0.079493 0.91374 1
79 0.079493 0.77632 1
80 0.10945 0.79678 1
81 0.12327 0.67982 1
82 0.077189 0.6886 1
83 0.081797 0.58626 1
84 0.14862 0.58041 1
85 0.14862 0.5307 1
86 0.14171 0.41959 1
87 0.08871 0.49269 1
88 0.095622 0.36696 1
89 0.24539 0.3962 1
90 0.1947 0.29678 1
91 0.16935 0.22368 1
92 0.15553 0.13596 1
93 0.23848 0.12427 1
94 0.33065 0.12427 1
95 0.095622 0.2617 1
96 0.091014 0.20322 1

View File

@@ -0,0 +1,29 @@
# Dependencies: sklearn supplies the model and the metric; pandas/numpy
# handle loading the raw CSV into an array.
import pandas as pd
import numpy as np
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score

# Load the CSV (no header row) as a plain numpy array.
data = np.asarray(pd.read_csv('data.csv', header=None))

# First two columns are the features; the last column is the label.
X, y = data[:, 0:2], data[:, 2]

# Decision tree with modest capacity limits — adjusting max_depth and
# min_samples_leaf reshapes the decision boundary.
model = DecisionTreeClassifier(max_depth=7, min_samples_leaf=10)
model.fit(X, y)

# Predict on the training data itself and report accuracy.
y_pred = model.predict(X)
print(y_pred)

acc = accuracy_score(y, y_pred)
print(acc)

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env python
# coding: utf-8
# # Lab: Titanic Survival Exploration with Decision Trees
# ## Getting Started
# In this lab, you will see how decision trees work by implementing a decision tree in sklearn.
#
# We'll start by loading the dataset and displaying some of its rows.
# In[6]:
# Import libraries necessary for this project
import numpy as np
import pandas as pd
# from IPython.display import display # Allows the use of display() for DataFrames
# Pretty display for notebooks
# get_ipython().run_line_magic('matplotlib', 'inline')
# Set a random seed
import random
random.seed(42)
# Load the dataset
in_file = 'titanic_data.csv'
full_data = pd.read_csv(in_file)
# Print the first few entries of the RMS Titanic data.
# NOTE: the IPython `display` import above is commented out, so calling
# display() would raise NameError outside a notebook; use print() instead.
print(full_data.head())
# Recall that these are the various features present for each passenger on the ship:
# - **Survived**: Outcome of survival (0 = No; 1 = Yes)
# - **Pclass**: Socio-economic class (1 = Upper class; 2 = Middle class; 3 = Lower class)
# - **Name**: Name of passenger
# - **Sex**: Sex of the passenger
# - **Age**: Age of the passenger (Some entries contain `NaN`)
# - **SibSp**: Number of siblings and spouses of the passenger aboard
# - **Parch**: Number of parents and children of the passenger
# - **Ticket**: Ticket number of the passenger
# - **Fare**: Fare paid by the passenger
# - **Cabin** Cabin number of the passenger (Some entries contain `NaN`)
# - **Embarked**: Port of embarkation of the passenger (C = Cherbourg; Q = Queenstown; S = Southampton)
#
# Since we're interested in the outcome of survival for each passenger or crew member, we can remove the **Survived** feature from this dataset and store it as its own separate variable `outcomes`. We will use these outcomes as our prediction targets.
# Run the code cell below to remove **Survived** as a feature of the dataset and store it in `outcomes`.
# In[7]:
# Store the 'Survived' feature in a new variable and remove it from the dataset
outcomes = full_data['Survived']
features_raw = full_data.drop('Survived', axis = 1)
# Show the new dataset with 'Survived' removed (print, not display — see note above)
print(features_raw.head())
# The very same sample of the RMS Titanic data now shows the **Survived** feature removed from the DataFrame. Note that `data` (the passenger data) and `outcomes` (the outcomes of survival) are now *paired*. That means for any passenger `data.loc[i]`, they have the survival outcome `outcomes[i]`.
#
# ## Preprocessing the data
#
# Now, let's do some data preprocessing. First, we'll remove the names of the passengers, and then one-hot encode the features.
#
# **Question:** Why would it be a terrible idea to one-hot encode the data without removing the names?
# (Answer: names are essentially unique, so one-hot encoding them would add
# one nearly-all-zero column per passenger, ballooning the feature space and
# inviting overfitting.)
# In[8]:
# Removing the names
features_no_names = features_raw.drop(['Name'], axis=1)
# One-hot encoding
features = pd.get_dummies(features_no_names)
# And now we'll fill in any blanks with zeroes.
# In[9]:
features = features.fillna(0.0)
print(features.head())
# ## (TODO) Training the model
#
# Now we're ready to train a model in sklearn. First, let's split the data into training and testing sets. Then we'll train the model on the training set.
# In[15]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(features, outcomes, test_size=0.2, random_state=42)
# In[17]:
# Import the classifier from sklearn
from sklearn.tree import DecisionTreeClassifier
# TODO: Define the classifier, and fit it to the data
model = DecisionTreeClassifier()
model.fit(X_train, y_train)
# ## Testing the model
# Now, let's see how our model does, let's calculate the accuracy over both the training and the testing set.
# In[18]:
# Making predictions
y_train_pred = model.predict(X_train)
y_test_pred = model.predict(X_test)
# Calculate the accuracy
from sklearn.metrics import accuracy_score
train_accuracy = accuracy_score(y_train, y_train_pred)
test_accuracy = accuracy_score(y_test, y_test_pred)
print('The training accuracy is', train_accuracy)
print('The test accuracy is', test_accuracy)
# # Exercise: Improving the model
#
# Ok, high training accuracy and a lower testing accuracy. We may be overfitting a bit.
#
# So now it's your turn to shine! Train a new model, and try to specify some parameters in order to improve the testing accuracy, such as:
# - `max_depth`
# - `min_samples_leaf`
# - `min_samples_split`
#
# You can use your intuition, trial and error, or even better, feel free to use Grid Search!
#
# **Challenge:** Try to get to 85% accuracy on the testing set. If you'd like a hint, take a look at the solutions notebook next.
# In[23]:
# TODO: Train the model
new_model = DecisionTreeClassifier(max_depth=10, min_samples_leaf=6, min_samples_split=8)
new_model.fit(X_train, y_train)
# TODO: Make predictions
new_y_train_pred = new_model.predict(X_train)
new_y_test_pred = new_model.predict(X_test)
# TODO: Calculate the accuracy
new_train_accuracy = accuracy_score(y_train, new_y_train_pred)
new_test_accuracy = accuracy_score(y_test, new_y_test_pred)
print(f'The training accuracy on the new model is {new_train_accuracy:.4f}')
print(f'The test accuracy on the new model is {new_test_accuracy:.4f}')

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env python
# coding: utf-8
# # Lab: Titanic Survival Exploration with Decision Trees
# ## Getting Started
# In this lab, you will see how decision trees work by implementing a decision tree in sklearn.
#
# We'll start by loading the dataset and displaying some of its rows.
# In[6]:
# Import libraries necessary for this project
import numpy as np
import pandas as pd
# from IPython.display import display # Allows the use of display() for DataFrames
# Pretty display for notebooks
# get_ipython().run_line_magic('matplotlib', 'inline')
# Set a random seed
import random
random.seed(42)
# Load the dataset
in_file = 'titanic_data.csv'
full_data = pd.read_csv(in_file)
# Print the first few entries of the RMS Titanic data
# (display() is notebook-only, so the call is commented out in this script export)
# display(full_data.head())
# Recall that these are the various features present for each passenger on the ship:
# - **Survived**: Outcome of survival (0 = No; 1 = Yes)
# - **Pclass**: Socio-economic class (1 = Upper class; 2 = Middle class; 3 = Lower class)
# - **Name**: Name of passenger
# - **Sex**: Sex of the passenger
# - **Age**: Age of the passenger (Some entries contain `NaN`)
# - **SibSp**: Number of siblings and spouses of the passenger aboard
# - **Parch**: Number of parents and children of the passenger
# - **Ticket**: Ticket number of the passenger
# - **Fare**: Fare paid by the passenger
# - **Cabin** Cabin number of the passenger (Some entries contain `NaN`)
# - **Embarked**: Port of embarkation of the passenger (C = Cherbourg; Q = Queenstown; S = Southampton)
#
# Since we're interested in the outcome of survival for each passenger or crew member, we can remove the **Survived** feature from this dataset and store it as its own separate variable `outcomes`. We will use these outcomes as our prediction targets.
# Run the code cell below to remove **Survived** as a feature of the dataset and store it in `outcomes`.
# In[7]:
# Store the 'Survived' feature in a new variable and remove it from the dataset
outcomes = full_data['Survived']
features_raw = full_data.drop('Survived', axis = 1)
# Show the new dataset with 'Survived' removed
# display(features_raw.head())
# The very same sample of the RMS Titanic data now shows the **Survived** feature removed from the DataFrame. Note that `data` (the passenger data) and `outcomes` (the outcomes of survival) are now *paired*. That means for any passenger `data.loc[i]`, they have the survival outcome `outcomes[i]`.
#
# ## Preprocessing the data
#
# Now, let's do some data preprocessing. First, we'll remove the names of the passengers, and then one-hot encode the features.
#
# **Question:** Why would it be a terrible idea to one-hot encode the data without removing the names?
# (Answer: names are essentially unique, so one-hot encoding them would add
# one nearly-all-zero column per passenger and invite overfitting.)
# In[8]:
# Removing the names
features_no_names = features_raw.drop(['Name'], axis=1)
# One-hot encoding
features = pd.get_dummies(features_no_names)
# And now we'll fill in any blanks with zeroes.
# In[9]:
features = features.fillna(0.0)
# display(features.head())
# ## (TODO) Training the model
#
# Now we're ready to train a model in sklearn. First, let's split the data into training and testing sets. Then we'll train the model on the training set.
# In[15]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(features, outcomes, test_size=0.2, random_state=42)
# In[17]:
# Import the classifier from sklearn
from sklearn.tree import DecisionTreeClassifier
# TODO: Define the classifier, and fit it to the data
model = DecisionTreeClassifier()
model.fit(X_train, y_train)
# ## Testing the model
# Now, let's see how our model does, let's calculate the accuracy over both the training and the testing set.
# In[18]:
# Making predictions
y_train_pred = model.predict(X_train)
y_test_pred = model.predict(X_test)
# Calculate the accuracy
from sklearn.metrics import accuracy_score
train_accuracy = accuracy_score(y_train, y_train_pred)
test_accuracy = accuracy_score(y_test, y_test_pred)
print('The training accuracy is', train_accuracy)
print('The test accuracy is', test_accuracy)
# # Exercise: Improving the model
#
# Ok, high training accuracy and a lower testing accuracy. We may be overfitting a bit.
#
# So now it's your turn to shine! Train a new model, and try to specify some parameters in order to improve the testing accuracy, such as:
# - `max_depth`
# - `min_samples_leaf`
# - `min_samples_split`
#
# You can use your intuition, trial and error, or even better, feel free to use Grid Search!
#
# **Challenge:** Try to get to 85% accuracy on the testing set. If you'd like a hint, take a look at the solutions notebook next.
# In[23]:
# TODO: Train the model
new_model = DecisionTreeClassifier(max_depth=10, min_samples_leaf=6, min_samples_split=8)
new_model.fit(X_train, y_train)
# TODO: Make predictions
new_y_train_pred = new_model.predict(X_train)
new_y_test_pred = new_model.predict(X_test)
# TODO: Calculate the accuracy
new_train_accuracy = accuracy_score(y_train, new_y_train_pred)
new_test_accuracy = accuracy_score(y_test, new_y_test_pred)
print(f'The training accuracy on the new model is {new_train_accuracy:.4f}')
print(f'The test accuracy on the new model is {new_test_accuracy:.4f}')

View File

@@ -0,0 +1,892 @@
,PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
0,1,0,3,"Braund, Mr. Owen Harris",male,22.0,1,0,A/5 21171,7.25,,S
1,2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38.0,1,0,PC 17599,71.2833,C85,C
2,3,1,3,"Heikkinen, Miss. Laina",female,26.0,0,0,STON/O2. 3101282,7.925,,S
3,4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35.0,1,0,113803,53.1,C123,S
4,5,0,3,"Allen, Mr. William Henry",male,35.0,0,0,373450,8.05,,S
5,6,0,3,"Moran, Mr. James",male,,0,0,330877,8.4583,,Q
6,7,0,1,"McCarthy, Mr. Timothy J",male,54.0,0,0,17463,51.8625,E46,S
7,8,0,3,"Palsson, Master. Gosta Leonard",male,2.0,3,1,349909,21.075,,S
8,9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27.0,0,2,347742,11.1333,,S
9,10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14.0,1,0,237736,30.0708,,C
10,11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4.0,1,1,PP 9549,16.7,G6,S
11,12,1,1,"Bonnell, Miss. Elizabeth",female,58.0,0,0,113783,26.55,C103,S
12,13,0,3,"Saundercock, Mr. William Henry",male,20.0,0,0,A/5. 2151,8.05,,S
13,14,0,3,"Andersson, Mr. Anders Johan",male,39.0,1,5,347082,31.275,,S
14,15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14.0,0,0,350406,7.8542,,S
15,16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55.0,0,0,248706,16.0,,S
16,17,0,3,"Rice, Master. Eugene",male,2.0,4,1,382652,29.125,,Q
17,18,1,2,"Williams, Mr. Charles Eugene",male,,0,0,244373,13.0,,S
18,19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31.0,1,0,345763,18.0,,S
19,20,1,3,"Masselmani, Mrs. Fatima",female,,0,0,2649,7.225,,C
20,21,0,2,"Fynney, Mr. Joseph J",male,35.0,0,0,239865,26.0,,S
21,22,1,2,"Beesley, Mr. Lawrence",male,34.0,0,0,248698,13.0,D56,S
22,23,1,3,"McGowan, Miss. Anna ""Annie""",female,15.0,0,0,330923,8.0292,,Q
23,24,1,1,"Sloper, Mr. William Thompson",male,28.0,0,0,113788,35.5,A6,S
24,25,0,3,"Palsson, Miss. Torborg Danira",female,8.0,3,1,349909,21.075,,S
25,26,1,3,"Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson)",female,38.0,1,5,347077,31.3875,,S
26,27,0,3,"Emir, Mr. Farred Chehab",male,,0,0,2631,7.225,,C
27,28,0,1,"Fortune, Mr. Charles Alexander",male,19.0,3,2,19950,263.0,C23 C25 C27,S
28,29,1,3,"O'Dwyer, Miss. Ellen ""Nellie""",female,,0,0,330959,7.8792,,Q
29,30,0,3,"Todoroff, Mr. Lalio",male,,0,0,349216,7.8958,,S
30,31,0,1,"Uruchurtu, Don. Manuel E",male,40.0,0,0,PC 17601,27.7208,,C
31,32,1,1,"Spencer, Mrs. William Augustus (Marie Eugenie)",female,,1,0,PC 17569,146.5208,B78,C
32,33,1,3,"Glynn, Miss. Mary Agatha",female,,0,0,335677,7.75,,Q
33,34,0,2,"Wheadon, Mr. Edward H",male,66.0,0,0,C.A. 24579,10.5,,S
34,35,0,1,"Meyer, Mr. Edgar Joseph",male,28.0,1,0,PC 17604,82.1708,,C
35,36,0,1,"Holverson, Mr. Alexander Oskar",male,42.0,1,0,113789,52.0,,S
36,37,1,3,"Mamee, Mr. Hanna",male,,0,0,2677,7.2292,,C
37,38,0,3,"Cann, Mr. Ernest Charles",male,21.0,0,0,A./5. 2152,8.05,,S
38,39,0,3,"Vander Planke, Miss. Augusta Maria",female,18.0,2,0,345764,18.0,,S
39,40,1,3,"Nicola-Yarred, Miss. Jamila",female,14.0,1,0,2651,11.2417,,C
40,41,0,3,"Ahlin, Mrs. Johan (Johanna Persdotter Larsson)",female,40.0,1,0,7546,9.475,,S
41,42,0,2,"Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott)",female,27.0,1,0,11668,21.0,,S
42,43,0,3,"Kraeff, Mr. Theodor",male,,0,0,349253,7.8958,,C
43,44,1,2,"Laroche, Miss. Simonne Marie Anne Andree",female,3.0,1,2,SC/Paris 2123,41.5792,,C
44,45,1,3,"Devaney, Miss. Margaret Delia",female,19.0,0,0,330958,7.8792,,Q
45,46,0,3,"Rogers, Mr. William John",male,,0,0,S.C./A.4. 23567,8.05,,S
46,47,0,3,"Lennon, Mr. Denis",male,,1,0,370371,15.5,,Q
47,48,1,3,"O'Driscoll, Miss. Bridget",female,,0,0,14311,7.75,,Q
48,49,0,3,"Samaan, Mr. Youssef",male,,2,0,2662,21.6792,,C
49,50,0,3,"Arnold-Franchi, Mrs. Josef (Josefine Franchi)",female,18.0,1,0,349237,17.8,,S
50,51,0,3,"Panula, Master. Juha Niilo",male,7.0,4,1,3101295,39.6875,,S
51,52,0,3,"Nosworthy, Mr. Richard Cater",male,21.0,0,0,A/4. 39886,7.8,,S
52,53,1,1,"Harper, Mrs. Henry Sleeper (Myna Haxtun)",female,49.0,1,0,PC 17572,76.7292,D33,C
53,54,1,2,"Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson)",female,29.0,1,0,2926,26.0,,S
54,55,0,1,"Ostby, Mr. Engelhart Cornelius",male,65.0,0,1,113509,61.9792,B30,C
55,56,1,1,"Woolner, Mr. Hugh",male,,0,0,19947,35.5,C52,S
56,57,1,2,"Rugg, Miss. Emily",female,21.0,0,0,C.A. 31026,10.5,,S
57,58,0,3,"Novel, Mr. Mansouer",male,28.5,0,0,2697,7.2292,,C
58,59,1,2,"West, Miss. Constance Mirium",female,5.0,1,2,C.A. 34651,27.75,,S
59,60,0,3,"Goodwin, Master. William Frederick",male,11.0,5,2,CA 2144,46.9,,S
60,61,0,3,"Sirayanian, Mr. Orsen",male,22.0,0,0,2669,7.2292,,C
61,62,1,1,"Icard, Miss. Amelie",female,38.0,0,0,113572,80.0,B28,
62,63,0,1,"Harris, Mr. Henry Birkhardt",male,45.0,1,0,36973,83.475,C83,S
63,64,0,3,"Skoog, Master. Harald",male,4.0,3,2,347088,27.9,,S
64,65,0,1,"Stewart, Mr. Albert A",male,,0,0,PC 17605,27.7208,,C
65,66,1,3,"Moubarek, Master. Gerios",male,,1,1,2661,15.2458,,C
66,67,1,2,"Nye, Mrs. (Elizabeth Ramell)",female,29.0,0,0,C.A. 29395,10.5,F33,S
67,68,0,3,"Crease, Mr. Ernest James",male,19.0,0,0,S.P. 3464,8.1583,,S
68,69,1,3,"Andersson, Miss. Erna Alexandra",female,17.0,4,2,3101281,7.925,,S
69,70,0,3,"Kink, Mr. Vincenz",male,26.0,2,0,315151,8.6625,,S
70,71,0,2,"Jenkin, Mr. Stephen Curnow",male,32.0,0,0,C.A. 33111,10.5,,S
71,72,0,3,"Goodwin, Miss. Lillian Amy",female,16.0,5,2,CA 2144,46.9,,S
72,73,0,2,"Hood, Mr. Ambrose Jr",male,21.0,0,0,S.O.C. 14879,73.5,,S
73,74,0,3,"Chronopoulos, Mr. Apostolos",male,26.0,1,0,2680,14.4542,,C
74,75,1,3,"Bing, Mr. Lee",male,32.0,0,0,1601,56.4958,,S
75,76,0,3,"Moen, Mr. Sigurd Hansen",male,25.0,0,0,348123,7.65,F G73,S
76,77,0,3,"Staneff, Mr. Ivan",male,,0,0,349208,7.8958,,S
77,78,0,3,"Moutal, Mr. Rahamin Haim",male,,0,0,374746,8.05,,S
78,79,1,2,"Caldwell, Master. Alden Gates",male,0.83,0,2,248738,29.0,,S
79,80,1,3,"Dowdell, Miss. Elizabeth",female,30.0,0,0,364516,12.475,,S
80,81,0,3,"Waelens, Mr. Achille",male,22.0,0,0,345767,9.0,,S
81,82,1,3,"Sheerlinck, Mr. Jan Baptist",male,29.0,0,0,345779,9.5,,S
82,83,1,3,"McDermott, Miss. Brigdet Delia",female,,0,0,330932,7.7875,,Q
83,84,0,1,"Carrau, Mr. Francisco M",male,28.0,0,0,113059,47.1,,S
84,85,1,2,"Ilett, Miss. Bertha",female,17.0,0,0,SO/C 14885,10.5,,S
85,86,1,3,"Backstrom, Mrs. Karl Alfred (Maria Mathilda Gustafsson)",female,33.0,3,0,3101278,15.85,,S
86,87,0,3,"Ford, Mr. William Neal",male,16.0,1,3,W./C. 6608,34.375,,S
87,88,0,3,"Slocovski, Mr. Selman Francis",male,,0,0,SOTON/OQ 392086,8.05,,S
88,89,1,1,"Fortune, Miss. Mabel Helen",female,23.0,3,2,19950,263.0,C23 C25 C27,S
89,90,0,3,"Celotti, Mr. Francesco",male,24.0,0,0,343275,8.05,,S
90,91,0,3,"Christmann, Mr. Emil",male,29.0,0,0,343276,8.05,,S
91,92,0,3,"Andreasson, Mr. Paul Edvin",male,20.0,0,0,347466,7.8542,,S
92,93,0,1,"Chaffee, Mr. Herbert Fuller",male,46.0,1,0,W.E.P. 5734,61.175,E31,S
93,94,0,3,"Dean, Mr. Bertram Frank",male,26.0,1,2,C.A. 2315,20.575,,S
94,95,0,3,"Coxon, Mr. Daniel",male,59.0,0,0,364500,7.25,,S
95,96,0,3,"Shorney, Mr. Charles Joseph",male,,0,0,374910,8.05,,S
96,97,0,1,"Goldschmidt, Mr. George B",male,71.0,0,0,PC 17754,34.6542,A5,C
97,98,1,1,"Greenfield, Mr. William Bertram",male,23.0,0,1,PC 17759,63.3583,D10 D12,C
98,99,1,2,"Doling, Mrs. John T (Ada Julia Bone)",female,34.0,0,1,231919,23.0,,S
99,100,0,2,"Kantor, Mr. Sinai",male,34.0,1,0,244367,26.0,,S
100,101,0,3,"Petranec, Miss. Matilda",female,28.0,0,0,349245,7.8958,,S
101,102,0,3,"Petroff, Mr. Pastcho (""Pentcho"")",male,,0,0,349215,7.8958,,S
102,103,0,1,"White, Mr. Richard Frasar",male,21.0,0,1,35281,77.2875,D26,S
103,104,0,3,"Johansson, Mr. Gustaf Joel",male,33.0,0,0,7540,8.6542,,S
104,105,0,3,"Gustafsson, Mr. Anders Vilhelm",male,37.0,2,0,3101276,7.925,,S
105,106,0,3,"Mionoff, Mr. Stoytcho",male,28.0,0,0,349207,7.8958,,S
106,107,1,3,"Salkjelsvik, Miss. Anna Kristine",female,21.0,0,0,343120,7.65,,S
107,108,1,3,"Moss, Mr. Albert Johan",male,,0,0,312991,7.775,,S
108,109,0,3,"Rekic, Mr. Tido",male,38.0,0,0,349249,7.8958,,S
109,110,1,3,"Moran, Miss. Bertha",female,,1,0,371110,24.15,,Q
110,111,0,1,"Porter, Mr. Walter Chamberlain",male,47.0,0,0,110465,52.0,C110,S
111,112,0,3,"Zabour, Miss. Hileni",female,14.5,1,0,2665,14.4542,,C
112,113,0,3,"Barton, Mr. David John",male,22.0,0,0,324669,8.05,,S
113,114,0,3,"Jussila, Miss. Katriina",female,20.0,1,0,4136,9.825,,S
114,115,0,3,"Attalah, Miss. Malake",female,17.0,0,0,2627,14.4583,,C
115,116,0,3,"Pekoniemi, Mr. Edvard",male,21.0,0,0,STON/O 2. 3101294,7.925,,S
116,117,0,3,"Connors, Mr. Patrick",male,70.5,0,0,370369,7.75,,Q
117,118,0,2,"Turpin, Mr. William John Robert",male,29.0,1,0,11668,21.0,,S
118,119,0,1,"Baxter, Mr. Quigg Edmond",male,24.0,0,1,PC 17558,247.5208,B58 B60,C
119,120,0,3,"Andersson, Miss. Ellis Anna Maria",female,2.0,4,2,347082,31.275,,S
120,121,0,2,"Hickman, Mr. Stanley George",male,21.0,2,0,S.O.C. 14879,73.5,,S
121,122,0,3,"Moore, Mr. Leonard Charles",male,,0,0,A4. 54510,8.05,,S
122,123,0,2,"Nasser, Mr. Nicholas",male,32.5,1,0,237736,30.0708,,C
123,124,1,2,"Webber, Miss. Susan",female,32.5,0,0,27267,13.0,E101,S
124,125,0,1,"White, Mr. Percival Wayland",male,54.0,0,1,35281,77.2875,D26,S
125,126,1,3,"Nicola-Yarred, Master. Elias",male,12.0,1,0,2651,11.2417,,C
126,127,0,3,"McMahon, Mr. Martin",male,,0,0,370372,7.75,,Q
127,128,1,3,"Madsen, Mr. Fridtjof Arne",male,24.0,0,0,C 17369,7.1417,,S
128,129,1,3,"Peter, Miss. Anna",female,,1,1,2668,22.3583,F E69,C
129,130,0,3,"Ekstrom, Mr. Johan",male,45.0,0,0,347061,6.975,,S
130,131,0,3,"Drazenoic, Mr. Jozef",male,33.0,0,0,349241,7.8958,,C
131,132,0,3,"Coelho, Mr. Domingos Fernandeo",male,20.0,0,0,SOTON/O.Q. 3101307,7.05,,S
132,133,0,3,"Robins, Mrs. Alexander A (Grace Charity Laury)",female,47.0,1,0,A/5. 3337,14.5,,S
133,134,1,2,"Weisz, Mrs. Leopold (Mathilde Francoise Pede)",female,29.0,1,0,228414,26.0,,S
134,135,0,2,"Sobey, Mr. Samuel James Hayden",male,25.0,0,0,C.A. 29178,13.0,,S
135,136,0,2,"Richard, Mr. Emile",male,23.0,0,0,SC/PARIS 2133,15.0458,,C
136,137,1,1,"Newsom, Miss. Helen Monypeny",female,19.0,0,2,11752,26.2833,D47,S
137,138,0,1,"Futrelle, Mr. Jacques Heath",male,37.0,1,0,113803,53.1,C123,S
138,139,0,3,"Osen, Mr. Olaf Elon",male,16.0,0,0,7534,9.2167,,S
139,140,0,1,"Giglio, Mr. Victor",male,24.0,0,0,PC 17593,79.2,B86,C
140,141,0,3,"Boulos, Mrs. Joseph (Sultana)",female,,0,2,2678,15.2458,,C
141,142,1,3,"Nysten, Miss. Anna Sofia",female,22.0,0,0,347081,7.75,,S
142,143,1,3,"Hakkarainen, Mrs. Pekka Pietari (Elin Matilda Dolck)",female,24.0,1,0,STON/O2. 3101279,15.85,,S
143,144,0,3,"Burke, Mr. Jeremiah",male,19.0,0,0,365222,6.75,,Q
144,145,0,2,"Andrew, Mr. Edgardo Samuel",male,18.0,0,0,231945,11.5,,S
145,146,0,2,"Nicholls, Mr. Joseph Charles",male,19.0,1,1,C.A. 33112,36.75,,S
146,147,1,3,"Andersson, Mr. August Edvard (""Wennerstrom"")",male,27.0,0,0,350043,7.7958,,S
147,148,0,3,"Ford, Miss. Robina Maggie ""Ruby""",female,9.0,2,2,W./C. 6608,34.375,,S
148,149,0,2,"Navratil, Mr. Michel (""Louis M Hoffman"")",male,36.5,0,2,230080,26.0,F2,S
149,150,0,2,"Byles, Rev. Thomas Roussel Davids",male,42.0,0,0,244310,13.0,,S
150,151,0,2,"Bateman, Rev. Robert James",male,51.0,0,0,S.O.P. 1166,12.525,,S
151,152,1,1,"Pears, Mrs. Thomas (Edith Wearne)",female,22.0,1,0,113776,66.6,C2,S
152,153,0,3,"Meo, Mr. Alfonzo",male,55.5,0,0,A.5. 11206,8.05,,S
153,154,0,3,"van Billiard, Mr. Austin Blyler",male,40.5,0,2,A/5. 851,14.5,,S
154,155,0,3,"Olsen, Mr. Ole Martin",male,,0,0,Fa 265302,7.3125,,S
155,156,0,1,"Williams, Mr. Charles Duane",male,51.0,0,1,PC 17597,61.3792,,C
156,157,1,3,"Gilnagh, Miss. Katherine ""Katie""",female,16.0,0,0,35851,7.7333,,Q
157,158,0,3,"Corn, Mr. Harry",male,30.0,0,0,SOTON/OQ 392090,8.05,,S
158,159,0,3,"Smiljanic, Mr. Mile",male,,0,0,315037,8.6625,,S
159,160,0,3,"Sage, Master. Thomas Henry",male,,8,2,CA. 2343,69.55,,S
160,161,0,3,"Cribb, Mr. John Hatfield",male,44.0,0,1,371362,16.1,,S
161,162,1,2,"Watt, Mrs. James (Elizabeth ""Bessie"" Inglis Milne)",female,40.0,0,0,C.A. 33595,15.75,,S
162,163,0,3,"Bengtsson, Mr. John Viktor",male,26.0,0,0,347068,7.775,,S
163,164,0,3,"Calic, Mr. Jovo",male,17.0,0,0,315093,8.6625,,S
164,165,0,3,"Panula, Master. Eino Viljami",male,1.0,4,1,3101295,39.6875,,S
165,166,1,3,"Goldsmith, Master. Frank John William ""Frankie""",male,9.0,0,2,363291,20.525,,S
166,167,1,1,"Chibnall, Mrs. (Edith Martha Bowerman)",female,,0,1,113505,55.0,E33,S
167,168,0,3,"Skoog, Mrs. William (Anna Bernhardina Karlsson)",female,45.0,1,4,347088,27.9,,S
168,169,0,1,"Baumann, Mr. John D",male,,0,0,PC 17318,25.925,,S
169,170,0,3,"Ling, Mr. Lee",male,28.0,0,0,1601,56.4958,,S
170,171,0,1,"Van der hoef, Mr. Wyckoff",male,61.0,0,0,111240,33.5,B19,S
171,172,0,3,"Rice, Master. Arthur",male,4.0,4,1,382652,29.125,,Q
172,173,1,3,"Johnson, Miss. Eleanor Ileen",female,1.0,1,1,347742,11.1333,,S
173,174,0,3,"Sivola, Mr. Antti Wilhelm",male,21.0,0,0,STON/O 2. 3101280,7.925,,S
174,175,0,1,"Smith, Mr. James Clinch",male,56.0,0,0,17764,30.6958,A7,C
175,176,0,3,"Klasen, Mr. Klas Albin",male,18.0,1,1,350404,7.8542,,S
176,177,0,3,"Lefebre, Master. Henry Forbes",male,,3,1,4133,25.4667,,S
177,178,0,1,"Isham, Miss. Ann Elizabeth",female,50.0,0,0,PC 17595,28.7125,C49,C
178,179,0,2,"Hale, Mr. Reginald",male,30.0,0,0,250653,13.0,,S
179,180,0,3,"Leonard, Mr. Lionel",male,36.0,0,0,LINE,0.0,,S
180,181,0,3,"Sage, Miss. Constance Gladys",female,,8,2,CA. 2343,69.55,,S
181,182,0,2,"Pernot, Mr. Rene",male,,0,0,SC/PARIS 2131,15.05,,C
182,183,0,3,"Asplund, Master. Clarence Gustaf Hugo",male,9.0,4,2,347077,31.3875,,S
183,184,1,2,"Becker, Master. Richard F",male,1.0,2,1,230136,39.0,F4,S
184,185,1,3,"Kink-Heilmann, Miss. Luise Gretchen",female,4.0,0,2,315153,22.025,,S
185,186,0,1,"Rood, Mr. Hugh Roscoe",male,,0,0,113767,50.0,A32,S
186,187,1,3,"O'Brien, Mrs. Thomas (Johanna ""Hannah"" Godfrey)",female,,1,0,370365,15.5,,Q
187,188,1,1,"Romaine, Mr. Charles Hallace (""Mr C Rolmane"")",male,45.0,0,0,111428,26.55,,S
188,189,0,3,"Bourke, Mr. John",male,40.0,1,1,364849,15.5,,Q
189,190,0,3,"Turcin, Mr. Stjepan",male,36.0,0,0,349247,7.8958,,S
190,191,1,2,"Pinsky, Mrs. (Rosa)",female,32.0,0,0,234604,13.0,,S
191,192,0,2,"Carbines, Mr. William",male,19.0,0,0,28424,13.0,,S
192,193,1,3,"Andersen-Jensen, Miss. Carla Christine Nielsine",female,19.0,1,0,350046,7.8542,,S
193,194,1,2,"Navratil, Master. Michel M",male,3.0,1,1,230080,26.0,F2,S
194,195,1,1,"Brown, Mrs. James Joseph (Margaret Tobin)",female,44.0,0,0,PC 17610,27.7208,B4,C
195,196,1,1,"Lurette, Miss. Elise",female,58.0,0,0,PC 17569,146.5208,B80,C
196,197,0,3,"Mernagh, Mr. Robert",male,,0,0,368703,7.75,,Q
197,198,0,3,"Olsen, Mr. Karl Siegwart Andreas",male,42.0,0,1,4579,8.4042,,S
198,199,1,3,"Madigan, Miss. Margaret ""Maggie""",female,,0,0,370370,7.75,,Q
199,200,0,2,"Yrois, Miss. Henriette (""Mrs Harbeck"")",female,24.0,0,0,248747,13.0,,S
200,201,0,3,"Vande Walle, Mr. Nestor Cyriel",male,28.0,0,0,345770,9.5,,S
201,202,0,3,"Sage, Mr. Frederick",male,,8,2,CA. 2343,69.55,,S
202,203,0,3,"Johanson, Mr. Jakob Alfred",male,34.0,0,0,3101264,6.4958,,S
203,204,0,3,"Youseff, Mr. Gerious",male,45.5,0,0,2628,7.225,,C
204,205,1,3,"Cohen, Mr. Gurshon ""Gus""",male,18.0,0,0,A/5 3540,8.05,,S
205,206,0,3,"Strom, Miss. Telma Matilda",female,2.0,0,1,347054,10.4625,G6,S
206,207,0,3,"Backstrom, Mr. Karl Alfred",male,32.0,1,0,3101278,15.85,,S
207,208,1,3,"Albimona, Mr. Nassef Cassem",male,26.0,0,0,2699,18.7875,,C
208,209,1,3,"Carr, Miss. Helen ""Ellen""",female,16.0,0,0,367231,7.75,,Q
209,210,1,1,"Blank, Mr. Henry",male,40.0,0,0,112277,31.0,A31,C
210,211,0,3,"Ali, Mr. Ahmed",male,24.0,0,0,SOTON/O.Q. 3101311,7.05,,S
211,212,1,2,"Cameron, Miss. Clear Annie",female,35.0,0,0,F.C.C. 13528,21.0,,S
212,213,0,3,"Perkin, Mr. John Henry",male,22.0,0,0,A/5 21174,7.25,,S
213,214,0,2,"Givard, Mr. Hans Kristensen",male,30.0,0,0,250646,13.0,,S
214,215,0,3,"Kiernan, Mr. Philip",male,,1,0,367229,7.75,,Q
215,216,1,1,"Newell, Miss. Madeleine",female,31.0,1,0,35273,113.275,D36,C
216,217,1,3,"Honkanen, Miss. Eliina",female,27.0,0,0,STON/O2. 3101283,7.925,,S
217,218,0,2,"Jacobsohn, Mr. Sidney Samuel",male,42.0,1,0,243847,27.0,,S
218,219,1,1,"Bazzani, Miss. Albina",female,32.0,0,0,11813,76.2917,D15,C
219,220,0,2,"Harris, Mr. Walter",male,30.0,0,0,W/C 14208,10.5,,S
220,221,1,3,"Sunderland, Mr. Victor Francis",male,16.0,0,0,SOTON/OQ 392089,8.05,,S
221,222,0,2,"Bracken, Mr. James H",male,27.0,0,0,220367,13.0,,S
222,223,0,3,"Green, Mr. George Henry",male,51.0,0,0,21440,8.05,,S
223,224,0,3,"Nenkoff, Mr. Christo",male,,0,0,349234,7.8958,,S
224,225,1,1,"Hoyt, Mr. Frederick Maxfield",male,38.0,1,0,19943,90.0,C93,S
225,226,0,3,"Berglund, Mr. Karl Ivar Sven",male,22.0,0,0,PP 4348,9.35,,S
226,227,1,2,"Mellors, Mr. William John",male,19.0,0,0,SW/PP 751,10.5,,S
227,228,0,3,"Lovell, Mr. John Hall (""Henry"")",male,20.5,0,0,A/5 21173,7.25,,S
228,229,0,2,"Fahlstrom, Mr. Arne Jonas",male,18.0,0,0,236171,13.0,,S
229,230,0,3,"Lefebre, Miss. Mathilde",female,,3,1,4133,25.4667,,S
230,231,1,1,"Harris, Mrs. Henry Birkhardt (Irene Wallach)",female,35.0,1,0,36973,83.475,C83,S
231,232,0,3,"Larsson, Mr. Bengt Edvin",male,29.0,0,0,347067,7.775,,S
232,233,0,2,"Sjostedt, Mr. Ernst Adolf",male,59.0,0,0,237442,13.5,,S
233,234,1,3,"Asplund, Miss. Lillian Gertrud",female,5.0,4,2,347077,31.3875,,S
234,235,0,2,"Leyson, Mr. Robert William Norman",male,24.0,0,0,C.A. 29566,10.5,,S
235,236,0,3,"Harknett, Miss. Alice Phoebe",female,,0,0,W./C. 6609,7.55,,S
236,237,0,2,"Hold, Mr. Stephen",male,44.0,1,0,26707,26.0,,S
237,238,1,2,"Collyer, Miss. Marjorie ""Lottie""",female,8.0,0,2,C.A. 31921,26.25,,S
238,239,0,2,"Pengelly, Mr. Frederick William",male,19.0,0,0,28665,10.5,,S
239,240,0,2,"Hunt, Mr. George Henry",male,33.0,0,0,SCO/W 1585,12.275,,S
240,241,0,3,"Zabour, Miss. Thamine",female,,1,0,2665,14.4542,,C
241,242,1,3,"Murphy, Miss. Katherine ""Kate""",female,,1,0,367230,15.5,,Q
242,243,0,2,"Coleridge, Mr. Reginald Charles",male,29.0,0,0,W./C. 14263,10.5,,S
243,244,0,3,"Maenpaa, Mr. Matti Alexanteri",male,22.0,0,0,STON/O 2. 3101275,7.125,,S
244,245,0,3,"Attalah, Mr. Sleiman",male,30.0,0,0,2694,7.225,,C
245,246,0,1,"Minahan, Dr. William Edward",male,44.0,2,0,19928,90.0,C78,Q
246,247,0,3,"Lindahl, Miss. Agda Thorilda Viktoria",female,25.0,0,0,347071,7.775,,S
247,248,1,2,"Hamalainen, Mrs. William (Anna)",female,24.0,0,2,250649,14.5,,S
248,249,1,1,"Beckwith, Mr. Richard Leonard",male,37.0,1,1,11751,52.5542,D35,S
249,250,0,2,"Carter, Rev. Ernest Courtenay",male,54.0,1,0,244252,26.0,,S
250,251,0,3,"Reed, Mr. James George",male,,0,0,362316,7.25,,S
251,252,0,3,"Strom, Mrs. Wilhelm (Elna Matilda Persson)",female,29.0,1,1,347054,10.4625,G6,S
252,253,0,1,"Stead, Mr. William Thomas",male,62.0,0,0,113514,26.55,C87,S
253,254,0,3,"Lobb, Mr. William Arthur",male,30.0,1,0,A/5. 3336,16.1,,S
254,255,0,3,"Rosblom, Mrs. Viktor (Helena Wilhelmina)",female,41.0,0,2,370129,20.2125,,S
255,256,1,3,"Touma, Mrs. Darwis (Hanne Youssef Razi)",female,29.0,0,2,2650,15.2458,,C
256,257,1,1,"Thorne, Mrs. Gertrude Maybelle",female,,0,0,PC 17585,79.2,,C
257,258,1,1,"Cherry, Miss. Gladys",female,30.0,0,0,110152,86.5,B77,S
258,259,1,1,"Ward, Miss. Anna",female,35.0,0,0,PC 17755,512.3292,,C
259,260,1,2,"Parrish, Mrs. (Lutie Davis)",female,50.0,0,1,230433,26.0,,S
260,261,0,3,"Smith, Mr. Thomas",male,,0,0,384461,7.75,,Q
261,262,1,3,"Asplund, Master. Edvin Rojj Felix",male,3.0,4,2,347077,31.3875,,S
262,263,0,1,"Taussig, Mr. Emil",male,52.0,1,1,110413,79.65,E67,S
263,264,0,1,"Harrison, Mr. William",male,40.0,0,0,112059,0.0,B94,S
264,265,0,3,"Henry, Miss. Delia",female,,0,0,382649,7.75,,Q
265,266,0,2,"Reeves, Mr. David",male,36.0,0,0,C.A. 17248,10.5,,S
266,267,0,3,"Panula, Mr. Ernesti Arvid",male,16.0,4,1,3101295,39.6875,,S
267,268,1,3,"Persson, Mr. Ernst Ulrik",male,25.0,1,0,347083,7.775,,S
268,269,1,1,"Graham, Mrs. William Thompson (Edith Junkins)",female,58.0,0,1,PC 17582,153.4625,C125,S
269,270,1,1,"Bissette, Miss. Amelia",female,35.0,0,0,PC 17760,135.6333,C99,S
270,271,0,1,"Cairns, Mr. Alexander",male,,0,0,113798,31.0,,S
271,272,1,3,"Tornquist, Mr. William Henry",male,25.0,0,0,LINE,0.0,,S
272,273,1,2,"Mellinger, Mrs. (Elizabeth Anne Maidment)",female,41.0,0,1,250644,19.5,,S
273,274,0,1,"Natsch, Mr. Charles H",male,37.0,0,1,PC 17596,29.7,C118,C
274,275,1,3,"Healy, Miss. Hanora ""Nora""",female,,0,0,370375,7.75,,Q
275,276,1,1,"Andrews, Miss. Kornelia Theodosia",female,63.0,1,0,13502,77.9583,D7,S
276,277,0,3,"Lindblom, Miss. Augusta Charlotta",female,45.0,0,0,347073,7.75,,S
277,278,0,2,"Parkes, Mr. Francis ""Frank""",male,,0,0,239853,0.0,,S
278,279,0,3,"Rice, Master. Eric",male,7.0,4,1,382652,29.125,,Q
279,280,1,3,"Abbott, Mrs. Stanton (Rosa Hunt)",female,35.0,1,1,C.A. 2673,20.25,,S
280,281,0,3,"Duane, Mr. Frank",male,65.0,0,0,336439,7.75,,Q
281,282,0,3,"Olsson, Mr. Nils Johan Goransson",male,28.0,0,0,347464,7.8542,,S
282,283,0,3,"de Pelsmaeker, Mr. Alfons",male,16.0,0,0,345778,9.5,,S
283,284,1,3,"Dorking, Mr. Edward Arthur",male,19.0,0,0,A/5. 10482,8.05,,S
284,285,0,1,"Smith, Mr. Richard William",male,,0,0,113056,26.0,A19,S
285,286,0,3,"Stankovic, Mr. Ivan",male,33.0,0,0,349239,8.6625,,C
286,287,1,3,"de Mulder, Mr. Theodore",male,30.0,0,0,345774,9.5,,S
287,288,0,3,"Naidenoff, Mr. Penko",male,22.0,0,0,349206,7.8958,,S
288,289,1,2,"Hosono, Mr. Masabumi",male,42.0,0,0,237798,13.0,,S
289,290,1,3,"Connolly, Miss. Kate",female,22.0,0,0,370373,7.75,,Q
290,291,1,1,"Barber, Miss. Ellen ""Nellie""",female,26.0,0,0,19877,78.85,,S
291,292,1,1,"Bishop, Mrs. Dickinson H (Helen Walton)",female,19.0,1,0,11967,91.0792,B49,C
292,293,0,2,"Levy, Mr. Rene Jacques",male,36.0,0,0,SC/Paris 2163,12.875,D,C
293,294,0,3,"Haas, Miss. Aloisia",female,24.0,0,0,349236,8.85,,S
294,295,0,3,"Mineff, Mr. Ivan",male,24.0,0,0,349233,7.8958,,S
295,296,0,1,"Lewy, Mr. Ervin G",male,,0,0,PC 17612,27.7208,,C
296,297,0,3,"Hanna, Mr. Mansour",male,23.5,0,0,2693,7.2292,,C
297,298,0,1,"Allison, Miss. Helen Loraine",female,2.0,1,2,113781,151.55,C22 C26,S
298,299,1,1,"Saalfeld, Mr. Adolphe",male,,0,0,19988,30.5,C106,S
299,300,1,1,"Baxter, Mrs. James (Helene DeLaudeniere Chaput)",female,50.0,0,1,PC 17558,247.5208,B58 B60,C
300,301,1,3,"Kelly, Miss. Anna Katherine ""Annie Kate""",female,,0,0,9234,7.75,,Q
301,302,1,3,"McCoy, Mr. Bernard",male,,2,0,367226,23.25,,Q
302,303,0,3,"Johnson, Mr. William Cahoone Jr",male,19.0,0,0,LINE,0.0,,S
303,304,1,2,"Keane, Miss. Nora A",female,,0,0,226593,12.35,E101,Q
304,305,0,3,"Williams, Mr. Howard Hugh ""Harry""",male,,0,0,A/5 2466,8.05,,S
305,306,1,1,"Allison, Master. Hudson Trevor",male,0.92,1,2,113781,151.55,C22 C26,S
306,307,1,1,"Fleming, Miss. Margaret",female,,0,0,17421,110.8833,,C
307,308,1,1,"Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo)",female,17.0,1,0,PC 17758,108.9,C65,C
308,309,0,2,"Abelson, Mr. Samuel",male,30.0,1,0,P/PP 3381,24.0,,C
309,310,1,1,"Francatelli, Miss. Laura Mabel",female,30.0,0,0,PC 17485,56.9292,E36,C
310,311,1,1,"Hays, Miss. Margaret Bechstein",female,24.0,0,0,11767,83.1583,C54,C
311,312,1,1,"Ryerson, Miss. Emily Borie",female,18.0,2,2,PC 17608,262.375,B57 B59 B63 B66,C
312,313,0,2,"Lahtinen, Mrs. William (Anna Sylfven)",female,26.0,1,1,250651,26.0,,S
313,314,0,3,"Hendekovic, Mr. Ignjac",male,28.0,0,0,349243,7.8958,,S
314,315,0,2,"Hart, Mr. Benjamin",male,43.0,1,1,F.C.C. 13529,26.25,,S
315,316,1,3,"Nilsson, Miss. Helmina Josefina",female,26.0,0,0,347470,7.8542,,S
316,317,1,2,"Kantor, Mrs. Sinai (Miriam Sternin)",female,24.0,1,0,244367,26.0,,S
317,318,0,2,"Moraweck, Dr. Ernest",male,54.0,0,0,29011,14.0,,S
318,319,1,1,"Wick, Miss. Mary Natalie",female,31.0,0,2,36928,164.8667,C7,S
319,320,1,1,"Spedden, Mrs. Frederic Oakley (Margaretta Corning Stone)",female,40.0,1,1,16966,134.5,E34,C
320,321,0,3,"Dennis, Mr. Samuel",male,22.0,0,0,A/5 21172,7.25,,S
321,322,0,3,"Danoff, Mr. Yoto",male,27.0,0,0,349219,7.8958,,S
322,323,1,2,"Slayter, Miss. Hilda Mary",female,30.0,0,0,234818,12.35,,Q
323,324,1,2,"Caldwell, Mrs. Albert Francis (Sylvia Mae Harbaugh)",female,22.0,1,1,248738,29.0,,S
324,325,0,3,"Sage, Mr. George John Jr",male,,8,2,CA. 2343,69.55,,S
325,326,1,1,"Young, Miss. Marie Grice",female,36.0,0,0,PC 17760,135.6333,C32,C
326,327,0,3,"Nysveen, Mr. Johan Hansen",male,61.0,0,0,345364,6.2375,,S
327,328,1,2,"Ball, Mrs. (Ada E Hall)",female,36.0,0,0,28551,13.0,D,S
328,329,1,3,"Goldsmith, Mrs. Frank John (Emily Alice Brown)",female,31.0,1,1,363291,20.525,,S
329,330,1,1,"Hippach, Miss. Jean Gertrude",female,16.0,0,1,111361,57.9792,B18,C
330,331,1,3,"McCoy, Miss. Agnes",female,,2,0,367226,23.25,,Q
331,332,0,1,"Partner, Mr. Austen",male,45.5,0,0,113043,28.5,C124,S
332,333,0,1,"Graham, Mr. George Edward",male,38.0,0,1,PC 17582,153.4625,C91,S
333,334,0,3,"Vander Planke, Mr. Leo Edmondus",male,16.0,2,0,345764,18.0,,S
334,335,1,1,"Frauenthal, Mrs. Henry William (Clara Heinsheimer)",female,,1,0,PC 17611,133.65,,S
335,336,0,3,"Denkoff, Mr. Mitto",male,,0,0,349225,7.8958,,S
336,337,0,1,"Pears, Mr. Thomas Clinton",male,29.0,1,0,113776,66.6,C2,S
337,338,1,1,"Burns, Miss. Elizabeth Margaret",female,41.0,0,0,16966,134.5,E40,C
338,339,1,3,"Dahl, Mr. Karl Edwart",male,45.0,0,0,7598,8.05,,S
339,340,0,1,"Blackwell, Mr. Stephen Weart",male,45.0,0,0,113784,35.5,T,S
340,341,1,2,"Navratil, Master. Edmond Roger",male,2.0,1,1,230080,26.0,F2,S
341,342,1,1,"Fortune, Miss. Alice Elizabeth",female,24.0,3,2,19950,263.0,C23 C25 C27,S
342,343,0,2,"Collander, Mr. Erik Gustaf",male,28.0,0,0,248740,13.0,,S
343,344,0,2,"Sedgwick, Mr. Charles Frederick Waddington",male,25.0,0,0,244361,13.0,,S
344,345,0,2,"Fox, Mr. Stanley Hubert",male,36.0,0,0,229236,13.0,,S
345,346,1,2,"Brown, Miss. Amelia ""Mildred""",female,24.0,0,0,248733,13.0,F33,S
346,347,1,2,"Smith, Miss. Marion Elsie",female,40.0,0,0,31418,13.0,,S
347,348,1,3,"Davison, Mrs. Thomas Henry (Mary E Finck)",female,,1,0,386525,16.1,,S
348,349,1,3,"Coutts, Master. William Loch ""William""",male,3.0,1,1,C.A. 37671,15.9,,S
349,350,0,3,"Dimic, Mr. Jovan",male,42.0,0,0,315088,8.6625,,S
350,351,0,3,"Odahl, Mr. Nils Martin",male,23.0,0,0,7267,9.225,,S
351,352,0,1,"Williams-Lambert, Mr. Fletcher Fellows",male,,0,0,113510,35.0,C128,S
352,353,0,3,"Elias, Mr. Tannous",male,15.0,1,1,2695,7.2292,,C
353,354,0,3,"Arnold-Franchi, Mr. Josef",male,25.0,1,0,349237,17.8,,S
354,355,0,3,"Yousif, Mr. Wazli",male,,0,0,2647,7.225,,C
355,356,0,3,"Vanden Steen, Mr. Leo Peter",male,28.0,0,0,345783,9.5,,S
356,357,1,1,"Bowerman, Miss. Elsie Edith",female,22.0,0,1,113505,55.0,E33,S
357,358,0,2,"Funk, Miss. Annie Clemmer",female,38.0,0,0,237671,13.0,,S
358,359,1,3,"McGovern, Miss. Mary",female,,0,0,330931,7.8792,,Q
359,360,1,3,"Mockler, Miss. Helen Mary ""Ellie""",female,,0,0,330980,7.8792,,Q
360,361,0,3,"Skoog, Mr. Wilhelm",male,40.0,1,4,347088,27.9,,S
361,362,0,2,"del Carlo, Mr. Sebastiano",male,29.0,1,0,SC/PARIS 2167,27.7208,,C
362,363,0,3,"Barbara, Mrs. (Catherine David)",female,45.0,0,1,2691,14.4542,,C
363,364,0,3,"Asim, Mr. Adola",male,35.0,0,0,SOTON/O.Q. 3101310,7.05,,S
364,365,0,3,"O'Brien, Mr. Thomas",male,,1,0,370365,15.5,,Q
365,366,0,3,"Adahl, Mr. Mauritz Nils Martin",male,30.0,0,0,C 7076,7.25,,S
366,367,1,1,"Warren, Mrs. Frank Manley (Anna Sophia Atkinson)",female,60.0,1,0,110813,75.25,D37,C
367,368,1,3,"Moussa, Mrs. (Mantoura Boulos)",female,,0,0,2626,7.2292,,C
368,369,1,3,"Jermyn, Miss. Annie",female,,0,0,14313,7.75,,Q
369,370,1,1,"Aubart, Mme. Leontine Pauline",female,24.0,0,0,PC 17477,69.3,B35,C
370,371,1,1,"Harder, Mr. George Achilles",male,25.0,1,0,11765,55.4417,E50,C
371,372,0,3,"Wiklund, Mr. Jakob Alfred",male,18.0,1,0,3101267,6.4958,,S
372,373,0,3,"Beavan, Mr. William Thomas",male,19.0,0,0,323951,8.05,,S
373,374,0,1,"Ringhini, Mr. Sante",male,22.0,0,0,PC 17760,135.6333,,C
374,375,0,3,"Palsson, Miss. Stina Viola",female,3.0,3,1,349909,21.075,,S
375,376,1,1,"Meyer, Mrs. Edgar Joseph (Leila Saks)",female,,1,0,PC 17604,82.1708,,C
376,377,1,3,"Landergren, Miss. Aurora Adelia",female,22.0,0,0,C 7077,7.25,,S
377,378,0,1,"Widener, Mr. Harry Elkins",male,27.0,0,2,113503,211.5,C82,C
378,379,0,3,"Betros, Mr. Tannous",male,20.0,0,0,2648,4.0125,,C
379,380,0,3,"Gustafsson, Mr. Karl Gideon",male,19.0,0,0,347069,7.775,,S
380,381,1,1,"Bidois, Miss. Rosalie",female,42.0,0,0,PC 17757,227.525,,C
381,382,1,3,"Nakid, Miss. Maria (""Mary"")",female,1.0,0,2,2653,15.7417,,C
382,383,0,3,"Tikkanen, Mr. Juho",male,32.0,0,0,STON/O 2. 3101293,7.925,,S
383,384,1,1,"Holverson, Mrs. Alexander Oskar (Mary Aline Towner)",female,35.0,1,0,113789,52.0,,S
384,385,0,3,"Plotcharsky, Mr. Vasil",male,,0,0,349227,7.8958,,S
385,386,0,2,"Davies, Mr. Charles Henry",male,18.0,0,0,S.O.C. 14879,73.5,,S
386,387,0,3,"Goodwin, Master. Sidney Leonard",male,1.0,5,2,CA 2144,46.9,,S
387,388,1,2,"Buss, Miss. Kate",female,36.0,0,0,27849,13.0,,S
388,389,0,3,"Sadlier, Mr. Matthew",male,,0,0,367655,7.7292,,Q
389,390,1,2,"Lehmann, Miss. Bertha",female,17.0,0,0,SC 1748,12.0,,C
390,391,1,1,"Carter, Mr. William Ernest",male,36.0,1,2,113760,120.0,B96 B98,S
391,392,1,3,"Jansson, Mr. Carl Olof",male,21.0,0,0,350034,7.7958,,S
392,393,0,3,"Gustafsson, Mr. Johan Birger",male,28.0,2,0,3101277,7.925,,S
393,394,1,1,"Newell, Miss. Marjorie",female,23.0,1,0,35273,113.275,D36,C
394,395,1,3,"Sandstrom, Mrs. Hjalmar (Agnes Charlotta Bengtsson)",female,24.0,0,2,PP 9549,16.7,G6,S
395,396,0,3,"Johansson, Mr. Erik",male,22.0,0,0,350052,7.7958,,S
396,397,0,3,"Olsson, Miss. Elina",female,31.0,0,0,350407,7.8542,,S
397,398,0,2,"McKane, Mr. Peter David",male,46.0,0,0,28403,26.0,,S
398,399,0,2,"Pain, Dr. Alfred",male,23.0,0,0,244278,10.5,,S
399,400,1,2,"Trout, Mrs. William H (Jessie L)",female,28.0,0,0,240929,12.65,,S
400,401,1,3,"Niskanen, Mr. Juha",male,39.0,0,0,STON/O 2. 3101289,7.925,,S
401,402,0,3,"Adams, Mr. John",male,26.0,0,0,341826,8.05,,S
402,403,0,3,"Jussila, Miss. Mari Aina",female,21.0,1,0,4137,9.825,,S
403,404,0,3,"Hakkarainen, Mr. Pekka Pietari",male,28.0,1,0,STON/O2. 3101279,15.85,,S
404,405,0,3,"Oreskovic, Miss. Marija",female,20.0,0,0,315096,8.6625,,S
405,406,0,2,"Gale, Mr. Shadrach",male,34.0,1,0,28664,21.0,,S
406,407,0,3,"Widegren, Mr. Carl/Charles Peter",male,51.0,0,0,347064,7.75,,S
407,408,1,2,"Richards, Master. William Rowe",male,3.0,1,1,29106,18.75,,S
408,409,0,3,"Birkeland, Mr. Hans Martin Monsen",male,21.0,0,0,312992,7.775,,S
409,410,0,3,"Lefebre, Miss. Ida",female,,3,1,4133,25.4667,,S
410,411,0,3,"Sdycoff, Mr. Todor",male,,0,0,349222,7.8958,,S
411,412,0,3,"Hart, Mr. Henry",male,,0,0,394140,6.8583,,Q
412,413,1,1,"Minahan, Miss. Daisy E",female,33.0,1,0,19928,90.0,C78,Q
413,414,0,2,"Cunningham, Mr. Alfred Fleming",male,,0,0,239853,0.0,,S
414,415,1,3,"Sundman, Mr. Johan Julian",male,44.0,0,0,STON/O 2. 3101269,7.925,,S
415,416,0,3,"Meek, Mrs. Thomas (Annie Louise Rowley)",female,,0,0,343095,8.05,,S
416,417,1,2,"Drew, Mrs. James Vivian (Lulu Thorne Christian)",female,34.0,1,1,28220,32.5,,S
417,418,1,2,"Silven, Miss. Lyyli Karoliina",female,18.0,0,2,250652,13.0,,S
418,419,0,2,"Matthews, Mr. William John",male,30.0,0,0,28228,13.0,,S
419,420,0,3,"Van Impe, Miss. Catharina",female,10.0,0,2,345773,24.15,,S
420,421,0,3,"Gheorgheff, Mr. Stanio",male,,0,0,349254,7.8958,,C
421,422,0,3,"Charters, Mr. David",male,21.0,0,0,A/5. 13032,7.7333,,Q
422,423,0,3,"Zimmerman, Mr. Leo",male,29.0,0,0,315082,7.875,,S
423,424,0,3,"Danbom, Mrs. Ernst Gilbert (Anna Sigrid Maria Brogren)",female,28.0,1,1,347080,14.4,,S
424,425,0,3,"Rosblom, Mr. Viktor Richard",male,18.0,1,1,370129,20.2125,,S
425,426,0,3,"Wiseman, Mr. Phillippe",male,,0,0,A/4. 34244,7.25,,S
426,427,1,2,"Clarke, Mrs. Charles V (Ada Maria Winfield)",female,28.0,1,0,2003,26.0,,S
427,428,1,2,"Phillips, Miss. Kate Florence (""Mrs Kate Louise Phillips Marshall"")",female,19.0,0,0,250655,26.0,,S
428,429,0,3,"Flynn, Mr. James",male,,0,0,364851,7.75,,Q
429,430,1,3,"Pickard, Mr. Berk (Berk Trembisky)",male,32.0,0,0,SOTON/O.Q. 392078,8.05,E10,S
430,431,1,1,"Bjornstrom-Steffansson, Mr. Mauritz Hakan",male,28.0,0,0,110564,26.55,C52,S
431,432,1,3,"Thorneycroft, Mrs. Percival (Florence Kate White)",female,,1,0,376564,16.1,,S
432,433,1,2,"Louch, Mrs. Charles Alexander (Alice Adelaide Slow)",female,42.0,1,0,SC/AH 3085,26.0,,S
433,434,0,3,"Kallio, Mr. Nikolai Erland",male,17.0,0,0,STON/O 2. 3101274,7.125,,S
434,435,0,1,"Silvey, Mr. William Baird",male,50.0,1,0,13507,55.9,E44,S
435,436,1,1,"Carter, Miss. Lucile Polk",female,14.0,1,2,113760,120.0,B96 B98,S
436,437,0,3,"Ford, Miss. Doolina Margaret ""Daisy""",female,21.0,2,2,W./C. 6608,34.375,,S
437,438,1,2,"Richards, Mrs. Sidney (Emily Hocking)",female,24.0,2,3,29106,18.75,,S
438,439,0,1,"Fortune, Mr. Mark",male,64.0,1,4,19950,263.0,C23 C25 C27,S
439,440,0,2,"Kvillner, Mr. Johan Henrik Johannesson",male,31.0,0,0,C.A. 18723,10.5,,S
440,441,1,2,"Hart, Mrs. Benjamin (Esther Ada Bloomfield)",female,45.0,1,1,F.C.C. 13529,26.25,,S
441,442,0,3,"Hampe, Mr. Leon",male,20.0,0,0,345769,9.5,,S
442,443,0,3,"Petterson, Mr. Johan Emil",male,25.0,1,0,347076,7.775,,S
443,444,1,2,"Reynaldo, Ms. Encarnacion",female,28.0,0,0,230434,13.0,,S
444,445,1,3,"Johannesen-Bratthammer, Mr. Bernt",male,,0,0,65306,8.1125,,S
445,446,1,1,"Dodge, Master. Washington",male,4.0,0,2,33638,81.8583,A34,S
446,447,1,2,"Mellinger, Miss. Madeleine Violet",female,13.0,0,1,250644,19.5,,S
447,448,1,1,"Seward, Mr. Frederic Kimber",male,34.0,0,0,113794,26.55,,S
448,449,1,3,"Baclini, Miss. Marie Catherine",female,5.0,2,1,2666,19.2583,,C
449,450,1,1,"Peuchen, Major. Arthur Godfrey",male,52.0,0,0,113786,30.5,C104,S
450,451,0,2,"West, Mr. Edwy Arthur",male,36.0,1,2,C.A. 34651,27.75,,S
451,452,0,3,"Hagland, Mr. Ingvald Olai Olsen",male,,1,0,65303,19.9667,,S
452,453,0,1,"Foreman, Mr. Benjamin Laventall",male,30.0,0,0,113051,27.75,C111,C
453,454,1,1,"Goldenberg, Mr. Samuel L",male,49.0,1,0,17453,89.1042,C92,C
454,455,0,3,"Peduzzi, Mr. Joseph",male,,0,0,A/5 2817,8.05,,S
455,456,1,3,"Jalsevac, Mr. Ivan",male,29.0,0,0,349240,7.8958,,C
456,457,0,1,"Millet, Mr. Francis Davis",male,65.0,0,0,13509,26.55,E38,S
457,458,1,1,"Kenyon, Mrs. Frederick R (Marion)",female,,1,0,17464,51.8625,D21,S
458,459,1,2,"Toomey, Miss. Ellen",female,50.0,0,0,F.C.C. 13531,10.5,,S
459,460,0,3,"O'Connor, Mr. Maurice",male,,0,0,371060,7.75,,Q
460,461,1,1,"Anderson, Mr. Harry",male,48.0,0,0,19952,26.55,E12,S
461,462,0,3,"Morley, Mr. William",male,34.0,0,0,364506,8.05,,S
462,463,0,1,"Gee, Mr. Arthur H",male,47.0,0,0,111320,38.5,E63,S
463,464,0,2,"Milling, Mr. Jacob Christian",male,48.0,0,0,234360,13.0,,S
464,465,0,3,"Maisner, Mr. Simon",male,,0,0,A/S 2816,8.05,,S
465,466,0,3,"Goncalves, Mr. Manuel Estanslas",male,38.0,0,0,SOTON/O.Q. 3101306,7.05,,S
466,467,0,2,"Campbell, Mr. William",male,,0,0,239853,0.0,,S
467,468,0,1,"Smart, Mr. John Montgomery",male,56.0,0,0,113792,26.55,,S
468,469,0,3,"Scanlan, Mr. James",male,,0,0,36209,7.725,,Q
469,470,1,3,"Baclini, Miss. Helene Barbara",female,0.75,2,1,2666,19.2583,,C
470,471,0,3,"Keefe, Mr. Arthur",male,,0,0,323592,7.25,,S
471,472,0,3,"Cacic, Mr. Luka",male,38.0,0,0,315089,8.6625,,S
472,473,1,2,"West, Mrs. Edwy Arthur (Ada Mary Worth)",female,33.0,1,2,C.A. 34651,27.75,,S
473,474,1,2,"Jerwan, Mrs. Amin S (Marie Marthe Thuillard)",female,23.0,0,0,SC/AH Basle 541,13.7917,D,C
474,475,0,3,"Strandberg, Miss. Ida Sofia",female,22.0,0,0,7553,9.8375,,S
475,476,0,1,"Clifford, Mr. George Quincy",male,,0,0,110465,52.0,A14,S
476,477,0,2,"Renouf, Mr. Peter Henry",male,34.0,1,0,31027,21.0,,S
477,478,0,3,"Braund, Mr. Lewis Richard",male,29.0,1,0,3460,7.0458,,S
478,479,0,3,"Karlsson, Mr. Nils August",male,22.0,0,0,350060,7.5208,,S
479,480,1,3,"Hirvonen, Miss. Hildur E",female,2.0,0,1,3101298,12.2875,,S
480,481,0,3,"Goodwin, Master. Harold Victor",male,9.0,5,2,CA 2144,46.9,,S
481,482,0,2,"Frost, Mr. Anthony Wood ""Archie""",male,,0,0,239854,0.0,,S
482,483,0,3,"Rouse, Mr. Richard Henry",male,50.0,0,0,A/5 3594,8.05,,S
483,484,1,3,"Turkula, Mrs. (Hedwig)",female,63.0,0,0,4134,9.5875,,S
484,485,1,1,"Bishop, Mr. Dickinson H",male,25.0,1,0,11967,91.0792,B49,C
485,486,0,3,"Lefebre, Miss. Jeannie",female,,3,1,4133,25.4667,,S
486,487,1,1,"Hoyt, Mrs. Frederick Maxfield (Jane Anne Forby)",female,35.0,1,0,19943,90.0,C93,S
487,488,0,1,"Kent, Mr. Edward Austin",male,58.0,0,0,11771,29.7,B37,C
488,489,0,3,"Somerton, Mr. Francis William",male,30.0,0,0,A.5. 18509,8.05,,S
489,490,1,3,"Coutts, Master. Eden Leslie ""Neville""",male,9.0,1,1,C.A. 37671,15.9,,S
490,491,0,3,"Hagland, Mr. Konrad Mathias Reiersen",male,,1,0,65304,19.9667,,S
491,492,0,3,"Windelov, Mr. Einar",male,21.0,0,0,SOTON/OQ 3101317,7.25,,S
492,493,0,1,"Molson, Mr. Harry Markland",male,55.0,0,0,113787,30.5,C30,S
493,494,0,1,"Artagaveytia, Mr. Ramon",male,71.0,0,0,PC 17609,49.5042,,C
494,495,0,3,"Stanley, Mr. Edward Roland",male,21.0,0,0,A/4 45380,8.05,,S
495,496,0,3,"Yousseff, Mr. Gerious",male,,0,0,2627,14.4583,,C
496,497,1,1,"Eustis, Miss. Elizabeth Mussey",female,54.0,1,0,36947,78.2667,D20,C
497,498,0,3,"Shellard, Mr. Frederick William",male,,0,0,C.A. 6212,15.1,,S
498,499,0,1,"Allison, Mrs. Hudson J C (Bessie Waldo Daniels)",female,25.0,1,2,113781,151.55,C22 C26,S
499,500,0,3,"Svensson, Mr. Olof",male,24.0,0,0,350035,7.7958,,S
500,501,0,3,"Calic, Mr. Petar",male,17.0,0,0,315086,8.6625,,S
501,502,0,3,"Canavan, Miss. Mary",female,21.0,0,0,364846,7.75,,Q
502,503,0,3,"O'Sullivan, Miss. Bridget Mary",female,,0,0,330909,7.6292,,Q
503,504,0,3,"Laitinen, Miss. Kristina Sofia",female,37.0,0,0,4135,9.5875,,S
504,505,1,1,"Maioni, Miss. Roberta",female,16.0,0,0,110152,86.5,B79,S
505,506,0,1,"Penasco y Castellana, Mr. Victor de Satode",male,18.0,1,0,PC 17758,108.9,C65,C
506,507,1,2,"Quick, Mrs. Frederick Charles (Jane Richards)",female,33.0,0,2,26360,26.0,,S
507,508,1,1,"Bradley, Mr. George (""George Arthur Brayton"")",male,,0,0,111427,26.55,,S
508,509,0,3,"Olsen, Mr. Henry Margido",male,28.0,0,0,C 4001,22.525,,S
509,510,1,3,"Lang, Mr. Fang",male,26.0,0,0,1601,56.4958,,S
510,511,1,3,"Daly, Mr. Eugene Patrick",male,29.0,0,0,382651,7.75,,Q
511,512,0,3,"Webber, Mr. James",male,,0,0,SOTON/OQ 3101316,8.05,,S
512,513,1,1,"McGough, Mr. James Robert",male,36.0,0,0,PC 17473,26.2875,E25,S
513,514,1,1,"Rothschild, Mrs. Martin (Elizabeth L. Barrett)",female,54.0,1,0,PC 17603,59.4,,C
514,515,0,3,"Coleff, Mr. Satio",male,24.0,0,0,349209,7.4958,,S
515,516,0,1,"Walker, Mr. William Anderson",male,47.0,0,0,36967,34.0208,D46,S
516,517,1,2,"Lemore, Mrs. (Amelia Milley)",female,34.0,0,0,C.A. 34260,10.5,F33,S
517,518,0,3,"Ryan, Mr. Patrick",male,,0,0,371110,24.15,,Q
518,519,1,2,"Angle, Mrs. William A (Florence ""Mary"" Agnes Hughes)",female,36.0,1,0,226875,26.0,,S
519,520,0,3,"Pavlovic, Mr. Stefo",male,32.0,0,0,349242,7.8958,,S
520,521,1,1,"Perreault, Miss. Anne",female,30.0,0,0,12749,93.5,B73,S
521,522,0,3,"Vovk, Mr. Janko",male,22.0,0,0,349252,7.8958,,S
522,523,0,3,"Lahoud, Mr. Sarkis",male,,0,0,2624,7.225,,C
523,524,1,1,"Hippach, Mrs. Louis Albert (Ida Sophia Fischer)",female,44.0,0,1,111361,57.9792,B18,C
524,525,0,3,"Kassem, Mr. Fared",male,,0,0,2700,7.2292,,C
525,526,0,3,"Farrell, Mr. James",male,40.5,0,0,367232,7.75,,Q
526,527,1,2,"Ridsdale, Miss. Lucy",female,50.0,0,0,W./C. 14258,10.5,,S
527,528,0,1,"Farthing, Mr. John",male,,0,0,PC 17483,221.7792,C95,S
528,529,0,3,"Salonen, Mr. Johan Werner",male,39.0,0,0,3101296,7.925,,S
529,530,0,2,"Hocking, Mr. Richard George",male,23.0,2,1,29104,11.5,,S
530,531,1,2,"Quick, Miss. Phyllis May",female,2.0,1,1,26360,26.0,,S
531,532,0,3,"Toufik, Mr. Nakli",male,,0,0,2641,7.2292,,C
532,533,0,3,"Elias, Mr. Joseph Jr",male,17.0,1,1,2690,7.2292,,C
533,534,1,3,"Peter, Mrs. Catherine (Catherine Rizk)",female,,0,2,2668,22.3583,,C
534,535,0,3,"Cacic, Miss. Marija",female,30.0,0,0,315084,8.6625,,S
535,536,1,2,"Hart, Miss. Eva Miriam",female,7.0,0,2,F.C.C. 13529,26.25,,S
536,537,0,1,"Butt, Major. Archibald Willingham",male,45.0,0,0,113050,26.55,B38,S
537,538,1,1,"LeRoy, Miss. Bertha",female,30.0,0,0,PC 17761,106.425,,C
538,539,0,3,"Risien, Mr. Samuel Beard",male,,0,0,364498,14.5,,S
539,540,1,1,"Frolicher, Miss. Hedwig Margaritha",female,22.0,0,2,13568,49.5,B39,C
540,541,1,1,"Crosby, Miss. Harriet R",female,36.0,0,2,WE/P 5735,71.0,B22,S
541,542,0,3,"Andersson, Miss. Ingeborg Constanzia",female,9.0,4,2,347082,31.275,,S
542,543,0,3,"Andersson, Miss. Sigrid Elisabeth",female,11.0,4,2,347082,31.275,,S
543,544,1,2,"Beane, Mr. Edward",male,32.0,1,0,2908,26.0,,S
544,545,0,1,"Douglas, Mr. Walter Donald",male,50.0,1,0,PC 17761,106.425,C86,C
545,546,0,1,"Nicholson, Mr. Arthur Ernest",male,64.0,0,0,693,26.0,,S
546,547,1,2,"Beane, Mrs. Edward (Ethel Clarke)",female,19.0,1,0,2908,26.0,,S
547,548,1,2,"Padro y Manent, Mr. Julian",male,,0,0,SC/PARIS 2146,13.8625,,C
548,549,0,3,"Goldsmith, Mr. Frank John",male,33.0,1,1,363291,20.525,,S
549,550,1,2,"Davies, Master. John Morgan Jr",male,8.0,1,1,C.A. 33112,36.75,,S
550,551,1,1,"Thayer, Mr. John Borland Jr",male,17.0,0,2,17421,110.8833,C70,C
551,552,0,2,"Sharp, Mr. Percival James R",male,27.0,0,0,244358,26.0,,S
552,553,0,3,"O'Brien, Mr. Timothy",male,,0,0,330979,7.8292,,Q
553,554,1,3,"Leeni, Mr. Fahim (""Philip Zenni"")",male,22.0,0,0,2620,7.225,,C
554,555,1,3,"Ohman, Miss. Velin",female,22.0,0,0,347085,7.775,,S
555,556,0,1,"Wright, Mr. George",male,62.0,0,0,113807,26.55,,S
556,557,1,1,"Duff Gordon, Lady. (Lucille Christiana Sutherland) (""Mrs Morgan"")",female,48.0,1,0,11755,39.6,A16,C
557,558,0,1,"Robbins, Mr. Victor",male,,0,0,PC 17757,227.525,,C
558,559,1,1,"Taussig, Mrs. Emil (Tillie Mandelbaum)",female,39.0,1,1,110413,79.65,E67,S
559,560,1,3,"de Messemaeker, Mrs. Guillaume Joseph (Emma)",female,36.0,1,0,345572,17.4,,S
560,561,0,3,"Morrow, Mr. Thomas Rowan",male,,0,0,372622,7.75,,Q
561,562,0,3,"Sivic, Mr. Husein",male,40.0,0,0,349251,7.8958,,S
562,563,0,2,"Norman, Mr. Robert Douglas",male,28.0,0,0,218629,13.5,,S
563,564,0,3,"Simmons, Mr. John",male,,0,0,SOTON/OQ 392082,8.05,,S
564,565,0,3,"Meanwell, Miss. (Marion Ogden)",female,,0,0,SOTON/O.Q. 392087,8.05,,S
565,566,0,3,"Davies, Mr. Alfred J",male,24.0,2,0,A/4 48871,24.15,,S
566,567,0,3,"Stoytcheff, Mr. Ilia",male,19.0,0,0,349205,7.8958,,S
567,568,0,3,"Palsson, Mrs. Nils (Alma Cornelia Berglund)",female,29.0,0,4,349909,21.075,,S
568,569,0,3,"Doharr, Mr. Tannous",male,,0,0,2686,7.2292,,C
569,570,1,3,"Jonsson, Mr. Carl",male,32.0,0,0,350417,7.8542,,S
570,571,1,2,"Harris, Mr. George",male,62.0,0,0,S.W./PP 752,10.5,,S
571,572,1,1,"Appleton, Mrs. Edward Dale (Charlotte Lamson)",female,53.0,2,0,11769,51.4792,C101,S
572,573,1,1,"Flynn, Mr. John Irwin (""Irving"")",male,36.0,0,0,PC 17474,26.3875,E25,S
573,574,1,3,"Kelly, Miss. Mary",female,,0,0,14312,7.75,,Q
574,575,0,3,"Rush, Mr. Alfred George John",male,16.0,0,0,A/4. 20589,8.05,,S
575,576,0,3,"Patchett, Mr. George",male,19.0,0,0,358585,14.5,,S
576,577,1,2,"Garside, Miss. Ethel",female,34.0,0,0,243880,13.0,,S
577,578,1,1,"Silvey, Mrs. William Baird (Alice Munger)",female,39.0,1,0,13507,55.9,E44,S
578,579,0,3,"Caram, Mrs. Joseph (Maria Elias)",female,,1,0,2689,14.4583,,C
579,580,1,3,"Jussila, Mr. Eiriik",male,32.0,0,0,STON/O 2. 3101286,7.925,,S
580,581,1,2,"Christy, Miss. Julie Rachel",female,25.0,1,1,237789,30.0,,S
581,582,1,1,"Thayer, Mrs. John Borland (Marian Longstreth Morris)",female,39.0,1,1,17421,110.8833,C68,C
582,583,0,2,"Downton, Mr. William James",male,54.0,0,0,28403,26.0,,S
583,584,0,1,"Ross, Mr. John Hugo",male,36.0,0,0,13049,40.125,A10,C
584,585,0,3,"Paulner, Mr. Uscher",male,,0,0,3411,8.7125,,C
585,586,1,1,"Taussig, Miss. Ruth",female,18.0,0,2,110413,79.65,E68,S
586,587,0,2,"Jarvis, Mr. John Denzil",male,47.0,0,0,237565,15.0,,S
587,588,1,1,"Frolicher-Stehli, Mr. Maxmillian",male,60.0,1,1,13567,79.2,B41,C
588,589,0,3,"Gilinski, Mr. Eliezer",male,22.0,0,0,14973,8.05,,S
589,590,0,3,"Murdlin, Mr. Joseph",male,,0,0,A./5. 3235,8.05,,S
590,591,0,3,"Rintamaki, Mr. Matti",male,35.0,0,0,STON/O 2. 3101273,7.125,,S
591,592,1,1,"Stephenson, Mrs. Walter Bertram (Martha Eustis)",female,52.0,1,0,36947,78.2667,D20,C
592,593,0,3,"Elsbury, Mr. William James",male,47.0,0,0,A/5 3902,7.25,,S
593,594,0,3,"Bourke, Miss. Mary",female,,0,2,364848,7.75,,Q
594,595,0,2,"Chapman, Mr. John Henry",male,37.0,1,0,SC/AH 29037,26.0,,S
595,596,0,3,"Van Impe, Mr. Jean Baptiste",male,36.0,1,1,345773,24.15,,S
596,597,1,2,"Leitch, Miss. Jessie Wills",female,,0,0,248727,33.0,,S
597,598,0,3,"Johnson, Mr. Alfred",male,49.0,0,0,LINE,0.0,,S
598,599,0,3,"Boulos, Mr. Hanna",male,,0,0,2664,7.225,,C
599,600,1,1,"Duff Gordon, Sir. Cosmo Edmund (""Mr Morgan"")",male,49.0,1,0,PC 17485,56.9292,A20,C
600,601,1,2,"Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy)",female,24.0,2,1,243847,27.0,,S
601,602,0,3,"Slabenoff, Mr. Petco",male,,0,0,349214,7.8958,,S
602,603,0,1,"Harrington, Mr. Charles H",male,,0,0,113796,42.4,,S
603,604,0,3,"Torber, Mr. Ernst William",male,44.0,0,0,364511,8.05,,S
604,605,1,1,"Homer, Mr. Harry (""Mr E Haven"")",male,35.0,0,0,111426,26.55,,C
605,606,0,3,"Lindell, Mr. Edvard Bengtsson",male,36.0,1,0,349910,15.55,,S
606,607,0,3,"Karaic, Mr. Milan",male,30.0,0,0,349246,7.8958,,S
607,608,1,1,"Daniel, Mr. Robert Williams",male,27.0,0,0,113804,30.5,,S
608,609,1,2,"Laroche, Mrs. Joseph (Juliette Marie Louise Lafargue)",female,22.0,1,2,SC/Paris 2123,41.5792,,C
609,610,1,1,"Shutes, Miss. Elizabeth W",female,40.0,0,0,PC 17582,153.4625,C125,S
610,611,0,3,"Andersson, Mrs. Anders Johan (Alfrida Konstantia Brogren)",female,39.0,1,5,347082,31.275,,S
611,612,0,3,"Jardin, Mr. Jose Neto",male,,0,0,SOTON/O.Q. 3101305,7.05,,S
612,613,1,3,"Murphy, Miss. Margaret Jane",female,,1,0,367230,15.5,,Q
613,614,0,3,"Horgan, Mr. John",male,,0,0,370377,7.75,,Q
614,615,0,3,"Brocklebank, Mr. William Alfred",male,35.0,0,0,364512,8.05,,S
615,616,1,2,"Herman, Miss. Alice",female,24.0,1,2,220845,65.0,,S
616,617,0,3,"Danbom, Mr. Ernst Gilbert",male,34.0,1,1,347080,14.4,,S
617,618,0,3,"Lobb, Mrs. William Arthur (Cordelia K Stanlick)",female,26.0,1,0,A/5. 3336,16.1,,S
618,619,1,2,"Becker, Miss. Marion Louise",female,4.0,2,1,230136,39.0,F4,S
619,620,0,2,"Gavey, Mr. Lawrence",male,26.0,0,0,31028,10.5,,S
620,621,0,3,"Yasbeck, Mr. Antoni",male,27.0,1,0,2659,14.4542,,C
621,622,1,1,"Kimball, Mr. Edwin Nelson Jr",male,42.0,1,0,11753,52.5542,D19,S
622,623,1,3,"Nakid, Mr. Sahid",male,20.0,1,1,2653,15.7417,,C
623,624,0,3,"Hansen, Mr. Henry Damsgaard",male,21.0,0,0,350029,7.8542,,S
624,625,0,3,"Bowen, Mr. David John ""Dai""",male,21.0,0,0,54636,16.1,,S
625,626,0,1,"Sutton, Mr. Frederick",male,61.0,0,0,36963,32.3208,D50,S
626,627,0,2,"Kirkland, Rev. Charles Leonard",male,57.0,0,0,219533,12.35,,Q
627,628,1,1,"Longley, Miss. Gretchen Fiske",female,21.0,0,0,13502,77.9583,D9,S
628,629,0,3,"Bostandyeff, Mr. Guentcho",male,26.0,0,0,349224,7.8958,,S
629,630,0,3,"O'Connell, Mr. Patrick D",male,,0,0,334912,7.7333,,Q
630,631,1,1,"Barkworth, Mr. Algernon Henry Wilson",male,80.0,0,0,27042,30.0,A23,S
631,632,0,3,"Lundahl, Mr. Johan Svensson",male,51.0,0,0,347743,7.0542,,S
632,633,1,1,"Stahelin-Maeglin, Dr. Max",male,32.0,0,0,13214,30.5,B50,C
633,634,0,1,"Parr, Mr. William Henry Marsh",male,,0,0,112052,0.0,,S
634,635,0,3,"Skoog, Miss. Mabel",female,9.0,3,2,347088,27.9,,S
635,636,1,2,"Davis, Miss. Mary",female,28.0,0,0,237668,13.0,,S
636,637,0,3,"Leinonen, Mr. Antti Gustaf",male,32.0,0,0,STON/O 2. 3101292,7.925,,S
637,638,0,2,"Collyer, Mr. Harvey",male,31.0,1,1,C.A. 31921,26.25,,S
638,639,0,3,"Panula, Mrs. Juha (Maria Emilia Ojala)",female,41.0,0,5,3101295,39.6875,,S
639,640,0,3,"Thorneycroft, Mr. Percival",male,,1,0,376564,16.1,,S
640,641,0,3,"Jensen, Mr. Hans Peder",male,20.0,0,0,350050,7.8542,,S
641,642,1,1,"Sagesser, Mlle. Emma",female,24.0,0,0,PC 17477,69.3,B35,C
642,643,0,3,"Skoog, Miss. Margit Elizabeth",female,2.0,3,2,347088,27.9,,S
643,644,1,3,"Foo, Mr. Choong",male,,0,0,1601,56.4958,,S
644,645,1,3,"Baclini, Miss. Eugenie",female,0.75,2,1,2666,19.2583,,C
645,646,1,1,"Harper, Mr. Henry Sleeper",male,48.0,1,0,PC 17572,76.7292,D33,C
646,647,0,3,"Cor, Mr. Liudevit",male,19.0,0,0,349231,7.8958,,S
647,648,1,1,"Simonius-Blumer, Col. Oberst Alfons",male,56.0,0,0,13213,35.5,A26,C
648,649,0,3,"Willey, Mr. Edward",male,,0,0,S.O./P.P. 751,7.55,,S
649,650,1,3,"Stanley, Miss. Amy Zillah Elsie",female,23.0,0,0,CA. 2314,7.55,,S
650,651,0,3,"Mitkoff, Mr. Mito",male,,0,0,349221,7.8958,,S
651,652,1,2,"Doling, Miss. Elsie",female,18.0,0,1,231919,23.0,,S
652,653,0,3,"Kalvik, Mr. Johannes Halvorsen",male,21.0,0,0,8475,8.4333,,S
653,654,1,3,"O'Leary, Miss. Hanora ""Norah""",female,,0,0,330919,7.8292,,Q
654,655,0,3,"Hegarty, Miss. Hanora ""Nora""",female,18.0,0,0,365226,6.75,,Q
655,656,0,2,"Hickman, Mr. Leonard Mark",male,24.0,2,0,S.O.C. 14879,73.5,,S
656,657,0,3,"Radeff, Mr. Alexander",male,,0,0,349223,7.8958,,S
657,658,0,3,"Bourke, Mrs. John (Catherine)",female,32.0,1,1,364849,15.5,,Q
658,659,0,2,"Eitemiller, Mr. George Floyd",male,23.0,0,0,29751,13.0,,S
659,660,0,1,"Newell, Mr. Arthur Webster",male,58.0,0,2,35273,113.275,D48,C
660,661,1,1,"Frauenthal, Dr. Henry William",male,50.0,2,0,PC 17611,133.65,,S
661,662,0,3,"Badt, Mr. Mohamed",male,40.0,0,0,2623,7.225,,C
662,663,0,1,"Colley, Mr. Edward Pomeroy",male,47.0,0,0,5727,25.5875,E58,S
663,664,0,3,"Coleff, Mr. Peju",male,36.0,0,0,349210,7.4958,,S
664,665,1,3,"Lindqvist, Mr. Eino William",male,20.0,1,0,STON/O 2. 3101285,7.925,,S
665,666,0,2,"Hickman, Mr. Lewis",male,32.0,2,0,S.O.C. 14879,73.5,,S
666,667,0,2,"Butler, Mr. Reginald Fenton",male,25.0,0,0,234686,13.0,,S
667,668,0,3,"Rommetvedt, Mr. Knud Paust",male,,0,0,312993,7.775,,S
668,669,0,3,"Cook, Mr. Jacob",male,43.0,0,0,A/5 3536,8.05,,S
669,670,1,1,"Taylor, Mrs. Elmer Zebley (Juliet Cummins Wright)",female,,1,0,19996,52.0,C126,S
670,671,1,2,"Brown, Mrs. Thomas William Solomon (Elizabeth Catherine Ford)",female,40.0,1,1,29750,39.0,,S
671,672,0,1,"Davidson, Mr. Thornton",male,31.0,1,0,F.C. 12750,52.0,B71,S
672,673,0,2,"Mitchell, Mr. Henry Michael",male,70.0,0,0,C.A. 24580,10.5,,S
673,674,1,2,"Wilhelms, Mr. Charles",male,31.0,0,0,244270,13.0,,S
674,675,0,2,"Watson, Mr. Ennis Hastings",male,,0,0,239856,0.0,,S
675,676,0,3,"Edvardsson, Mr. Gustaf Hjalmar",male,18.0,0,0,349912,7.775,,S
676,677,0,3,"Sawyer, Mr. Frederick Charles",male,24.5,0,0,342826,8.05,,S
677,678,1,3,"Turja, Miss. Anna Sofia",female,18.0,0,0,4138,9.8417,,S
678,679,0,3,"Goodwin, Mrs. Frederick (Augusta Tyler)",female,43.0,1,6,CA 2144,46.9,,S
679,680,1,1,"Cardeza, Mr. Thomas Drake Martinez",male,36.0,0,1,PC 17755,512.3292,B51 B53 B55,C
680,681,0,3,"Peters, Miss. Katie",female,,0,0,330935,8.1375,,Q
681,682,1,1,"Hassab, Mr. Hammad",male,27.0,0,0,PC 17572,76.7292,D49,C
682,683,0,3,"Olsvigen, Mr. Thor Anderson",male,20.0,0,0,6563,9.225,,S
683,684,0,3,"Goodwin, Mr. Charles Edward",male,14.0,5,2,CA 2144,46.9,,S
684,685,0,2,"Brown, Mr. Thomas William Solomon",male,60.0,1,1,29750,39.0,,S
685,686,0,2,"Laroche, Mr. Joseph Philippe Lemercier",male,25.0,1,2,SC/Paris 2123,41.5792,,C
686,687,0,3,"Panula, Mr. Jaako Arnold",male,14.0,4,1,3101295,39.6875,,S
687,688,0,3,"Dakic, Mr. Branko",male,19.0,0,0,349228,10.1708,,S
688,689,0,3,"Fischer, Mr. Eberhard Thelander",male,18.0,0,0,350036,7.7958,,S
689,690,1,1,"Madill, Miss. Georgette Alexandra",female,15.0,0,1,24160,211.3375,B5,S
690,691,1,1,"Dick, Mr. Albert Adrian",male,31.0,1,0,17474,57.0,B20,S
691,692,1,3,"Karun, Miss. Manca",female,4.0,0,1,349256,13.4167,,C
692,693,1,3,"Lam, Mr. Ali",male,,0,0,1601,56.4958,,S
693,694,0,3,"Saad, Mr. Khalil",male,25.0,0,0,2672,7.225,,C
694,695,0,1,"Weir, Col. John",male,60.0,0,0,113800,26.55,,S
695,696,0,2,"Chapman, Mr. Charles Henry",male,52.0,0,0,248731,13.5,,S
696,697,0,3,"Kelly, Mr. James",male,44.0,0,0,363592,8.05,,S
697,698,1,3,"Mullens, Miss. Katherine ""Katie""",female,,0,0,35852,7.7333,,Q
698,699,0,1,"Thayer, Mr. John Borland",male,49.0,1,1,17421,110.8833,C68,C
699,700,0,3,"Humblen, Mr. Adolf Mathias Nicolai Olsen",male,42.0,0,0,348121,7.65,F G63,S
700,701,1,1,"Astor, Mrs. John Jacob (Madeleine Talmadge Force)",female,18.0,1,0,PC 17757,227.525,C62 C64,C
701,702,1,1,"Silverthorne, Mr. Spencer Victor",male,35.0,0,0,PC 17475,26.2875,E24,S
702,703,0,3,"Barbara, Miss. Saiide",female,18.0,0,1,2691,14.4542,,C
703,704,0,3,"Gallagher, Mr. Martin",male,25.0,0,0,36864,7.7417,,Q
704,705,0,3,"Hansen, Mr. Henrik Juul",male,26.0,1,0,350025,7.8542,,S
705,706,0,2,"Morley, Mr. Henry Samuel (""Mr Henry Marshall"")",male,39.0,0,0,250655,26.0,,S
706,707,1,2,"Kelly, Mrs. Florence ""Fannie""",female,45.0,0,0,223596,13.5,,S
707,708,1,1,"Calderhead, Mr. Edward Pennington",male,42.0,0,0,PC 17476,26.2875,E24,S
708,709,1,1,"Cleaver, Miss. Alice",female,22.0,0,0,113781,151.55,,S
709,710,1,3,"Moubarek, Master. Halim Gonios (""William George"")",male,,1,1,2661,15.2458,,C
710,711,1,1,"Mayne, Mlle. Berthe Antonine (""Mrs de Villiers"")",female,24.0,0,0,PC 17482,49.5042,C90,C
711,712,0,1,"Klaber, Mr. Herman",male,,0,0,113028,26.55,C124,S
712,713,1,1,"Taylor, Mr. Elmer Zebley",male,48.0,1,0,19996,52.0,C126,S
713,714,0,3,"Larsson, Mr. August Viktor",male,29.0,0,0,7545,9.4833,,S
714,715,0,2,"Greenberg, Mr. Samuel",male,52.0,0,0,250647,13.0,,S
715,716,0,3,"Soholt, Mr. Peter Andreas Lauritz Andersen",male,19.0,0,0,348124,7.65,F G73,S
716,717,1,1,"Endres, Miss. Caroline Louise",female,38.0,0,0,PC 17757,227.525,C45,C
717,718,1,2,"Troutt, Miss. Edwina Celia ""Winnie""",female,27.0,0,0,34218,10.5,E101,S
718,719,0,3,"McEvoy, Mr. Michael",male,,0,0,36568,15.5,,Q
719,720,0,3,"Johnson, Mr. Malkolm Joackim",male,33.0,0,0,347062,7.775,,S
720,721,1,2,"Harper, Miss. Annie Jessie ""Nina""",female,6.0,0,1,248727,33.0,,S
721,722,0,3,"Jensen, Mr. Svend Lauritz",male,17.0,1,0,350048,7.0542,,S
722,723,0,2,"Gillespie, Mr. William Henry",male,34.0,0,0,12233,13.0,,S
723,724,0,2,"Hodges, Mr. Henry Price",male,50.0,0,0,250643,13.0,,S
724,725,1,1,"Chambers, Mr. Norman Campbell",male,27.0,1,0,113806,53.1,E8,S
725,726,0,3,"Oreskovic, Mr. Luka",male,20.0,0,0,315094,8.6625,,S
726,727,1,2,"Renouf, Mrs. Peter Henry (Lillian Jefferys)",female,30.0,3,0,31027,21.0,,S
727,728,1,3,"Mannion, Miss. Margareth",female,,0,0,36866,7.7375,,Q
728,729,0,2,"Bryhl, Mr. Kurt Arnold Gottfrid",male,25.0,1,0,236853,26.0,,S
729,730,0,3,"Ilmakangas, Miss. Pieta Sofia",female,25.0,1,0,STON/O2. 3101271,7.925,,S
730,731,1,1,"Allen, Miss. Elisabeth Walton",female,29.0,0,0,24160,211.3375,B5,S
731,732,0,3,"Hassan, Mr. Houssein G N",male,11.0,0,0,2699,18.7875,,C
732,733,0,2,"Knight, Mr. Robert J",male,,0,0,239855,0.0,,S
733,734,0,2,"Berriman, Mr. William John",male,23.0,0,0,28425,13.0,,S
734,735,0,2,"Troupiansky, Mr. Moses Aaron",male,23.0,0,0,233639,13.0,,S
735,736,0,3,"Williams, Mr. Leslie",male,28.5,0,0,54636,16.1,,S
736,737,0,3,"Ford, Mrs. Edward (Margaret Ann Watson)",female,48.0,1,3,W./C. 6608,34.375,,S
737,738,1,1,"Lesurer, Mr. Gustave J",male,35.0,0,0,PC 17755,512.3292,B101,C
738,739,0,3,"Ivanoff, Mr. Kanio",male,,0,0,349201,7.8958,,S
739,740,0,3,"Nankoff, Mr. Minko",male,,0,0,349218,7.8958,,S
740,741,1,1,"Hawksford, Mr. Walter James",male,,0,0,16988,30.0,D45,S
741,742,0,1,"Cavendish, Mr. Tyrell William",male,36.0,1,0,19877,78.85,C46,S
742,743,1,1,"Ryerson, Miss. Susan Parker ""Suzette""",female,21.0,2,2,PC 17608,262.375,B57 B59 B63 B66,C
743,744,0,3,"McNamee, Mr. Neal",male,24.0,1,0,376566,16.1,,S
744,745,1,3,"Stranden, Mr. Juho",male,31.0,0,0,STON/O 2. 3101288,7.925,,S
745,746,0,1,"Crosby, Capt. Edward Gifford",male,70.0,1,1,WE/P 5735,71.0,B22,S
746,747,0,3,"Abbott, Mr. Rossmore Edward",male,16.0,1,1,C.A. 2673,20.25,,S
747,748,1,2,"Sinkkonen, Miss. Anna",female,30.0,0,0,250648,13.0,,S
748,749,0,1,"Marvin, Mr. Daniel Warner",male,19.0,1,0,113773,53.1,D30,S
749,750,0,3,"Connaghton, Mr. Michael",male,31.0,0,0,335097,7.75,,Q
750,751,1,2,"Wells, Miss. Joan",female,4.0,1,1,29103,23.0,,S
751,752,1,3,"Moor, Master. Meier",male,6.0,0,1,392096,12.475,E121,S
752,753,0,3,"Vande Velde, Mr. Johannes Joseph",male,33.0,0,0,345780,9.5,,S
753,754,0,3,"Jonkoff, Mr. Lalio",male,23.0,0,0,349204,7.8958,,S
754,755,1,2,"Herman, Mrs. Samuel (Jane Laver)",female,48.0,1,2,220845,65.0,,S
755,756,1,2,"Hamalainen, Master. Viljo",male,0.67,1,1,250649,14.5,,S
756,757,0,3,"Carlsson, Mr. August Sigfrid",male,28.0,0,0,350042,7.7958,,S
757,758,0,2,"Bailey, Mr. Percy Andrew",male,18.0,0,0,29108,11.5,,S
758,759,0,3,"Theobald, Mr. Thomas Leonard",male,34.0,0,0,363294,8.05,,S
759,760,1,1,"Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards)",female,33.0,0,0,110152,86.5,B77,S
760,761,0,3,"Garfirth, Mr. John",male,,0,0,358585,14.5,,S
761,762,0,3,"Nirva, Mr. Iisakki Antino Aijo",male,41.0,0,0,SOTON/O2 3101272,7.125,,S
762,763,1,3,"Barah, Mr. Hanna Assi",male,20.0,0,0,2663,7.2292,,C
763,764,1,1,"Carter, Mrs. William Ernest (Lucile Polk)",female,36.0,1,2,113760,120.0,B96 B98,S
764,765,0,3,"Eklund, Mr. Hans Linus",male,16.0,0,0,347074,7.775,,S
765,766,1,1,"Hogeboom, Mrs. John C (Anna Andrews)",female,51.0,1,0,13502,77.9583,D11,S
766,767,0,1,"Brewe, Dr. Arthur Jackson",male,,0,0,112379,39.6,,C
767,768,0,3,"Mangan, Miss. Mary",female,30.5,0,0,364850,7.75,,Q
768,769,0,3,"Moran, Mr. Daniel J",male,,1,0,371110,24.15,,Q
769,770,0,3,"Gronnestad, Mr. Daniel Danielsen",male,32.0,0,0,8471,8.3625,,S
770,771,0,3,"Lievens, Mr. Rene Aime",male,24.0,0,0,345781,9.5,,S
771,772,0,3,"Jensen, Mr. Niels Peder",male,48.0,0,0,350047,7.8542,,S
772,773,0,2,"Mack, Mrs. (Mary)",female,57.0,0,0,S.O./P.P. 3,10.5,E77,S
773,774,0,3,"Elias, Mr. Dibo",male,,0,0,2674,7.225,,C
774,775,1,2,"Hocking, Mrs. Elizabeth (Eliza Needs)",female,54.0,1,3,29105,23.0,,S
775,776,0,3,"Myhrman, Mr. Pehr Fabian Oliver Malkolm",male,18.0,0,0,347078,7.75,,S
776,777,0,3,"Tobin, Mr. Roger",male,,0,0,383121,7.75,F38,Q
777,778,1,3,"Emanuel, Miss. Virginia Ethel",female,5.0,0,0,364516,12.475,,S
778,779,0,3,"Kilgannon, Mr. Thomas J",male,,0,0,36865,7.7375,,Q
779,780,1,1,"Robert, Mrs. Edward Scott (Elisabeth Walton McMillan)",female,43.0,0,1,24160,211.3375,B3,S
780,781,1,3,"Ayoub, Miss. Banoura",female,13.0,0,0,2687,7.2292,,C
781,782,1,1,"Dick, Mrs. Albert Adrian (Vera Gillespie)",female,17.0,1,0,17474,57.0,B20,S
782,783,0,1,"Long, Mr. Milton Clyde",male,29.0,0,0,113501,30.0,D6,S
783,784,0,3,"Johnston, Mr. Andrew G",male,,1,2,W./C. 6607,23.45,,S
784,785,0,3,"Ali, Mr. William",male,25.0,0,0,SOTON/O.Q. 3101312,7.05,,S
785,786,0,3,"Harmer, Mr. Abraham (David Lishin)",male,25.0,0,0,374887,7.25,,S
786,787,1,3,"Sjoblom, Miss. Anna Sofia",female,18.0,0,0,3101265,7.4958,,S
787,788,0,3,"Rice, Master. George Hugh",male,8.0,4,1,382652,29.125,,Q
788,789,1,3,"Dean, Master. Bertram Vere",male,1.0,1,2,C.A. 2315,20.575,,S
789,790,0,1,"Guggenheim, Mr. Benjamin",male,46.0,0,0,PC 17593,79.2,B82 B84,C
790,791,0,3,"Keane, Mr. Andrew ""Andy""",male,,0,0,12460,7.75,,Q
791,792,0,2,"Gaskell, Mr. Alfred",male,16.0,0,0,239865,26.0,,S
792,793,0,3,"Sage, Miss. Stella Anna",female,,8,2,CA. 2343,69.55,,S
793,794,0,1,"Hoyt, Mr. William Fisher",male,,0,0,PC 17600,30.6958,,C
794,795,0,3,"Dantcheff, Mr. Ristiu",male,25.0,0,0,349203,7.8958,,S
795,796,0,2,"Otter, Mr. Richard",male,39.0,0,0,28213,13.0,,S
796,797,1,1,"Leader, Dr. Alice (Farnham)",female,49.0,0,0,17465,25.9292,D17,S
797,798,1,3,"Osman, Mrs. Mara",female,31.0,0,0,349244,8.6833,,S
798,799,0,3,"Ibrahim Shawah, Mr. Yousseff",male,30.0,0,0,2685,7.2292,,C
799,800,0,3,"Van Impe, Mrs. Jean Baptiste (Rosalie Paula Govaert)",female,30.0,1,1,345773,24.15,,S
800,801,0,2,"Ponesell, Mr. Martin",male,34.0,0,0,250647,13.0,,S
801,802,1,2,"Collyer, Mrs. Harvey (Charlotte Annie Tate)",female,31.0,1,1,C.A. 31921,26.25,,S
802,803,1,1,"Carter, Master. William Thornton II",male,11.0,1,2,113760,120.0,B96 B98,S
803,804,1,3,"Thomas, Master. Assad Alexander",male,0.42,0,1,2625,8.5167,,C
804,805,1,3,"Hedman, Mr. Oskar Arvid",male,27.0,0,0,347089,6.975,,S
805,806,0,3,"Johansson, Mr. Karl Johan",male,31.0,0,0,347063,7.775,,S
806,807,0,1,"Andrews, Mr. Thomas Jr",male,39.0,0,0,112050,0.0,A36,S
807,808,0,3,"Pettersson, Miss. Ellen Natalia",female,18.0,0,0,347087,7.775,,S
808,809,0,2,"Meyer, Mr. August",male,39.0,0,0,248723,13.0,,S
809,810,1,1,"Chambers, Mrs. Norman Campbell (Bertha Griggs)",female,33.0,1,0,113806,53.1,E8,S
810,811,0,3,"Alexander, Mr. William",male,26.0,0,0,3474,7.8875,,S
811,812,0,3,"Lester, Mr. James",male,39.0,0,0,A/4 48871,24.15,,S
812,813,0,2,"Slemen, Mr. Richard James",male,35.0,0,0,28206,10.5,,S
813,814,0,3,"Andersson, Miss. Ebba Iris Alfrida",female,6.0,4,2,347082,31.275,,S
814,815,0,3,"Tomlin, Mr. Ernest Portage",male,30.5,0,0,364499,8.05,,S
815,816,0,1,"Fry, Mr. Richard",male,,0,0,112058,0.0,B102,S
816,817,0,3,"Heininen, Miss. Wendla Maria",female,23.0,0,0,STON/O2. 3101290,7.925,,S
817,818,0,2,"Mallet, Mr. Albert",male,31.0,1,1,S.C./PARIS 2079,37.0042,,C
818,819,0,3,"Holm, Mr. John Fredrik Alexander",male,43.0,0,0,C 7075,6.45,,S
819,820,0,3,"Skoog, Master. Karl Thorsten",male,10.0,3,2,347088,27.9,,S
820,821,1,1,"Hays, Mrs. Charles Melville (Clara Jennings Gregg)",female,52.0,1,1,12749,93.5,B69,S
821,822,1,3,"Lulic, Mr. Nikola",male,27.0,0,0,315098,8.6625,,S
822,823,0,1,"Reuchlin, Jonkheer. John George",male,38.0,0,0,19972,0.0,,S
823,824,1,3,"Moor, Mrs. (Beila)",female,27.0,0,1,392096,12.475,E121,S
824,825,0,3,"Panula, Master. Urho Abraham",male,2.0,4,1,3101295,39.6875,,S
825,826,0,3,"Flynn, Mr. John",male,,0,0,368323,6.95,,Q
826,827,0,3,"Lam, Mr. Len",male,,0,0,1601,56.4958,,S
827,828,1,2,"Mallet, Master. Andre",male,1.0,0,2,S.C./PARIS 2079,37.0042,,C
828,829,1,3,"McCormack, Mr. Thomas Joseph",male,,0,0,367228,7.75,,Q
829,830,1,1,"Stone, Mrs. George Nelson (Martha Evelyn)",female,62.0,0,0,113572,80.0,B28,
830,831,1,3,"Yasbeck, Mrs. Antoni (Selini Alexander)",female,15.0,1,0,2659,14.4542,,C
831,832,1,2,"Richards, Master. George Sibley",male,0.83,1,1,29106,18.75,,S
832,833,0,3,"Saad, Mr. Amin",male,,0,0,2671,7.2292,,C
833,834,0,3,"Augustsson, Mr. Albert",male,23.0,0,0,347468,7.8542,,S
834,835,0,3,"Allum, Mr. Owen George",male,18.0,0,0,2223,8.3,,S
835,836,1,1,"Compton, Miss. Sara Rebecca",female,39.0,1,1,PC 17756,83.1583,E49,C
836,837,0,3,"Pasic, Mr. Jakob",male,21.0,0,0,315097,8.6625,,S
837,838,0,3,"Sirota, Mr. Maurice",male,,0,0,392092,8.05,,S
838,839,1,3,"Chip, Mr. Chang",male,32.0,0,0,1601,56.4958,,S
839,840,1,1,"Marechal, Mr. Pierre",male,,0,0,11774,29.7,C47,C
840,841,0,3,"Alhomaki, Mr. Ilmari Rudolf",male,20.0,0,0,SOTON/O2 3101287,7.925,,S
841,842,0,2,"Mudd, Mr. Thomas Charles",male,16.0,0,0,S.O./P.P. 3,10.5,,S
842,843,1,1,"Serepeca, Miss. Augusta",female,30.0,0,0,113798,31.0,,C
843,844,0,3,"Lemberopolous, Mr. Peter L",male,34.5,0,0,2683,6.4375,,C
844,845,0,3,"Culumovic, Mr. Jeso",male,17.0,0,0,315090,8.6625,,S
845,846,0,3,"Abbing, Mr. Anthony",male,42.0,0,0,C.A. 5547,7.55,,S
846,847,0,3,"Sage, Mr. Douglas Bullen",male,,8,2,CA. 2343,69.55,,S
847,848,0,3,"Markoff, Mr. Marin",male,35.0,0,0,349213,7.8958,,C
848,849,0,2,"Harper, Rev. John",male,28.0,0,1,248727,33.0,,S
849,850,1,1,"Goldenberg, Mrs. Samuel L (Edwiga Grabowska)",female,,1,0,17453,89.1042,C92,C
850,851,0,3,"Andersson, Master. Sigvard Harald Elias",male,4.0,4,2,347082,31.275,,S
851,852,0,3,"Svensson, Mr. Johan",male,74.0,0,0,347060,7.775,,S
852,853,0,3,"Boulos, Miss. Nourelain",female,9.0,1,1,2678,15.2458,,C
853,854,1,1,"Lines, Miss. Mary Conover",female,16.0,0,1,PC 17592,39.4,D28,S
854,855,0,2,"Carter, Mrs. Ernest Courtenay (Lilian Hughes)",female,44.0,1,0,244252,26.0,,S
855,856,1,3,"Aks, Mrs. Sam (Leah Rosen)",female,18.0,0,1,392091,9.35,,S
856,857,1,1,"Wick, Mrs. George Dennick (Mary Hitchcock)",female,45.0,1,1,36928,164.8667,,S
857,858,1,1,"Daly, Mr. Peter Denis ",male,51.0,0,0,113055,26.55,E17,S
858,859,1,3,"Baclini, Mrs. Solomon (Latifa Qurban)",female,24.0,0,3,2666,19.2583,,C
859,860,0,3,"Razi, Mr. Raihed",male,,0,0,2629,7.2292,,C
860,861,0,3,"Hansen, Mr. Claus Peter",male,41.0,2,0,350026,14.1083,,S
861,862,0,2,"Giles, Mr. Frederick Edward",male,21.0,1,0,28134,11.5,,S
862,863,1,1,"Swift, Mrs. Frederick Joel (Margaret Welles Barron)",female,48.0,0,0,17466,25.9292,D17,S
863,864,0,3,"Sage, Miss. Dorothy Edith ""Dolly""",female,,8,2,CA. 2343,69.55,,S
864,865,0,2,"Gill, Mr. John William",male,24.0,0,0,233866,13.0,,S
865,866,1,2,"Bystrom, Mrs. (Karolina)",female,42.0,0,0,236852,13.0,,S
866,867,1,2,"Duran y More, Miss. Asuncion",female,27.0,1,0,SC/PARIS 2149,13.8583,,C
867,868,0,1,"Roebling, Mr. Washington Augustus II",male,31.0,0,0,PC 17590,50.4958,A24,S
868,869,0,3,"van Melkebeke, Mr. Philemon",male,,0,0,345777,9.5,,S
869,870,1,3,"Johnson, Master. Harold Theodor",male,4.0,1,1,347742,11.1333,,S
870,871,0,3,"Balkic, Mr. Cerin",male,26.0,0,0,349248,7.8958,,S
871,872,1,1,"Beckwith, Mrs. Richard Leonard (Sallie Monypeny)",female,47.0,1,1,11751,52.5542,D35,S
872,873,0,1,"Carlsson, Mr. Frans Olof",male,33.0,0,0,695,5.0,B51 B53 B55,S
873,874,0,3,"Vander Cruyssen, Mr. Victor",male,47.0,0,0,345765,9.0,,S
874,875,1,2,"Abelson, Mrs. Samuel (Hannah Wizosky)",female,28.0,1,0,P/PP 3381,24.0,,C
875,876,1,3,"Najib, Miss. Adele Kiamie ""Jane""",female,15.0,0,0,2667,7.225,,C
876,877,0,3,"Gustafsson, Mr. Alfred Ossian",male,20.0,0,0,7534,9.8458,,S
877,878,0,3,"Petroff, Mr. Nedelio",male,19.0,0,0,349212,7.8958,,S
878,879,0,3,"Laleff, Mr. Kristo",male,,0,0,349217,7.8958,,S
879,880,1,1,"Potter, Mrs. Thomas Jr (Lily Alexenia Wilson)",female,56.0,0,1,11767,83.1583,C50,C
880,881,1,2,"Shelley, Mrs. William (Imanita Parrish Hall)",female,25.0,0,1,230433,26.0,,S
881,882,0,3,"Markun, Mr. Johann",male,33.0,0,0,349257,7.8958,,S
882,883,0,3,"Dahlberg, Miss. Gerda Ulrika",female,22.0,0,0,7552,10.5167,,S
883,884,0,2,"Banfield, Mr. Frederick James",male,28.0,0,0,C.A./SOTON 34068,10.5,,S
884,885,0,3,"Sutehall, Mr. Henry Jr",male,25.0,0,0,SOTON/OQ 392076,7.05,,S
885,886,0,3,"Rice, Mrs. William (Margaret Norton)",female,39.0,0,5,382652,29.125,,Q
886,887,0,2,"Montvila, Rev. Juozas",male,27.0,0,0,211536,13.0,,S
887,888,1,1,"Graham, Miss. Margaret Edith",female,19.0,0,0,112053,30.0,B42,S
888,889,0,3,"Johnston, Miss. Catherine Helen ""Carrie""",female,,1,2,W./C. 6607,23.45,,S
889,890,1,1,"Behr, Mr. Karl Howell",male,26.0,0,0,111369,30.0,C148,C
890,891,0,3,"Dooley, Mr. Patrick",male,32.0,0,0,370376,7.75,,Q
1 PassengerId Survived Pclass Name Sex Age SibSp Parch Ticket Fare Cabin Embarked
2 0 1 0 3 Braund, Mr. Owen Harris male 22.0 1 0 A/5 21171 7.25 S
3 1 2 1 1 Cumings, Mrs. John Bradley (Florence Briggs Thayer) female 38.0 1 0 PC 17599 71.2833 C85 C
4 2 3 1 3 Heikkinen, Miss. Laina female 26.0 0 0 STON/O2. 3101282 7.925 S
5 3 4 1 1 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35.0 1 0 113803 53.1 C123 S
6 4 5 0 3 Allen, Mr. William Henry male 35.0 0 0 373450 8.05 S
7 5 6 0 3 Moran, Mr. James male 0 0 330877 8.4583 Q
8 6 7 0 1 McCarthy, Mr. Timothy J male 54.0 0 0 17463 51.8625 E46 S
9 7 8 0 3 Palsson, Master. Gosta Leonard male 2.0 3 1 349909 21.075 S
10 8 9 1 3 Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg) female 27.0 0 2 347742 11.1333 S
11 9 10 1 2 Nasser, Mrs. Nicholas (Adele Achem) female 14.0 1 0 237736 30.0708 C
12 10 11 1 3 Sandstrom, Miss. Marguerite Rut female 4.0 1 1 PP 9549 16.7 G6 S
13 11 12 1 1 Bonnell, Miss. Elizabeth female 58.0 0 0 113783 26.55 C103 S
14 12 13 0 3 Saundercock, Mr. William Henry male 20.0 0 0 A/5. 2151 8.05 S
15 13 14 0 3 Andersson, Mr. Anders Johan male 39.0 1 5 347082 31.275 S
16 14 15 0 3 Vestrom, Miss. Hulda Amanda Adolfina female 14.0 0 0 350406 7.8542 S
17 15 16 1 2 Hewlett, Mrs. (Mary D Kingcome) female 55.0 0 0 248706 16.0 S
18 16 17 0 3 Rice, Master. Eugene male 2.0 4 1 382652 29.125 Q
19 17 18 1 2 Williams, Mr. Charles Eugene male 0 0 244373 13.0 S
20 18 19 0 3 Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele) female 31.0 1 0 345763 18.0 S
21 19 20 1 3 Masselmani, Mrs. Fatima female 0 0 2649 7.225 C
22 20 21 0 2 Fynney, Mr. Joseph J male 35.0 0 0 239865 26.0 S
23 21 22 1 2 Beesley, Mr. Lawrence male 34.0 0 0 248698 13.0 D56 S
24 22 23 1 3 McGowan, Miss. Anna "Annie" female 15.0 0 0 330923 8.0292 Q
25 23 24 1 1 Sloper, Mr. William Thompson male 28.0 0 0 113788 35.5 A6 S
26 24 25 0 3 Palsson, Miss. Torborg Danira female 8.0 3 1 349909 21.075 S
27 25 26 1 3 Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson) female 38.0 1 5 347077 31.3875 S
28 26 27 0 3 Emir, Mr. Farred Chehab male 0 0 2631 7.225 C
29 27 28 0 1 Fortune, Mr. Charles Alexander male 19.0 3 2 19950 263.0 C23 C25 C27 S
30 28 29 1 3 O'Dwyer, Miss. Ellen "Nellie" female 0 0 330959 7.8792 Q
31 29 30 0 3 Todoroff, Mr. Lalio male 0 0 349216 7.8958 S
32 30 31 0 1 Uruchurtu, Don. Manuel E male 40.0 0 0 PC 17601 27.7208 C
33 31 32 1 1 Spencer, Mrs. William Augustus (Marie Eugenie) female 1 0 PC 17569 146.5208 B78 C
34 32 33 1 3 Glynn, Miss. Mary Agatha female 0 0 335677 7.75 Q
35 33 34 0 2 Wheadon, Mr. Edward H male 66.0 0 0 C.A. 24579 10.5 S
36 34 35 0 1 Meyer, Mr. Edgar Joseph male 28.0 1 0 PC 17604 82.1708 C
37 35 36 0 1 Holverson, Mr. Alexander Oskar male 42.0 1 0 113789 52.0 S
38 36 37 1 3 Mamee, Mr. Hanna male 0 0 2677 7.2292 C
39 37 38 0 3 Cann, Mr. Ernest Charles male 21.0 0 0 A./5. 2152 8.05 S
40 38 39 0 3 Vander Planke, Miss. Augusta Maria female 18.0 2 0 345764 18.0 S
41 39 40 1 3 Nicola-Yarred, Miss. Jamila female 14.0 1 0 2651 11.2417 C
42 40 41 0 3 Ahlin, Mrs. Johan (Johanna Persdotter Larsson) female 40.0 1 0 7546 9.475 S
43 41 42 0 2 Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott) female 27.0 1 0 11668 21.0 S
44 42 43 0 3 Kraeff, Mr. Theodor male 0 0 349253 7.8958 C
45 43 44 1 2 Laroche, Miss. Simonne Marie Anne Andree female 3.0 1 2 SC/Paris 2123 41.5792 C
46 44 45 1 3 Devaney, Miss. Margaret Delia female 19.0 0 0 330958 7.8792 Q
47 45 46 0 3 Rogers, Mr. William John male 0 0 S.C./A.4. 23567 8.05 S
48 46 47 0 3 Lennon, Mr. Denis male 1 0 370371 15.5 Q
49 47 48 1 3 O'Driscoll, Miss. Bridget female 0 0 14311 7.75 Q
50 48 49 0 3 Samaan, Mr. Youssef male 2 0 2662 21.6792 C
51 49 50 0 3 Arnold-Franchi, Mrs. Josef (Josefine Franchi) female 18.0 1 0 349237 17.8 S
52 50 51 0 3 Panula, Master. Juha Niilo male 7.0 4 1 3101295 39.6875 S
53 51 52 0 3 Nosworthy, Mr. Richard Cater male 21.0 0 0 A/4. 39886 7.8 S
54 52 53 1 1 Harper, Mrs. Henry Sleeper (Myna Haxtun) female 49.0 1 0 PC 17572 76.7292 D33 C
55 53 54 1 2 Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson) female 29.0 1 0 2926 26.0 S
56 54 55 0 1 Ostby, Mr. Engelhart Cornelius male 65.0 0 1 113509 61.9792 B30 C
57 55 56 1 1 Woolner, Mr. Hugh male 0 0 19947 35.5 C52 S
58 56 57 1 2 Rugg, Miss. Emily female 21.0 0 0 C.A. 31026 10.5 S
59 57 58 0 3 Novel, Mr. Mansouer male 28.5 0 0 2697 7.2292 C
60 58 59 1 2 West, Miss. Constance Mirium female 5.0 1 2 C.A. 34651 27.75 S
61 59 60 0 3 Goodwin, Master. William Frederick male 11.0 5 2 CA 2144 46.9 S
62 60 61 0 3 Sirayanian, Mr. Orsen male 22.0 0 0 2669 7.2292 C
63 61 62 1 1 Icard, Miss. Amelie female 38.0 0 0 113572 80.0 B28
64 62 63 0 1 Harris, Mr. Henry Birkhardt male 45.0 1 0 36973 83.475 C83 S
65 63 64 0 3 Skoog, Master. Harald male 4.0 3 2 347088 27.9 S
66 64 65 0 1 Stewart, Mr. Albert A male 0 0 PC 17605 27.7208 C
67 65 66 1 3 Moubarek, Master. Gerios male 1 1 2661 15.2458 C
68 66 67 1 2 Nye, Mrs. (Elizabeth Ramell) female 29.0 0 0 C.A. 29395 10.5 F33 S
69 67 68 0 3 Crease, Mr. Ernest James male 19.0 0 0 S.P. 3464 8.1583 S
70 68 69 1 3 Andersson, Miss. Erna Alexandra female 17.0 4 2 3101281 7.925 S
71 69 70 0 3 Kink, Mr. Vincenz male 26.0 2 0 315151 8.6625 S
72 70 71 0 2 Jenkin, Mr. Stephen Curnow male 32.0 0 0 C.A. 33111 10.5 S
73 71 72 0 3 Goodwin, Miss. Lillian Amy female 16.0 5 2 CA 2144 46.9 S
74 72 73 0 2 Hood, Mr. Ambrose Jr male 21.0 0 0 S.O.C. 14879 73.5 S
75 73 74 0 3 Chronopoulos, Mr. Apostolos male 26.0 1 0 2680 14.4542 C
76 74 75 1 3 Bing, Mr. Lee male 32.0 0 0 1601 56.4958 S
77 75 76 0 3 Moen, Mr. Sigurd Hansen male 25.0 0 0 348123 7.65 F G73 S
78 76 77 0 3 Staneff, Mr. Ivan male 0 0 349208 7.8958 S
79 77 78 0 3 Moutal, Mr. Rahamin Haim male 0 0 374746 8.05 S
80 78 79 1 2 Caldwell, Master. Alden Gates male 0.83 0 2 248738 29.0 S
81 79 80 1 3 Dowdell, Miss. Elizabeth female 30.0 0 0 364516 12.475 S
82 80 81 0 3 Waelens, Mr. Achille male 22.0 0 0 345767 9.0 S
83 81 82 1 3 Sheerlinck, Mr. Jan Baptist male 29.0 0 0 345779 9.5 S
84 82 83 1 3 McDermott, Miss. Brigdet Delia female 0 0 330932 7.7875 Q
85 83 84 0 1 Carrau, Mr. Francisco M male 28.0 0 0 113059 47.1 S
86 84 85 1 2 Ilett, Miss. Bertha female 17.0 0 0 SO/C 14885 10.5 S
87 85 86 1 3 Backstrom, Mrs. Karl Alfred (Maria Mathilda Gustafsson) female 33.0 3 0 3101278 15.85 S
88 86 87 0 3 Ford, Mr. William Neal male 16.0 1 3 W./C. 6608 34.375 S
89 87 88 0 3 Slocovski, Mr. Selman Francis male 0 0 SOTON/OQ 392086 8.05 S
90 88 89 1 1 Fortune, Miss. Mabel Helen female 23.0 3 2 19950 263.0 C23 C25 C27 S
91 89 90 0 3 Celotti, Mr. Francesco male 24.0 0 0 343275 8.05 S
92 90 91 0 3 Christmann, Mr. Emil male 29.0 0 0 343276 8.05 S
93 91 92 0 3 Andreasson, Mr. Paul Edvin male 20.0 0 0 347466 7.8542 S
94 92 93 0 1 Chaffee, Mr. Herbert Fuller male 46.0 1 0 W.E.P. 5734 61.175 E31 S
95 93 94 0 3 Dean, Mr. Bertram Frank male 26.0 1 2 C.A. 2315 20.575 S
96 94 95 0 3 Coxon, Mr. Daniel male 59.0 0 0 364500 7.25 S
97 95 96 0 3 Shorney, Mr. Charles Joseph male 0 0 374910 8.05 S
98 96 97 0 1 Goldschmidt, Mr. George B male 71.0 0 0 PC 17754 34.6542 A5 C
99 97 98 1 1 Greenfield, Mr. William Bertram male 23.0 0 1 PC 17759 63.3583 D10 D12 C
100 98 99 1 2 Doling, Mrs. John T (Ada Julia Bone) female 34.0 0 1 231919 23.0 S
101 99 100 0 2 Kantor, Mr. Sinai male 34.0 1 0 244367 26.0 S
102 100 101 0 3 Petranec, Miss. Matilda female 28.0 0 0 349245 7.8958 S
103 101 102 0 3 Petroff, Mr. Pastcho ("Pentcho") male 0 0 349215 7.8958 S
104 102 103 0 1 White, Mr. Richard Frasar male 21.0 0 1 35281 77.2875 D26 S
105 103 104 0 3 Johansson, Mr. Gustaf Joel male 33.0 0 0 7540 8.6542 S
106 104 105 0 3 Gustafsson, Mr. Anders Vilhelm male 37.0 2 0 3101276 7.925 S
107 105 106 0 3 Mionoff, Mr. Stoytcho male 28.0 0 0 349207 7.8958 S
108 106 107 1 3 Salkjelsvik, Miss. Anna Kristine female 21.0 0 0 343120 7.65 S
109 107 108 1 3 Moss, Mr. Albert Johan male 0 0 312991 7.775 S
110 108 109 0 3 Rekic, Mr. Tido male 38.0 0 0 349249 7.8958 S
111 109 110 1 3 Moran, Miss. Bertha female 1 0 371110 24.15 Q
112 110 111 0 1 Porter, Mr. Walter Chamberlain male 47.0 0 0 110465 52.0 C110 S
113 111 112 0 3 Zabour, Miss. Hileni female 14.5 1 0 2665 14.4542 C
114 112 113 0 3 Barton, Mr. David John male 22.0 0 0 324669 8.05 S
115 113 114 0 3 Jussila, Miss. Katriina female 20.0 1 0 4136 9.825 S
116 114 115 0 3 Attalah, Miss. Malake female 17.0 0 0 2627 14.4583 C
117 115 116 0 3 Pekoniemi, Mr. Edvard male 21.0 0 0 STON/O 2. 3101294 7.925 S
118 116 117 0 3 Connors, Mr. Patrick male 70.5 0 0 370369 7.75 Q
119 117 118 0 2 Turpin, Mr. William John Robert male 29.0 1 0 11668 21.0 S
120 118 119 0 1 Baxter, Mr. Quigg Edmond male 24.0 0 1 PC 17558 247.5208 B58 B60 C
121 119 120 0 3 Andersson, Miss. Ellis Anna Maria female 2.0 4 2 347082 31.275 S
122 120 121 0 2 Hickman, Mr. Stanley George male 21.0 2 0 S.O.C. 14879 73.5 S
123 121 122 0 3 Moore, Mr. Leonard Charles male 0 0 A4. 54510 8.05 S
124 122 123 0 2 Nasser, Mr. Nicholas male 32.5 1 0 237736 30.0708 C
125 123 124 1 2 Webber, Miss. Susan female 32.5 0 0 27267 13.0 E101 S
126 124 125 0 1 White, Mr. Percival Wayland male 54.0 0 1 35281 77.2875 D26 S
127 125 126 1 3 Nicola-Yarred, Master. Elias male 12.0 1 0 2651 11.2417 C
128 126 127 0 3 McMahon, Mr. Martin male 0 0 370372 7.75 Q
129 127 128 1 3 Madsen, Mr. Fridtjof Arne male 24.0 0 0 C 17369 7.1417 S
130 128 129 1 3 Peter, Miss. Anna female 1 1 2668 22.3583 F E69 C
131 129 130 0 3 Ekstrom, Mr. Johan male 45.0 0 0 347061 6.975 S
132 130 131 0 3 Drazenoic, Mr. Jozef male 33.0 0 0 349241 7.8958 C
133 131 132 0 3 Coelho, Mr. Domingos Fernandeo male 20.0 0 0 SOTON/O.Q. 3101307 7.05 S
134 132 133 0 3 Robins, Mrs. Alexander A (Grace Charity Laury) female 47.0 1 0 A/5. 3337 14.5 S
135 133 134 1 2 Weisz, Mrs. Leopold (Mathilde Francoise Pede) female 29.0 1 0 228414 26.0 S
136 134 135 0 2 Sobey, Mr. Samuel James Hayden male 25.0 0 0 C.A. 29178 13.0 S
137 135 136 0 2 Richard, Mr. Emile male 23.0 0 0 SC/PARIS 2133 15.0458 C
138 136 137 1 1 Newsom, Miss. Helen Monypeny female 19.0 0 2 11752 26.2833 D47 S
139 137 138 0 1 Futrelle, Mr. Jacques Heath male 37.0 1 0 113803 53.1 C123 S
140 138 139 0 3 Osen, Mr. Olaf Elon male 16.0 0 0 7534 9.2167 S
141 139 140 0 1 Giglio, Mr. Victor male 24.0 0 0 PC 17593 79.2 B86 C
142 140 141 0 3 Boulos, Mrs. Joseph (Sultana) female 0 2 2678 15.2458 C
143 141 142 1 3 Nysten, Miss. Anna Sofia female 22.0 0 0 347081 7.75 S
144 142 143 1 3 Hakkarainen, Mrs. Pekka Pietari (Elin Matilda Dolck) female 24.0 1 0 STON/O2. 3101279 15.85 S
145 143 144 0 3 Burke, Mr. Jeremiah male 19.0 0 0 365222 6.75 Q
146 144 145 0 2 Andrew, Mr. Edgardo Samuel male 18.0 0 0 231945 11.5 S
147 145 146 0 2 Nicholls, Mr. Joseph Charles male 19.0 1 1 C.A. 33112 36.75 S
148 146 147 1 3 Andersson, Mr. August Edvard ("Wennerstrom") male 27.0 0 0 350043 7.7958 S
149 147 148 0 3 Ford, Miss. Robina Maggie "Ruby" female 9.0 2 2 W./C. 6608 34.375 S
150 148 149 0 2 Navratil, Mr. Michel ("Louis M Hoffman") male 36.5 0 2 230080 26.0 F2 S
151 149 150 0 2 Byles, Rev. Thomas Roussel Davids male 42.0 0 0 244310 13.0 S
152 150 151 0 2 Bateman, Rev. Robert James male 51.0 0 0 S.O.P. 1166 12.525 S
153 151 152 1 1 Pears, Mrs. Thomas (Edith Wearne) female 22.0 1 0 113776 66.6 C2 S
154 152 153 0 3 Meo, Mr. Alfonzo male 55.5 0 0 A.5. 11206 8.05 S
155 153 154 0 3 van Billiard, Mr. Austin Blyler male 40.5 0 2 A/5. 851 14.5 S
156 154 155 0 3 Olsen, Mr. Ole Martin male 0 0 Fa 265302 7.3125 S
157 155 156 0 1 Williams, Mr. Charles Duane male 51.0 0 1 PC 17597 61.3792 C
158 156 157 1 3 Gilnagh, Miss. Katherine "Katie" female 16.0 0 0 35851 7.7333 Q
159 157 158 0 3 Corn, Mr. Harry male 30.0 0 0 SOTON/OQ 392090 8.05 S
160 158 159 0 3 Smiljanic, Mr. Mile male 0 0 315037 8.6625 S
161 159 160 0 3 Sage, Master. Thomas Henry male 8 2 CA. 2343 69.55 S
162 160 161 0 3 Cribb, Mr. John Hatfield male 44.0 0 1 371362 16.1 S
163 161 162 1 2 Watt, Mrs. James (Elizabeth "Bessie" Inglis Milne) female 40.0 0 0 C.A. 33595 15.75 S
164 162 163 0 3 Bengtsson, Mr. John Viktor male 26.0 0 0 347068 7.775 S
165 163 164 0 3 Calic, Mr. Jovo male 17.0 0 0 315093 8.6625 S
166 164 165 0 3 Panula, Master. Eino Viljami male 1.0 4 1 3101295 39.6875 S
167 165 166 1 3 Goldsmith, Master. Frank John William "Frankie" male 9.0 0 2 363291 20.525 S
168 166 167 1 1 Chibnall, Mrs. (Edith Martha Bowerman) female 0 1 113505 55.0 E33 S
169 167 168 0 3 Skoog, Mrs. William (Anna Bernhardina Karlsson) female 45.0 1 4 347088 27.9 S
170 168 169 0 1 Baumann, Mr. John D male 0 0 PC 17318 25.925 S
171 169 170 0 3 Ling, Mr. Lee male 28.0 0 0 1601 56.4958 S
172 170 171 0 1 Van der hoef, Mr. Wyckoff male 61.0 0 0 111240 33.5 B19 S
173 171 172 0 3 Rice, Master. Arthur male 4.0 4 1 382652 29.125 Q
174 172 173 1 3 Johnson, Miss. Eleanor Ileen female 1.0 1 1 347742 11.1333 S
175 173 174 0 3 Sivola, Mr. Antti Wilhelm male 21.0 0 0 STON/O 2. 3101280 7.925 S
176 174 175 0 1 Smith, Mr. James Clinch male 56.0 0 0 17764 30.6958 A7 C
177 175 176 0 3 Klasen, Mr. Klas Albin male 18.0 1 1 350404 7.8542 S
178 176 177 0 3 Lefebre, Master. Henry Forbes male 3 1 4133 25.4667 S
179 177 178 0 1 Isham, Miss. Ann Elizabeth female 50.0 0 0 PC 17595 28.7125 C49 C
180 178 179 0 2 Hale, Mr. Reginald male 30.0 0 0 250653 13.0 S
181 179 180 0 3 Leonard, Mr. Lionel male 36.0 0 0 LINE 0.0 S
182 180 181 0 3 Sage, Miss. Constance Gladys female 8 2 CA. 2343 69.55 S
183 181 182 0 2 Pernot, Mr. Rene male 0 0 SC/PARIS 2131 15.05 C
184 182 183 0 3 Asplund, Master. Clarence Gustaf Hugo male 9.0 4 2 347077 31.3875 S
185 183 184 1 2 Becker, Master. Richard F male 1.0 2 1 230136 39.0 F4 S
186 184 185 1 3 Kink-Heilmann, Miss. Luise Gretchen female 4.0 0 2 315153 22.025 S
187 185 186 0 1 Rood, Mr. Hugh Roscoe male 0 0 113767 50.0 A32 S
188 186 187 1 3 O'Brien, Mrs. Thomas (Johanna "Hannah" Godfrey) female 1 0 370365 15.5 Q
189 187 188 1 1 Romaine, Mr. Charles Hallace ("Mr C Rolmane") male 45.0 0 0 111428 26.55 S
190 188 189 0 3 Bourke, Mr. John male 40.0 1 1 364849 15.5 Q
191 189 190 0 3 Turcin, Mr. Stjepan male 36.0 0 0 349247 7.8958 S
192 190 191 1 2 Pinsky, Mrs. (Rosa) female 32.0 0 0 234604 13.0 S
193 191 192 0 2 Carbines, Mr. William male 19.0 0 0 28424 13.0 S
194 192 193 1 3 Andersen-Jensen, Miss. Carla Christine Nielsine female 19.0 1 0 350046 7.8542 S
195 193 194 1 2 Navratil, Master. Michel M male 3.0 1 1 230080 26.0 F2 S
196 194 195 1 1 Brown, Mrs. James Joseph (Margaret Tobin) female 44.0 0 0 PC 17610 27.7208 B4 C
197 195 196 1 1 Lurette, Miss. Elise female 58.0 0 0 PC 17569 146.5208 B80 C
198 196 197 0 3 Mernagh, Mr. Robert male 0 0 368703 7.75 Q
199 197 198 0 3 Olsen, Mr. Karl Siegwart Andreas male 42.0 0 1 4579 8.4042 S
200 198 199 1 3 Madigan, Miss. Margaret "Maggie" female 0 0 370370 7.75 Q
201 199 200 0 2 Yrois, Miss. Henriette ("Mrs Harbeck") female 24.0 0 0 248747 13.0 S
202 200 201 0 3 Vande Walle, Mr. Nestor Cyriel male 28.0 0 0 345770 9.5 S
203 201 202 0 3 Sage, Mr. Frederick male 8 2 CA. 2343 69.55 S
204 202 203 0 3 Johanson, Mr. Jakob Alfred male 34.0 0 0 3101264 6.4958 S
205 203 204 0 3 Youseff, Mr. Gerious male 45.5 0 0 2628 7.225 C
206 204 205 1 3 Cohen, Mr. Gurshon "Gus" male 18.0 0 0 A/5 3540 8.05 S
207 205 206 0 3 Strom, Miss. Telma Matilda female 2.0 0 1 347054 10.4625 G6 S
208 206 207 0 3 Backstrom, Mr. Karl Alfred male 32.0 1 0 3101278 15.85 S
209 207 208 1 3 Albimona, Mr. Nassef Cassem male 26.0 0 0 2699 18.7875 C
210 208 209 1 3 Carr, Miss. Helen "Ellen" female 16.0 0 0 367231 7.75 Q
211 209 210 1 1 Blank, Mr. Henry male 40.0 0 0 112277 31.0 A31 C
212 210 211 0 3 Ali, Mr. Ahmed male 24.0 0 0 SOTON/O.Q. 3101311 7.05 S
213 211 212 1 2 Cameron, Miss. Clear Annie female 35.0 0 0 F.C.C. 13528 21.0 S
214 212 213 0 3 Perkin, Mr. John Henry male 22.0 0 0 A/5 21174 7.25 S
215 213 214 0 2 Givard, Mr. Hans Kristensen male 30.0 0 0 250646 13.0 S
216 214 215 0 3 Kiernan, Mr. Philip male 1 0 367229 7.75 Q
217 215 216 1 1 Newell, Miss. Madeleine female 31.0 1 0 35273 113.275 D36 C
218 216 217 1 3 Honkanen, Miss. Eliina female 27.0 0 0 STON/O2. 3101283 7.925 S
219 217 218 0 2 Jacobsohn, Mr. Sidney Samuel male 42.0 1 0 243847 27.0 S
220 218 219 1 1 Bazzani, Miss. Albina female 32.0 0 0 11813 76.2917 D15 C
221 219 220 0 2 Harris, Mr. Walter male 30.0 0 0 W/C 14208 10.5 S
222 220 221 1 3 Sunderland, Mr. Victor Francis male 16.0 0 0 SOTON/OQ 392089 8.05 S
223 221 222 0 2 Bracken, Mr. James H male 27.0 0 0 220367 13.0 S
224 222 223 0 3 Green, Mr. George Henry male 51.0 0 0 21440 8.05 S
225 223 224 0 3 Nenkoff, Mr. Christo male 0 0 349234 7.8958 S
226 224 225 1 1 Hoyt, Mr. Frederick Maxfield male 38.0 1 0 19943 90.0 C93 S
227 225 226 0 3 Berglund, Mr. Karl Ivar Sven male 22.0 0 0 PP 4348 9.35 S
228 226 227 1 2 Mellors, Mr. William John male 19.0 0 0 SW/PP 751 10.5 S
229 227 228 0 3 Lovell, Mr. John Hall ("Henry") male 20.5 0 0 A/5 21173 7.25 S
230 228 229 0 2 Fahlstrom, Mr. Arne Jonas male 18.0 0 0 236171 13.0 S
231 229 230 0 3 Lefebre, Miss. Mathilde female 3 1 4133 25.4667 S
232 230 231 1 1 Harris, Mrs. Henry Birkhardt (Irene Wallach) female 35.0 1 0 36973 83.475 C83 S
233 231 232 0 3 Larsson, Mr. Bengt Edvin male 29.0 0 0 347067 7.775 S
234 232 233 0 2 Sjostedt, Mr. Ernst Adolf male 59.0 0 0 237442 13.5 S
235 233 234 1 3 Asplund, Miss. Lillian Gertrud female 5.0 4 2 347077 31.3875 S
236 234 235 0 2 Leyson, Mr. Robert William Norman male 24.0 0 0 C.A. 29566 10.5 S
237 235 236 0 3 Harknett, Miss. Alice Phoebe female 0 0 W./C. 6609 7.55 S
238 236 237 0 2 Hold, Mr. Stephen male 44.0 1 0 26707 26.0 S
239 237 238 1 2 Collyer, Miss. Marjorie "Lottie" female 8.0 0 2 C.A. 31921 26.25 S
240 238 239 0 2 Pengelly, Mr. Frederick William male 19.0 0 0 28665 10.5 S
241 239 240 0 2 Hunt, Mr. George Henry male 33.0 0 0 SCO/W 1585 12.275 S
242 240 241 0 3 Zabour, Miss. Thamine female 1 0 2665 14.4542 C
243 241 242 1 3 Murphy, Miss. Katherine "Kate" female 1 0 367230 15.5 Q
244 242 243 0 2 Coleridge, Mr. Reginald Charles male 29.0 0 0 W./C. 14263 10.5 S
245 243 244 0 3 Maenpaa, Mr. Matti Alexanteri male 22.0 0 0 STON/O 2. 3101275 7.125 S
246 244 245 0 3 Attalah, Mr. Sleiman male 30.0 0 0 2694 7.225 C
247 245 246 0 1 Minahan, Dr. William Edward male 44.0 2 0 19928 90.0 C78 Q
248 246 247 0 3 Lindahl, Miss. Agda Thorilda Viktoria female 25.0 0 0 347071 7.775 S
249 247 248 1 2 Hamalainen, Mrs. William (Anna) female 24.0 0 2 250649 14.5 S
250 248 249 1 1 Beckwith, Mr. Richard Leonard male 37.0 1 1 11751 52.5542 D35 S
251 249 250 0 2 Carter, Rev. Ernest Courtenay male 54.0 1 0 244252 26.0 S
252 250 251 0 3 Reed, Mr. James George male 0 0 362316 7.25 S
253 251 252 0 3 Strom, Mrs. Wilhelm (Elna Matilda Persson) female 29.0 1 1 347054 10.4625 G6 S
254 252 253 0 1 Stead, Mr. William Thomas male 62.0 0 0 113514 26.55 C87 S
255 253 254 0 3 Lobb, Mr. William Arthur male 30.0 1 0 A/5. 3336 16.1 S
256 254 255 0 3 Rosblom, Mrs. Viktor (Helena Wilhelmina) female 41.0 0 2 370129 20.2125 S
257 255 256 1 3 Touma, Mrs. Darwis (Hanne Youssef Razi) female 29.0 0 2 2650 15.2458 C
258 256 257 1 1 Thorne, Mrs. Gertrude Maybelle female 0 0 PC 17585 79.2 C
259 257 258 1 1 Cherry, Miss. Gladys female 30.0 0 0 110152 86.5 B77 S
260 258 259 1 1 Ward, Miss. Anna female 35.0 0 0 PC 17755 512.3292 C
261 259 260 1 2 Parrish, Mrs. (Lutie Davis) female 50.0 0 1 230433 26.0 S
262 260 261 0 3 Smith, Mr. Thomas male 0 0 384461 7.75 Q
263 261 262 1 3 Asplund, Master. Edvin Rojj Felix male 3.0 4 2 347077 31.3875 S
264 262 263 0 1 Taussig, Mr. Emil male 52.0 1 1 110413 79.65 E67 S
265 263 264 0 1 Harrison, Mr. William male 40.0 0 0 112059 0.0 B94 S
266 264 265 0 3 Henry, Miss. Delia female 0 0 382649 7.75 Q
267 265 266 0 2 Reeves, Mr. David male 36.0 0 0 C.A. 17248 10.5 S
268 266 267 0 3 Panula, Mr. Ernesti Arvid male 16.0 4 1 3101295 39.6875 S
269 267 268 1 3 Persson, Mr. Ernst Ulrik male 25.0 1 0 347083 7.775 S
270 268 269 1 1 Graham, Mrs. William Thompson (Edith Junkins) female 58.0 0 1 PC 17582 153.4625 C125 S
271 269 270 1 1 Bissette, Miss. Amelia female 35.0 0 0 PC 17760 135.6333 C99 S
272 270 271 0 1 Cairns, Mr. Alexander male 0 0 113798 31.0 S
273 271 272 1 3 Tornquist, Mr. William Henry male 25.0 0 0 LINE 0.0 S
274 272 273 1 2 Mellinger, Mrs. (Elizabeth Anne Maidment) female 41.0 0 1 250644 19.5 S
275 273 274 0 1 Natsch, Mr. Charles H male 37.0 0 1 PC 17596 29.7 C118 C
276 274 275 1 3 Healy, Miss. Hanora "Nora" female 0 0 370375 7.75 Q
277 275 276 1 1 Andrews, Miss. Kornelia Theodosia female 63.0 1 0 13502 77.9583 D7 S
278 276 277 0 3 Lindblom, Miss. Augusta Charlotta female 45.0 0 0 347073 7.75 S
279 277 278 0 2 Parkes, Mr. Francis "Frank" male 0 0 239853 0.0 S
280 278 279 0 3 Rice, Master. Eric male 7.0 4 1 382652 29.125 Q
281 279 280 1 3 Abbott, Mrs. Stanton (Rosa Hunt) female 35.0 1 1 C.A. 2673 20.25 S
282 280 281 0 3 Duane, Mr. Frank male 65.0 0 0 336439 7.75 Q
283 281 282 0 3 Olsson, Mr. Nils Johan Goransson male 28.0 0 0 347464 7.8542 S
284 282 283 0 3 de Pelsmaeker, Mr. Alfons male 16.0 0 0 345778 9.5 S
285 283 284 1 3 Dorking, Mr. Edward Arthur male 19.0 0 0 A/5. 10482 8.05 S
286 284 285 0 1 Smith, Mr. Richard William male 0 0 113056 26.0 A19 S
287 285 286 0 3 Stankovic, Mr. Ivan male 33.0 0 0 349239 8.6625 C
288 286 287 1 3 de Mulder, Mr. Theodore male 30.0 0 0 345774 9.5 S
289 287 288 0 3 Naidenoff, Mr. Penko male 22.0 0 0 349206 7.8958 S
290 288 289 1 2 Hosono, Mr. Masabumi male 42.0 0 0 237798 13.0 S
291 289 290 1 3 Connolly, Miss. Kate female 22.0 0 0 370373 7.75 Q
292 290 291 1 1 Barber, Miss. Ellen "Nellie" female 26.0 0 0 19877 78.85 S
293 291 292 1 1 Bishop, Mrs. Dickinson H (Helen Walton) female 19.0 1 0 11967 91.0792 B49 C
294 292 293 0 2 Levy, Mr. Rene Jacques male 36.0 0 0 SC/Paris 2163 12.875 D C
295 293 294 0 3 Haas, Miss. Aloisia female 24.0 0 0 349236 8.85 S
296 294 295 0 3 Mineff, Mr. Ivan male 24.0 0 0 349233 7.8958 S
297 295 296 0 1 Lewy, Mr. Ervin G male 0 0 PC 17612 27.7208 C
298 296 297 0 3 Hanna, Mr. Mansour male 23.5 0 0 2693 7.2292 C
299 297 298 0 1 Allison, Miss. Helen Loraine female 2.0 1 2 113781 151.55 C22 C26 S
300 298 299 1 1 Saalfeld, Mr. Adolphe male 0 0 19988 30.5 C106 S
301 299 300 1 1 Baxter, Mrs. James (Helene DeLaudeniere Chaput) female 50.0 0 1 PC 17558 247.5208 B58 B60 C
302 300 301 1 3 Kelly, Miss. Anna Katherine "Annie Kate" female 0 0 9234 7.75 Q
303 301 302 1 3 McCoy, Mr. Bernard male 2 0 367226 23.25 Q
304 302 303 0 3 Johnson, Mr. William Cahoone Jr male 19.0 0 0 LINE 0.0 S
305 303 304 1 2 Keane, Miss. Nora A female 0 0 226593 12.35 E101 Q
306 304 305 0 3 Williams, Mr. Howard Hugh "Harry" male 0 0 A/5 2466 8.05 S
307 305 306 1 1 Allison, Master. Hudson Trevor male 0.92 1 2 113781 151.55 C22 C26 S
308 306 307 1 1 Fleming, Miss. Margaret female 0 0 17421 110.8833 C
309 307 308 1 1 Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo) female 17.0 1 0 PC 17758 108.9 C65 C
310 308 309 0 2 Abelson, Mr. Samuel male 30.0 1 0 P/PP 3381 24.0 C
311 309 310 1 1 Francatelli, Miss. Laura Mabel female 30.0 0 0 PC 17485 56.9292 E36 C
312 310 311 1 1 Hays, Miss. Margaret Bechstein female 24.0 0 0 11767 83.1583 C54 C
313 311 312 1 1 Ryerson, Miss. Emily Borie female 18.0 2 2 PC 17608 262.375 B57 B59 B63 B66 C
314 312 313 0 2 Lahtinen, Mrs. William (Anna Sylfven) female 26.0 1 1 250651 26.0 S
315 313 314 0 3 Hendekovic, Mr. Ignjac male 28.0 0 0 349243 7.8958 S
316 314 315 0 2 Hart, Mr. Benjamin male 43.0 1 1 F.C.C. 13529 26.25 S
317 315 316 1 3 Nilsson, Miss. Helmina Josefina female 26.0 0 0 347470 7.8542 S
318 316 317 1 2 Kantor, Mrs. Sinai (Miriam Sternin) female 24.0 1 0 244367 26.0 S
319 317 318 0 2 Moraweck, Dr. Ernest male 54.0 0 0 29011 14.0 S
320 318 319 1 1 Wick, Miss. Mary Natalie female 31.0 0 2 36928 164.8667 C7 S
321 319 320 1 1 Spedden, Mrs. Frederic Oakley (Margaretta Corning Stone) female 40.0 1 1 16966 134.5 E34 C
322 320 321 0 3 Dennis, Mr. Samuel male 22.0 0 0 A/5 21172 7.25 S
323 321 322 0 3 Danoff, Mr. Yoto male 27.0 0 0 349219 7.8958 S
324 322 323 1 2 Slayter, Miss. Hilda Mary female 30.0 0 0 234818 12.35 Q
325 323 324 1 2 Caldwell, Mrs. Albert Francis (Sylvia Mae Harbaugh) female 22.0 1 1 248738 29.0 S
326 324 325 0 3 Sage, Mr. George John Jr male 8 2 CA. 2343 69.55 S
327 325 326 1 1 Young, Miss. Marie Grice female 36.0 0 0 PC 17760 135.6333 C32 C
328 326 327 0 3 Nysveen, Mr. Johan Hansen male 61.0 0 0 345364 6.2375 S
329 327 328 1 2 Ball, Mrs. (Ada E Hall) female 36.0 0 0 28551 13.0 D S
330 328 329 1 3 Goldsmith, Mrs. Frank John (Emily Alice Brown) female 31.0 1 1 363291 20.525 S
331 329 330 1 1 Hippach, Miss. Jean Gertrude female 16.0 0 1 111361 57.9792 B18 C
332 330 331 1 3 McCoy, Miss. Agnes female 2 0 367226 23.25 Q
333 331 332 0 1 Partner, Mr. Austen male 45.5 0 0 113043 28.5 C124 S
334 332 333 0 1 Graham, Mr. George Edward male 38.0 0 1 PC 17582 153.4625 C91 S
335 333 334 0 3 Vander Planke, Mr. Leo Edmondus male 16.0 2 0 345764 18.0 S
336 334 335 1 1 Frauenthal, Mrs. Henry William (Clara Heinsheimer) female 1 0 PC 17611 133.65 S
337 335 336 0 3 Denkoff, Mr. Mitto male 0 0 349225 7.8958 S
338 336 337 0 1 Pears, Mr. Thomas Clinton male 29.0 1 0 113776 66.6 C2 S
339 337 338 1 1 Burns, Miss. Elizabeth Margaret female 41.0 0 0 16966 134.5 E40 C
340 338 339 1 3 Dahl, Mr. Karl Edwart male 45.0 0 0 7598 8.05 S
341 339 340 0 1 Blackwell, Mr. Stephen Weart male 45.0 0 0 113784 35.5 T S
342 340 341 1 2 Navratil, Master. Edmond Roger male 2.0 1 1 230080 26.0 F2 S
343 341 342 1 1 Fortune, Miss. Alice Elizabeth female 24.0 3 2 19950 263.0 C23 C25 C27 S
344 342 343 0 2 Collander, Mr. Erik Gustaf male 28.0 0 0 248740 13.0 S
345 343 344 0 2 Sedgwick, Mr. Charles Frederick Waddington male 25.0 0 0 244361 13.0 S
346 344 345 0 2 Fox, Mr. Stanley Hubert male 36.0 0 0 229236 13.0 S
347 345 346 1 2 Brown, Miss. Amelia "Mildred" female 24.0 0 0 248733 13.0 F33 S
348 346 347 1 2 Smith, Miss. Marion Elsie female 40.0 0 0 31418 13.0 S
349 347 348 1 3 Davison, Mrs. Thomas Henry (Mary E Finck) female 1 0 386525 16.1 S
350 348 349 1 3 Coutts, Master. William Loch "William" male 3.0 1 1 C.A. 37671 15.9 S
351 349 350 0 3 Dimic, Mr. Jovan male 42.0 0 0 315088 8.6625 S
352 350 351 0 3 Odahl, Mr. Nils Martin male 23.0 0 0 7267 9.225 S
353 351 352 0 1 Williams-Lambert, Mr. Fletcher Fellows male 0 0 113510 35.0 C128 S
354 352 353 0 3 Elias, Mr. Tannous male 15.0 1 1 2695 7.2292 C
355 353 354 0 3 Arnold-Franchi, Mr. Josef male 25.0 1 0 349237 17.8 S
356 354 355 0 3 Yousif, Mr. Wazli male 0 0 2647 7.225 C
357 355 356 0 3 Vanden Steen, Mr. Leo Peter male 28.0 0 0 345783 9.5 S
358 356 357 1 1 Bowerman, Miss. Elsie Edith female 22.0 0 1 113505 55.0 E33 S
359 357 358 0 2 Funk, Miss. Annie Clemmer female 38.0 0 0 237671 13.0 S
360 358 359 1 3 McGovern, Miss. Mary female 0 0 330931 7.8792 Q
361 359 360 1 3 Mockler, Miss. Helen Mary "Ellie" female 0 0 330980 7.8792 Q
362 360 361 0 3 Skoog, Mr. Wilhelm male 40.0 1 4 347088 27.9 S
363 361 362 0 2 del Carlo, Mr. Sebastiano male 29.0 1 0 SC/PARIS 2167 27.7208 C
364 362 363 0 3 Barbara, Mrs. (Catherine David) female 45.0 0 1 2691 14.4542 C
365 363 364 0 3 Asim, Mr. Adola male 35.0 0 0 SOTON/O.Q. 3101310 7.05 S
366 364 365 0 3 O'Brien, Mr. Thomas male 1 0 370365 15.5 Q
367 365 366 0 3 Adahl, Mr. Mauritz Nils Martin male 30.0 0 0 C 7076 7.25 S
368 366 367 1 1 Warren, Mrs. Frank Manley (Anna Sophia Atkinson) female 60.0 1 0 110813 75.25 D37 C
369 367 368 1 3 Moussa, Mrs. (Mantoura Boulos) female 0 0 2626 7.2292 C
370 368 369 1 3 Jermyn, Miss. Annie female 0 0 14313 7.75 Q
371 369 370 1 1 Aubart, Mme. Leontine Pauline female 24.0 0 0 PC 17477 69.3 B35 C
372 370 371 1 1 Harder, Mr. George Achilles male 25.0 1 0 11765 55.4417 E50 C
373 371 372 0 3 Wiklund, Mr. Jakob Alfred male 18.0 1 0 3101267 6.4958 S
374 372 373 0 3 Beavan, Mr. William Thomas male 19.0 0 0 323951 8.05 S
375 373 374 0 1 Ringhini, Mr. Sante male 22.0 0 0 PC 17760 135.6333 C
376 374 375 0 3 Palsson, Miss. Stina Viola female 3.0 3 1 349909 21.075 S
377 375 376 1 1 Meyer, Mrs. Edgar Joseph (Leila Saks) female 1 0 PC 17604 82.1708 C
378 376 377 1 3 Landergren, Miss. Aurora Adelia female 22.0 0 0 C 7077 7.25 S
379 377 378 0 1 Widener, Mr. Harry Elkins male 27.0 0 2 113503 211.5 C82 C
380 378 379 0 3 Betros, Mr. Tannous male 20.0 0 0 2648 4.0125 C
381 379 380 0 3 Gustafsson, Mr. Karl Gideon male 19.0 0 0 347069 7.775 S
382 380 381 1 1 Bidois, Miss. Rosalie female 42.0 0 0 PC 17757 227.525 C
383 381 382 1 3 Nakid, Miss. Maria ("Mary") female 1.0 0 2 2653 15.7417 C
384 382 383 0 3 Tikkanen, Mr. Juho male 32.0 0 0 STON/O 2. 3101293 7.925 S
385 383 384 1 1 Holverson, Mrs. Alexander Oskar (Mary Aline Towner) female 35.0 1 0 113789 52.0 S
386 384 385 0 3 Plotcharsky, Mr. Vasil male 0 0 349227 7.8958 S
387 385 386 0 2 Davies, Mr. Charles Henry male 18.0 0 0 S.O.C. 14879 73.5 S
388 386 387 0 3 Goodwin, Master. Sidney Leonard male 1.0 5 2 CA 2144 46.9 S
389 387 388 1 2 Buss, Miss. Kate female 36.0 0 0 27849 13.0 S
390 388 389 0 3 Sadlier, Mr. Matthew male 0 0 367655 7.7292 Q
391 389 390 1 2 Lehmann, Miss. Bertha female 17.0 0 0 SC 1748 12.0 C
392 390 391 1 1 Carter, Mr. William Ernest male 36.0 1 2 113760 120.0 B96 B98 S
393 391 392 1 3 Jansson, Mr. Carl Olof male 21.0 0 0 350034 7.7958 S
394 392 393 0 3 Gustafsson, Mr. Johan Birger male 28.0 2 0 3101277 7.925 S
395 393 394 1 1 Newell, Miss. Marjorie female 23.0 1 0 35273 113.275 D36 C
396 394 395 1 3 Sandstrom, Mrs. Hjalmar (Agnes Charlotta Bengtsson) female 24.0 0 2 PP 9549 16.7 G6 S
397 395 396 0 3 Johansson, Mr. Erik male 22.0 0 0 350052 7.7958 S
398 396 397 0 3 Olsson, Miss. Elina female 31.0 0 0 350407 7.8542 S
399 397 398 0 2 McKane, Mr. Peter David male 46.0 0 0 28403 26.0 S
400 398 399 0 2 Pain, Dr. Alfred male 23.0 0 0 244278 10.5 S
401 399 400 1 2 Trout, Mrs. William H (Jessie L) female 28.0 0 0 240929 12.65 S
402 400 401 1 3 Niskanen, Mr. Juha male 39.0 0 0 STON/O 2. 3101289 7.925 S
403 401 402 0 3 Adams, Mr. John male 26.0 0 0 341826 8.05 S
404 402 403 0 3 Jussila, Miss. Mari Aina female 21.0 1 0 4137 9.825 S
405 403 404 0 3 Hakkarainen, Mr. Pekka Pietari male 28.0 1 0 STON/O2. 3101279 15.85 S
406 404 405 0 3 Oreskovic, Miss. Marija female 20.0 0 0 315096 8.6625 S
407 405 406 0 2 Gale, Mr. Shadrach male 34.0 1 0 28664 21.0 S
408 406 407 0 3 Widegren, Mr. Carl/Charles Peter male 51.0 0 0 347064 7.75 S
409 407 408 1 2 Richards, Master. William Rowe male 3.0 1 1 29106 18.75 S
410 408 409 0 3 Birkeland, Mr. Hans Martin Monsen male 21.0 0 0 312992 7.775 S
411 409 410 0 3 Lefebre, Miss. Ida female 3 1 4133 25.4667 S
412 410 411 0 3 Sdycoff, Mr. Todor male 0 0 349222 7.8958 S
413 411 412 0 3 Hart, Mr. Henry male 0 0 394140 6.8583 Q
414 412 413 1 1 Minahan, Miss. Daisy E female 33.0 1 0 19928 90.0 C78 Q
415 413 414 0 2 Cunningham, Mr. Alfred Fleming male 0 0 239853 0.0 S
416 414 415 1 3 Sundman, Mr. Johan Julian male 44.0 0 0 STON/O 2. 3101269 7.925 S
417 415 416 0 3 Meek, Mrs. Thomas (Annie Louise Rowley) female 0 0 343095 8.05 S
418 416 417 1 2 Drew, Mrs. James Vivian (Lulu Thorne Christian) female 34.0 1 1 28220 32.5 S
419 417 418 1 2 Silven, Miss. Lyyli Karoliina female 18.0 0 2 250652 13.0 S
420 418 419 0 2 Matthews, Mr. William John male 30.0 0 0 28228 13.0 S
421 419 420 0 3 Van Impe, Miss. Catharina female 10.0 0 2 345773 24.15 S
422 420 421 0 3 Gheorgheff, Mr. Stanio male 0 0 349254 7.8958 C
423 421 422 0 3 Charters, Mr. David male 21.0 0 0 A/5. 13032 7.7333 Q
424 422 423 0 3 Zimmerman, Mr. Leo male 29.0 0 0 315082 7.875 S
425 423 424 0 3 Danbom, Mrs. Ernst Gilbert (Anna Sigrid Maria Brogren) female 28.0 1 1 347080 14.4 S
426 424 425 0 3 Rosblom, Mr. Viktor Richard male 18.0 1 1 370129 20.2125 S
427 425 426 0 3 Wiseman, Mr. Phillippe male 0 0 A/4. 34244 7.25 S
428 426 427 1 2 Clarke, Mrs. Charles V (Ada Maria Winfield) female 28.0 1 0 2003 26.0 S
429 427 428 1 2 Phillips, Miss. Kate Florence ("Mrs Kate Louise Phillips Marshall") female 19.0 0 0 250655 26.0 S
430 428 429 0 3 Flynn, Mr. James male 0 0 364851 7.75 Q
431 429 430 1 3 Pickard, Mr. Berk (Berk Trembisky) male 32.0 0 0 SOTON/O.Q. 392078 8.05 E10 S
432 430 431 1 1 Bjornstrom-Steffansson, Mr. Mauritz Hakan male 28.0 0 0 110564 26.55 C52 S
433 431 432 1 3 Thorneycroft, Mrs. Percival (Florence Kate White) female 1 0 376564 16.1 S
434 432 433 1 2 Louch, Mrs. Charles Alexander (Alice Adelaide Slow) female 42.0 1 0 SC/AH 3085 26.0 S
435 433 434 0 3 Kallio, Mr. Nikolai Erland male 17.0 0 0 STON/O 2. 3101274 7.125 S
436 434 435 0 1 Silvey, Mr. William Baird male 50.0 1 0 13507 55.9 E44 S
437 435 436 1 1 Carter, Miss. Lucile Polk female 14.0 1 2 113760 120.0 B96 B98 S
438 436 437 0 3 Ford, Miss. Doolina Margaret "Daisy" female 21.0 2 2 W./C. 6608 34.375 S
439 437 438 1 2 Richards, Mrs. Sidney (Emily Hocking) female 24.0 2 3 29106 18.75 S
440 438 439 0 1 Fortune, Mr. Mark male 64.0 1 4 19950 263.0 C23 C25 C27 S
441 439 440 0 2 Kvillner, Mr. Johan Henrik Johannesson male 31.0 0 0 C.A. 18723 10.5 S
442 440 441 1 2 Hart, Mrs. Benjamin (Esther Ada Bloomfield) female 45.0 1 1 F.C.C. 13529 26.25 S
443 441 442 0 3 Hampe, Mr. Leon male 20.0 0 0 345769 9.5 S
444 442 443 0 3 Petterson, Mr. Johan Emil male 25.0 1 0 347076 7.775 S
445 443 444 1 2 Reynaldo, Ms. Encarnacion female 28.0 0 0 230434 13.0 S
446 444 445 1 3 Johannesen-Bratthammer, Mr. Bernt male 0 0 65306 8.1125 S
447 445 446 1 1 Dodge, Master. Washington male 4.0 0 2 33638 81.8583 A34 S
448 446 447 1 2 Mellinger, Miss. Madeleine Violet female 13.0 0 1 250644 19.5 S
449 447 448 1 1 Seward, Mr. Frederic Kimber male 34.0 0 0 113794 26.55 S
450 448 449 1 3 Baclini, Miss. Marie Catherine female 5.0 2 1 2666 19.2583 C
451 449 450 1 1 Peuchen, Major. Arthur Godfrey male 52.0 0 0 113786 30.5 C104 S
452 450 451 0 2 West, Mr. Edwy Arthur male 36.0 1 2 C.A. 34651 27.75 S
453 451 452 0 3 Hagland, Mr. Ingvald Olai Olsen male 1 0 65303 19.9667 S
454 452 453 0 1 Foreman, Mr. Benjamin Laventall male 30.0 0 0 113051 27.75 C111 C
455 453 454 1 1 Goldenberg, Mr. Samuel L male 49.0 1 0 17453 89.1042 C92 C
456 454 455 0 3 Peduzzi, Mr. Joseph male 0 0 A/5 2817 8.05 S
457 455 456 1 3 Jalsevac, Mr. Ivan male 29.0 0 0 349240 7.8958 C
458 456 457 0 1 Millet, Mr. Francis Davis male 65.0 0 0 13509 26.55 E38 S
459 457 458 1 1 Kenyon, Mrs. Frederick R (Marion) female 1 0 17464 51.8625 D21 S
460 458 459 1 2 Toomey, Miss. Ellen female 50.0 0 0 F.C.C. 13531 10.5 S
461 459 460 0 3 O'Connor, Mr. Maurice male 0 0 371060 7.75 Q
462 460 461 1 1 Anderson, Mr. Harry male 48.0 0 0 19952 26.55 E12 S
463 461 462 0 3 Morley, Mr. William male 34.0 0 0 364506 8.05 S
464 462 463 0 1 Gee, Mr. Arthur H male 47.0 0 0 111320 38.5 E63 S
465 463 464 0 2 Milling, Mr. Jacob Christian male 48.0 0 0 234360 13.0 S
466 464 465 0 3 Maisner, Mr. Simon male 0 0 A/S 2816 8.05 S
467 465 466 0 3 Goncalves, Mr. Manuel Estanslas male 38.0 0 0 SOTON/O.Q. 3101306 7.05 S
468 466 467 0 2 Campbell, Mr. William male 0 0 239853 0.0 S
469 467 468 0 1 Smart, Mr. John Montgomery male 56.0 0 0 113792 26.55 S
470 468 469 0 3 Scanlan, Mr. James male 0 0 36209 7.725 Q
471 469 470 1 3 Baclini, Miss. Helene Barbara female 0.75 2 1 2666 19.2583 C
472 470 471 0 3 Keefe, Mr. Arthur male 0 0 323592 7.25 S
473 471 472 0 3 Cacic, Mr. Luka male 38.0 0 0 315089 8.6625 S
474 472 473 1 2 West, Mrs. Edwy Arthur (Ada Mary Worth) female 33.0 1 2 C.A. 34651 27.75 S
475 473 474 1 2 Jerwan, Mrs. Amin S (Marie Marthe Thuillard) female 23.0 0 0 SC/AH Basle 541 13.7917 D C
476 474 475 0 3 Strandberg, Miss. Ida Sofia female 22.0 0 0 7553 9.8375 S
477 475 476 0 1 Clifford, Mr. George Quincy male 0 0 110465 52.0 A14 S
478 476 477 0 2 Renouf, Mr. Peter Henry male 34.0 1 0 31027 21.0 S
479 477 478 0 3 Braund, Mr. Lewis Richard male 29.0 1 0 3460 7.0458 S
480 478 479 0 3 Karlsson, Mr. Nils August male 22.0 0 0 350060 7.5208 S
481 479 480 1 3 Hirvonen, Miss. Hildur E female 2.0 0 1 3101298 12.2875 S
482 480 481 0 3 Goodwin, Master. Harold Victor male 9.0 5 2 CA 2144 46.9 S
483 481 482 0 2 Frost, Mr. Anthony Wood "Archie" male 0 0 239854 0.0 S
484 482 483 0 3 Rouse, Mr. Richard Henry male 50.0 0 0 A/5 3594 8.05 S
485 483 484 1 3 Turkula, Mrs. (Hedwig) female 63.0 0 0 4134 9.5875 S
486 484 485 1 1 Bishop, Mr. Dickinson H male 25.0 1 0 11967 91.0792 B49 C
487 485 486 0 3 Lefebre, Miss. Jeannie female 3 1 4133 25.4667 S
488 486 487 1 1 Hoyt, Mrs. Frederick Maxfield (Jane Anne Forby) female 35.0 1 0 19943 90.0 C93 S
489 487 488 0 1 Kent, Mr. Edward Austin male 58.0 0 0 11771 29.7 B37 C
490 488 489 0 3 Somerton, Mr. Francis William male 30.0 0 0 A.5. 18509 8.05 S
491 489 490 1 3 Coutts, Master. Eden Leslie "Neville" male 9.0 1 1 C.A. 37671 15.9 S
492 490 491 0 3 Hagland, Mr. Konrad Mathias Reiersen male 1 0 65304 19.9667 S
493 491 492 0 3 Windelov, Mr. Einar male 21.0 0 0 SOTON/OQ 3101317 7.25 S
494 492 493 0 1 Molson, Mr. Harry Markland male 55.0 0 0 113787 30.5 C30 S
495 493 494 0 1 Artagaveytia, Mr. Ramon male 71.0 0 0 PC 17609 49.5042 C
496 494 495 0 3 Stanley, Mr. Edward Roland male 21.0 0 0 A/4 45380 8.05 S
497 495 496 0 3 Yousseff, Mr. Gerious male 0 0 2627 14.4583 C
498 496 497 1 1 Eustis, Miss. Elizabeth Mussey female 54.0 1 0 36947 78.2667 D20 C
499 497 498 0 3 Shellard, Mr. Frederick William male 0 0 C.A. 6212 15.1 S
500 498 499 0 1 Allison, Mrs. Hudson J C (Bessie Waldo Daniels) female 25.0 1 2 113781 151.55 C22 C26 S
501 499 500 0 3 Svensson, Mr. Olof male 24.0 0 0 350035 7.7958 S
502 500 501 0 3 Calic, Mr. Petar male 17.0 0 0 315086 8.6625 S
503 501 502 0 3 Canavan, Miss. Mary female 21.0 0 0 364846 7.75 Q
504 502 503 0 3 O'Sullivan, Miss. Bridget Mary female 0 0 330909 7.6292 Q
505 503 504 0 3 Laitinen, Miss. Kristina Sofia female 37.0 0 0 4135 9.5875 S
506 504 505 1 1 Maioni, Miss. Roberta female 16.0 0 0 110152 86.5 B79 S
507 505 506 0 1 Penasco y Castellana, Mr. Victor de Satode male 18.0 1 0 PC 17758 108.9 C65 C
508 506 507 1 2 Quick, Mrs. Frederick Charles (Jane Richards) female 33.0 0 2 26360 26.0 S
509 507 508 1 1 Bradley, Mr. George ("George Arthur Brayton") male 0 0 111427 26.55 S
510 508 509 0 3 Olsen, Mr. Henry Margido male 28.0 0 0 C 4001 22.525 S
511 509 510 1 3 Lang, Mr. Fang male 26.0 0 0 1601 56.4958 S
512 510 511 1 3 Daly, Mr. Eugene Patrick male 29.0 0 0 382651 7.75 Q
513 511 512 0 3 Webber, Mr. James male 0 0 SOTON/OQ 3101316 8.05 S
514 512 513 1 1 McGough, Mr. James Robert male 36.0 0 0 PC 17473 26.2875 E25 S
515 513 514 1 1 Rothschild, Mrs. Martin (Elizabeth L. Barrett) female 54.0 1 0 PC 17603 59.4 C
516 514 515 0 3 Coleff, Mr. Satio male 24.0 0 0 349209 7.4958 S
517 515 516 0 1 Walker, Mr. William Anderson male 47.0 0 0 36967 34.0208 D46 S
518 516 517 1 2 Lemore, Mrs. (Amelia Milley) female 34.0 0 0 C.A. 34260 10.5 F33 S
519 517 518 0 3 Ryan, Mr. Patrick male 0 0 371110 24.15 Q
520 518 519 1 2 Angle, Mrs. William A (Florence "Mary" Agnes Hughes) female 36.0 1 0 226875 26.0 S
521 519 520 0 3 Pavlovic, Mr. Stefo male 32.0 0 0 349242 7.8958 S
522 520 521 1 1 Perreault, Miss. Anne female 30.0 0 0 12749 93.5 B73 S
523 521 522 0 3 Vovk, Mr. Janko male 22.0 0 0 349252 7.8958 S
524 522 523 0 3 Lahoud, Mr. Sarkis male 0 0 2624 7.225 C
525 523 524 1 1 Hippach, Mrs. Louis Albert (Ida Sophia Fischer) female 44.0 0 1 111361 57.9792 B18 C
526 524 525 0 3 Kassem, Mr. Fared male 0 0 2700 7.2292 C
527 525 526 0 3 Farrell, Mr. James male 40.5 0 0 367232 7.75 Q
528 526 527 1 2 Ridsdale, Miss. Lucy female 50.0 0 0 W./C. 14258 10.5 S
529 527 528 0 1 Farthing, Mr. John male 0 0 PC 17483 221.7792 C95 S
530 528 529 0 3 Salonen, Mr. Johan Werner male 39.0 0 0 3101296 7.925 S
531 529 530 0 2 Hocking, Mr. Richard George male 23.0 2 1 29104 11.5 S
532 530 531 1 2 Quick, Miss. Phyllis May female 2.0 1 1 26360 26.0 S
533 531 532 0 3 Toufik, Mr. Nakli male 0 0 2641 7.2292 C
534 532 533 0 3 Elias, Mr. Joseph Jr male 17.0 1 1 2690 7.2292 C
535 533 534 1 3 Peter, Mrs. Catherine (Catherine Rizk) female 0 2 2668 22.3583 C
536 534 535 0 3 Cacic, Miss. Marija female 30.0 0 0 315084 8.6625 S
537 535 536 1 2 Hart, Miss. Eva Miriam female 7.0 0 2 F.C.C. 13529 26.25 S
538 536 537 0 1 Butt, Major. Archibald Willingham male 45.0 0 0 113050 26.55 B38 S
539 537 538 1 1 LeRoy, Miss. Bertha female 30.0 0 0 PC 17761 106.425 C
540 538 539 0 3 Risien, Mr. Samuel Beard male 0 0 364498 14.5 S
541 539 540 1 1 Frolicher, Miss. Hedwig Margaritha female 22.0 0 2 13568 49.5 B39 C
542 540 541 1 1 Crosby, Miss. Harriet R female 36.0 0 2 WE/P 5735 71.0 B22 S
543 541 542 0 3 Andersson, Miss. Ingeborg Constanzia female 9.0 4 2 347082 31.275 S
544 542 543 0 3 Andersson, Miss. Sigrid Elisabeth female 11.0 4 2 347082 31.275 S
545 543 544 1 2 Beane, Mr. Edward male 32.0 1 0 2908 26.0 S
546 544 545 0 1 Douglas, Mr. Walter Donald male 50.0 1 0 PC 17761 106.425 C86 C
547 545 546 0 1 Nicholson, Mr. Arthur Ernest male 64.0 0 0 693 26.0 S
548 546 547 1 2 Beane, Mrs. Edward (Ethel Clarke) female 19.0 1 0 2908 26.0 S
549 547 548 1 2 Padro y Manent, Mr. Julian male 0 0 SC/PARIS 2146 13.8625 C
550 548 549 0 3 Goldsmith, Mr. Frank John male 33.0 1 1 363291 20.525 S
551 549 550 1 2 Davies, Master. John Morgan Jr male 8.0 1 1 C.A. 33112 36.75 S
552 550 551 1 1 Thayer, Mr. John Borland Jr male 17.0 0 2 17421 110.8833 C70 C
553 551 552 0 2 Sharp, Mr. Percival James R male 27.0 0 0 244358 26.0 S
554 552 553 0 3 O'Brien, Mr. Timothy male 0 0 330979 7.8292 Q
555 553 554 1 3 Leeni, Mr. Fahim ("Philip Zenni") male 22.0 0 0 2620 7.225 C
556 554 555 1 3 Ohman, Miss. Velin female 22.0 0 0 347085 7.775 S
557 555 556 0 1 Wright, Mr. George male 62.0 0 0 113807 26.55 S
558 556 557 1 1 Duff Gordon, Lady. (Lucille Christiana Sutherland) ("Mrs Morgan") female 48.0 1 0 11755 39.6 A16 C
559 557 558 0 1 Robbins, Mr. Victor male 0 0 PC 17757 227.525 C
560 558 559 1 1 Taussig, Mrs. Emil (Tillie Mandelbaum) female 39.0 1 1 110413 79.65 E67 S
561 559 560 1 3 de Messemaeker, Mrs. Guillaume Joseph (Emma) female 36.0 1 0 345572 17.4 S
562 560 561 0 3 Morrow, Mr. Thomas Rowan male 0 0 372622 7.75 Q
563 561 562 0 3 Sivic, Mr. Husein male 40.0 0 0 349251 7.8958 S
564 562 563 0 2 Norman, Mr. Robert Douglas male 28.0 0 0 218629 13.5 S
565 563 564 0 3 Simmons, Mr. John male 0 0 SOTON/OQ 392082 8.05 S
566 564 565 0 3 Meanwell, Miss. (Marion Ogden) female 0 0 SOTON/O.Q. 392087 8.05 S
567 565 566 0 3 Davies, Mr. Alfred J male 24.0 2 0 A/4 48871 24.15 S
568 566 567 0 3 Stoytcheff, Mr. Ilia male 19.0 0 0 349205 7.8958 S
569 567 568 0 3 Palsson, Mrs. Nils (Alma Cornelia Berglund) female 29.0 0 4 349909 21.075 S
570 568 569 0 3 Doharr, Mr. Tannous male 0 0 2686 7.2292 C
571 569 570 1 3 Jonsson, Mr. Carl male 32.0 0 0 350417 7.8542 S
572 570 571 1 2 Harris, Mr. George male 62.0 0 0 S.W./PP 752 10.5 S
573 571 572 1 1 Appleton, Mrs. Edward Dale (Charlotte Lamson) female 53.0 2 0 11769 51.4792 C101 S
574 572 573 1 1 Flynn, Mr. John Irwin ("Irving") male 36.0 0 0 PC 17474 26.3875 E25 S
575 573 574 1 3 Kelly, Miss. Mary female 0 0 14312 7.75 Q
576 574 575 0 3 Rush, Mr. Alfred George John male 16.0 0 0 A/4. 20589 8.05 S
577 575 576 0 3 Patchett, Mr. George male 19.0 0 0 358585 14.5 S
578 576 577 1 2 Garside, Miss. Ethel female 34.0 0 0 243880 13.0 S
579 577 578 1 1 Silvey, Mrs. William Baird (Alice Munger) female 39.0 1 0 13507 55.9 E44 S
580 578 579 0 3 Caram, Mrs. Joseph (Maria Elias) female 1 0 2689 14.4583 C
581 579 580 1 3 Jussila, Mr. Eiriik male 32.0 0 0 STON/O 2. 3101286 7.925 S
582 580 581 1 2 Christy, Miss. Julie Rachel female 25.0 1 1 237789 30.0 S
583 581 582 1 1 Thayer, Mrs. John Borland (Marian Longstreth Morris) female 39.0 1 1 17421 110.8833 C68 C
584 582 583 0 2 Downton, Mr. William James male 54.0 0 0 28403 26.0 S
585 583 584 0 1 Ross, Mr. John Hugo male 36.0 0 0 13049 40.125 A10 C
586 584 585 0 3 Paulner, Mr. Uscher male 0 0 3411 8.7125 C
587 585 586 1 1 Taussig, Miss. Ruth female 18.0 0 2 110413 79.65 E68 S
588 586 587 0 2 Jarvis, Mr. John Denzil male 47.0 0 0 237565 15.0 S
589 587 588 1 1 Frolicher-Stehli, Mr. Maxmillian male 60.0 1 1 13567 79.2 B41 C
590 588 589 0 3 Gilinski, Mr. Eliezer male 22.0 0 0 14973 8.05 S
591 589 590 0 3 Murdlin, Mr. Joseph male 0 0 A./5. 3235 8.05 S
592 590 591 0 3 Rintamaki, Mr. Matti male 35.0 0 0 STON/O 2. 3101273 7.125 S
593 591 592 1 1 Stephenson, Mrs. Walter Bertram (Martha Eustis) female 52.0 1 0 36947 78.2667 D20 C
594 592 593 0 3 Elsbury, Mr. William James male 47.0 0 0 A/5 3902 7.25 S
595 593 594 0 3 Bourke, Miss. Mary female 0 2 364848 7.75 Q
596 594 595 0 2 Chapman, Mr. John Henry male 37.0 1 0 SC/AH 29037 26.0 S
597 595 596 0 3 Van Impe, Mr. Jean Baptiste male 36.0 1 1 345773 24.15 S
598 596 597 1 2 Leitch, Miss. Jessie Wills female 0 0 248727 33.0 S
599 597 598 0 3 Johnson, Mr. Alfred male 49.0 0 0 LINE 0.0 S
600 598 599 0 3 Boulos, Mr. Hanna male 0 0 2664 7.225 C
601 599 600 1 1 Duff Gordon, Sir. Cosmo Edmund ("Mr Morgan") male 49.0 1 0 PC 17485 56.9292 A20 C
602 600 601 1 2 Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy) female 24.0 2 1 243847 27.0 S
603 601 602 0 3 Slabenoff, Mr. Petco male 0 0 349214 7.8958 S
604 602 603 0 1 Harrington, Mr. Charles H male 0 0 113796 42.4 S
605 603 604 0 3 Torber, Mr. Ernst William male 44.0 0 0 364511 8.05 S
606 604 605 1 1 Homer, Mr. Harry ("Mr E Haven") male 35.0 0 0 111426 26.55 C
607 605 606 0 3 Lindell, Mr. Edvard Bengtsson male 36.0 1 0 349910 15.55 S
608 606 607 0 3 Karaic, Mr. Milan male 30.0 0 0 349246 7.8958 S
609 607 608 1 1 Daniel, Mr. Robert Williams male 27.0 0 0 113804 30.5 S
610 608 609 1 2 Laroche, Mrs. Joseph (Juliette Marie Louise Lafargue) female 22.0 1 2 SC/Paris 2123 41.5792 C
611 609 610 1 1 Shutes, Miss. Elizabeth W female 40.0 0 0 PC 17582 153.4625 C125 S
612 610 611 0 3 Andersson, Mrs. Anders Johan (Alfrida Konstantia Brogren) female 39.0 1 5 347082 31.275 S
613 611 612 0 3 Jardin, Mr. Jose Neto male 0 0 SOTON/O.Q. 3101305 7.05 S
614 612 613 1 3 Murphy, Miss. Margaret Jane female 1 0 367230 15.5 Q
615 613 614 0 3 Horgan, Mr. John male 0 0 370377 7.75 Q
616 614 615 0 3 Brocklebank, Mr. William Alfred male 35.0 0 0 364512 8.05 S
617 615 616 1 2 Herman, Miss. Alice female 24.0 1 2 220845 65.0 S
618 616 617 0 3 Danbom, Mr. Ernst Gilbert male 34.0 1 1 347080 14.4 S
619 617 618 0 3 Lobb, Mrs. William Arthur (Cordelia K Stanlick) female 26.0 1 0 A/5. 3336 16.1 S
620 618 619 1 2 Becker, Miss. Marion Louise female 4.0 2 1 230136 39.0 F4 S
621 619 620 0 2 Gavey, Mr. Lawrence male 26.0 0 0 31028 10.5 S
622 620 621 0 3 Yasbeck, Mr. Antoni male 27.0 1 0 2659 14.4542 C
623 621 622 1 1 Kimball, Mr. Edwin Nelson Jr male 42.0 1 0 11753 52.5542 D19 S
624 622 623 1 3 Nakid, Mr. Sahid male 20.0 1 1 2653 15.7417 C
625 623 624 0 3 Hansen, Mr. Henry Damsgaard male 21.0 0 0 350029 7.8542 S
626 624 625 0 3 Bowen, Mr. David John "Dai" male 21.0 0 0 54636 16.1 S
627 625 626 0 1 Sutton, Mr. Frederick male 61.0 0 0 36963 32.3208 D50 S
628 626 627 0 2 Kirkland, Rev. Charles Leonard male 57.0 0 0 219533 12.35 Q
629 627 628 1 1 Longley, Miss. Gretchen Fiske female 21.0 0 0 13502 77.9583 D9 S
630 628 629 0 3 Bostandyeff, Mr. Guentcho male 26.0 0 0 349224 7.8958 S
631 629 630 0 3 O'Connell, Mr. Patrick D male 0 0 334912 7.7333 Q
632 630 631 1 1 Barkworth, Mr. Algernon Henry Wilson male 80.0 0 0 27042 30.0 A23 S
633 631 632 0 3 Lundahl, Mr. Johan Svensson male 51.0 0 0 347743 7.0542 S
634 632 633 1 1 Stahelin-Maeglin, Dr. Max male 32.0 0 0 13214 30.5 B50 C
635 633 634 0 1 Parr, Mr. William Henry Marsh male 0 0 112052 0.0 S
636 634 635 0 3 Skoog, Miss. Mabel female 9.0 3 2 347088 27.9 S
637 635 636 1 2 Davis, Miss. Mary female 28.0 0 0 237668 13.0 S
638 636 637 0 3 Leinonen, Mr. Antti Gustaf male 32.0 0 0 STON/O 2. 3101292 7.925 S
639 637 638 0 2 Collyer, Mr. Harvey male 31.0 1 1 C.A. 31921 26.25 S
640 638 639 0 3 Panula, Mrs. Juha (Maria Emilia Ojala) female 41.0 0 5 3101295 39.6875 S
641 639 640 0 3 Thorneycroft, Mr. Percival male 1 0 376564 16.1 S
642 640 641 0 3 Jensen, Mr. Hans Peder male 20.0 0 0 350050 7.8542 S
643 641 642 1 1 Sagesser, Mlle. Emma female 24.0 0 0 PC 17477 69.3 B35 C
644 642 643 0 3 Skoog, Miss. Margit Elizabeth female 2.0 3 2 347088 27.9 S
645 643 644 1 3 Foo, Mr. Choong male 0 0 1601 56.4958 S
646 644 645 1 3 Baclini, Miss. Eugenie female 0.75 2 1 2666 19.2583 C
647 645 646 1 1 Harper, Mr. Henry Sleeper male 48.0 1 0 PC 17572 76.7292 D33 C
648 646 647 0 3 Cor, Mr. Liudevit male 19.0 0 0 349231 7.8958 S
649 647 648 1 1 Simonius-Blumer, Col. Oberst Alfons male 56.0 0 0 13213 35.5 A26 C
650 648 649 0 3 Willey, Mr. Edward male 0 0 S.O./P.P. 751 7.55 S
651 649 650 1 3 Stanley, Miss. Amy Zillah Elsie female 23.0 0 0 CA. 2314 7.55 S
652 650 651 0 3 Mitkoff, Mr. Mito male 0 0 349221 7.8958 S
653 651 652 1 2 Doling, Miss. Elsie female 18.0 0 1 231919 23.0 S
654 652 653 0 3 Kalvik, Mr. Johannes Halvorsen male 21.0 0 0 8475 8.4333 S
655 653 654 1 3 O'Leary, Miss. Hanora "Norah" female 0 0 330919 7.8292 Q
656 654 655 0 3 Hegarty, Miss. Hanora "Nora" female 18.0 0 0 365226 6.75 Q
657 655 656 0 2 Hickman, Mr. Leonard Mark male 24.0 2 0 S.O.C. 14879 73.5 S
658 656 657 0 3 Radeff, Mr. Alexander male 0 0 349223 7.8958 S
659 657 658 0 3 Bourke, Mrs. John (Catherine) female 32.0 1 1 364849 15.5 Q
660 658 659 0 2 Eitemiller, Mr. George Floyd male 23.0 0 0 29751 13.0 S
661 659 660 0 1 Newell, Mr. Arthur Webster male 58.0 0 2 35273 113.275 D48 C
662 660 661 1 1 Frauenthal, Dr. Henry William male 50.0 2 0 PC 17611 133.65 S
663 661 662 0 3 Badt, Mr. Mohamed male 40.0 0 0 2623 7.225 C
664 662 663 0 1 Colley, Mr. Edward Pomeroy male 47.0 0 0 5727 25.5875 E58 S
665 663 664 0 3 Coleff, Mr. Peju male 36.0 0 0 349210 7.4958 S
666 664 665 1 3 Lindqvist, Mr. Eino William male 20.0 1 0 STON/O 2. 3101285 7.925 S
667 665 666 0 2 Hickman, Mr. Lewis male 32.0 2 0 S.O.C. 14879 73.5 S
668 666 667 0 2 Butler, Mr. Reginald Fenton male 25.0 0 0 234686 13.0 S
669 667 668 0 3 Rommetvedt, Mr. Knud Paust male 0 0 312993 7.775 S
670 668 669 0 3 Cook, Mr. Jacob male 43.0 0 0 A/5 3536 8.05 S
671 669 670 1 1 Taylor, Mrs. Elmer Zebley (Juliet Cummins Wright) female 1 0 19996 52.0 C126 S
672 670 671 1 2 Brown, Mrs. Thomas William Solomon (Elizabeth Catherine Ford) female 40.0 1 1 29750 39.0 S
673 671 672 0 1 Davidson, Mr. Thornton male 31.0 1 0 F.C. 12750 52.0 B71 S
674 672 673 0 2 Mitchell, Mr. Henry Michael male 70.0 0 0 C.A. 24580 10.5 S
675 673 674 1 2 Wilhelms, Mr. Charles male 31.0 0 0 244270 13.0 S
676 674 675 0 2 Watson, Mr. Ennis Hastings male 0 0 239856 0.0 S
677 675 676 0 3 Edvardsson, Mr. Gustaf Hjalmar male 18.0 0 0 349912 7.775 S
678 676 677 0 3 Sawyer, Mr. Frederick Charles male 24.5 0 0 342826 8.05 S
679 677 678 1 3 Turja, Miss. Anna Sofia female 18.0 0 0 4138 9.8417 S
680 678 679 0 3 Goodwin, Mrs. Frederick (Augusta Tyler) female 43.0 1 6 CA 2144 46.9 S
681 679 680 1 1 Cardeza, Mr. Thomas Drake Martinez male 36.0 0 1 PC 17755 512.3292 B51 B53 B55 C
682 680 681 0 3 Peters, Miss. Katie female 0 0 330935 8.1375 Q
683 681 682 1 1 Hassab, Mr. Hammad male 27.0 0 0 PC 17572 76.7292 D49 C
684 682 683 0 3 Olsvigen, Mr. Thor Anderson male 20.0 0 0 6563 9.225 S
685 683 684 0 3 Goodwin, Mr. Charles Edward male 14.0 5 2 CA 2144 46.9 S
686 684 685 0 2 Brown, Mr. Thomas William Solomon male 60.0 1 1 29750 39.0 S
687 685 686 0 2 Laroche, Mr. Joseph Philippe Lemercier male 25.0 1 2 SC/Paris 2123 41.5792 C
688 686 687 0 3 Panula, Mr. Jaako Arnold male 14.0 4 1 3101295 39.6875 S
689 687 688 0 3 Dakic, Mr. Branko male 19.0 0 0 349228 10.1708 S
690 688 689 0 3 Fischer, Mr. Eberhard Thelander male 18.0 0 0 350036 7.7958 S
691 689 690 1 1 Madill, Miss. Georgette Alexandra female 15.0 0 1 24160 211.3375 B5 S
692 690 691 1 1 Dick, Mr. Albert Adrian male 31.0 1 0 17474 57.0 B20 S
693 691 692 1 3 Karun, Miss. Manca female 4.0 0 1 349256 13.4167 C
694 692 693 1 3 Lam, Mr. Ali male 0 0 1601 56.4958 S
695 693 694 0 3 Saad, Mr. Khalil male 25.0 0 0 2672 7.225 C
696 694 695 0 1 Weir, Col. John male 60.0 0 0 113800 26.55 S
697 695 696 0 2 Chapman, Mr. Charles Henry male 52.0 0 0 248731 13.5 S
698 696 697 0 3 Kelly, Mr. James male 44.0 0 0 363592 8.05 S
699 697 698 1 3 Mullens, Miss. Katherine "Katie" female 0 0 35852 7.7333 Q
700 698 699 0 1 Thayer, Mr. John Borland male 49.0 1 1 17421 110.8833 C68 C
701 699 700 0 3 Humblen, Mr. Adolf Mathias Nicolai Olsen male 42.0 0 0 348121 7.65 F G63 S
702 700 701 1 1 Astor, Mrs. John Jacob (Madeleine Talmadge Force) female 18.0 1 0 PC 17757 227.525 C62 C64 C
703 701 702 1 1 Silverthorne, Mr. Spencer Victor male 35.0 0 0 PC 17475 26.2875 E24 S
704 702 703 0 3 Barbara, Miss. Saiide female 18.0 0 1 2691 14.4542 C
705 703 704 0 3 Gallagher, Mr. Martin male 25.0 0 0 36864 7.7417 Q
706 704 705 0 3 Hansen, Mr. Henrik Juul male 26.0 1 0 350025 7.8542 S
707 705 706 0 2 Morley, Mr. Henry Samuel ("Mr Henry Marshall") male 39.0 0 0 250655 26.0 S
708 706 707 1 2 Kelly, Mrs. Florence "Fannie" female 45.0 0 0 223596 13.5 S
709 707 708 1 1 Calderhead, Mr. Edward Pennington male 42.0 0 0 PC 17476 26.2875 E24 S
710 708 709 1 1 Cleaver, Miss. Alice female 22.0 0 0 113781 151.55 S
711 709 710 1 3 Moubarek, Master. Halim Gonios ("William George") male 1 1 2661 15.2458 C
712 710 711 1 1 Mayne, Mlle. Berthe Antonine ("Mrs de Villiers") female 24.0 0 0 PC 17482 49.5042 C90 C
713 711 712 0 1 Klaber, Mr. Herman male 0 0 113028 26.55 C124 S
714 712 713 1 1 Taylor, Mr. Elmer Zebley male 48.0 1 0 19996 52.0 C126 S
715 713 714 0 3 Larsson, Mr. August Viktor male 29.0 0 0 7545 9.4833 S
716 714 715 0 2 Greenberg, Mr. Samuel male 52.0 0 0 250647 13.0 S
717 715 716 0 3 Soholt, Mr. Peter Andreas Lauritz Andersen male 19.0 0 0 348124 7.65 F G73 S
718 716 717 1 1 Endres, Miss. Caroline Louise female 38.0 0 0 PC 17757 227.525 C45 C
719 717 718 1 2 Troutt, Miss. Edwina Celia "Winnie" female 27.0 0 0 34218 10.5 E101 S
720 718 719 0 3 McEvoy, Mr. Michael male 0 0 36568 15.5 Q
721 719 720 0 3 Johnson, Mr. Malkolm Joackim male 33.0 0 0 347062 7.775 S
722 720 721 1 2 Harper, Miss. Annie Jessie "Nina" female 6.0 0 1 248727 33.0 S
723 721 722 0 3 Jensen, Mr. Svend Lauritz male 17.0 1 0 350048 7.0542 S
724 722 723 0 2 Gillespie, Mr. William Henry male 34.0 0 0 12233 13.0 S
725 723 724 0 2 Hodges, Mr. Henry Price male 50.0 0 0 250643 13.0 S
726 724 725 1 1 Chambers, Mr. Norman Campbell male 27.0 1 0 113806 53.1 E8 S
727 725 726 0 3 Oreskovic, Mr. Luka male 20.0 0 0 315094 8.6625 S
728 726 727 1 2 Renouf, Mrs. Peter Henry (Lillian Jefferys) female 30.0 3 0 31027 21.0 S
729 727 728 1 3 Mannion, Miss. Margareth female 0 0 36866 7.7375 Q
730 728 729 0 2 Bryhl, Mr. Kurt Arnold Gottfrid male 25.0 1 0 236853 26.0 S
731 729 730 0 3 Ilmakangas, Miss. Pieta Sofia female 25.0 1 0 STON/O2. 3101271 7.925 S
732 730 731 1 1 Allen, Miss. Elisabeth Walton female 29.0 0 0 24160 211.3375 B5 S
733 731 732 0 3 Hassan, Mr. Houssein G N male 11.0 0 0 2699 18.7875 C
734 732 733 0 2 Knight, Mr. Robert J male 0 0 239855 0.0 S
735 733 734 0 2 Berriman, Mr. William John male 23.0 0 0 28425 13.0 S
736 734 735 0 2 Troupiansky, Mr. Moses Aaron male 23.0 0 0 233639 13.0 S
737 735 736 0 3 Williams, Mr. Leslie male 28.5 0 0 54636 16.1 S
738 736 737 0 3 Ford, Mrs. Edward (Margaret Ann Watson) female 48.0 1 3 W./C. 6608 34.375 S
739 737 738 1 1 Lesurer, Mr. Gustave J male 35.0 0 0 PC 17755 512.3292 B101 C
740 738 739 0 3 Ivanoff, Mr. Kanio male 0 0 349201 7.8958 S
741 739 740 0 3 Nankoff, Mr. Minko male 0 0 349218 7.8958 S
742 740 741 1 1 Hawksford, Mr. Walter James male 0 0 16988 30.0 D45 S
743 741 742 0 1 Cavendish, Mr. Tyrell William male 36.0 1 0 19877 78.85 C46 S
744 742 743 1 1 Ryerson, Miss. Susan Parker "Suzette" female 21.0 2 2 PC 17608 262.375 B57 B59 B63 B66 C
745 743 744 0 3 McNamee, Mr. Neal male 24.0 1 0 376566 16.1 S
746 744 745 1 3 Stranden, Mr. Juho male 31.0 0 0 STON/O 2. 3101288 7.925 S
747 745 746 0 1 Crosby, Capt. Edward Gifford male 70.0 1 1 WE/P 5735 71.0 B22 S
748 746 747 0 3 Abbott, Mr. Rossmore Edward male 16.0 1 1 C.A. 2673 20.25 S
749 747 748 1 2 Sinkkonen, Miss. Anna female 30.0 0 0 250648 13.0 S
750 748 749 0 1 Marvin, Mr. Daniel Warner male 19.0 1 0 113773 53.1 D30 S
751 749 750 0 3 Connaghton, Mr. Michael male 31.0 0 0 335097 7.75 Q
752 750 751 1 2 Wells, Miss. Joan female 4.0 1 1 29103 23.0 S
753 751 752 1 3 Moor, Master. Meier male 6.0 0 1 392096 12.475 E121 S
754 752 753 0 3 Vande Velde, Mr. Johannes Joseph male 33.0 0 0 345780 9.5 S
755 753 754 0 3 Jonkoff, Mr. Lalio male 23.0 0 0 349204 7.8958 S
756 754 755 1 2 Herman, Mrs. Samuel (Jane Laver) female 48.0 1 2 220845 65.0 S
757 755 756 1 2 Hamalainen, Master. Viljo male 0.67 1 1 250649 14.5 S
758 756 757 0 3 Carlsson, Mr. August Sigfrid male 28.0 0 0 350042 7.7958 S
759 757 758 0 2 Bailey, Mr. Percy Andrew male 18.0 0 0 29108 11.5 S
760 758 759 0 3 Theobald, Mr. Thomas Leonard male 34.0 0 0 363294 8.05 S
761 759 760 1 1 Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards) female 33.0 0 0 110152 86.5 B77 S
762 760 761 0 3 Garfirth, Mr. John male 0 0 358585 14.5 S
763 761 762 0 3 Nirva, Mr. Iisakki Antino Aijo male 41.0 0 0 SOTON/O2 3101272 7.125 S
764 762 763 1 3 Barah, Mr. Hanna Assi male 20.0 0 0 2663 7.2292 C
765 763 764 1 1 Carter, Mrs. William Ernest (Lucile Polk) female 36.0 1 2 113760 120.0 B96 B98 S
766 764 765 0 3 Eklund, Mr. Hans Linus male 16.0 0 0 347074 7.775 S
767 765 766 1 1 Hogeboom, Mrs. John C (Anna Andrews) female 51.0 1 0 13502 77.9583 D11 S
768 766 767 0 1 Brewe, Dr. Arthur Jackson male 0 0 112379 39.6 C
769 767 768 0 3 Mangan, Miss. Mary female 30.5 0 0 364850 7.75 Q
770 768 769 0 3 Moran, Mr. Daniel J male 1 0 371110 24.15 Q
771 769 770 0 3 Gronnestad, Mr. Daniel Danielsen male 32.0 0 0 8471 8.3625 S
772 770 771 0 3 Lievens, Mr. Rene Aime male 24.0 0 0 345781 9.5 S
773 771 772 0 3 Jensen, Mr. Niels Peder male 48.0 0 0 350047 7.8542 S
774 772 773 0 2 Mack, Mrs. (Mary) female 57.0 0 0 S.O./P.P. 3 10.5 E77 S
775 773 774 0 3 Elias, Mr. Dibo male 0 0 2674 7.225 C
776 774 775 1 2 Hocking, Mrs. Elizabeth (Eliza Needs) female 54.0 1 3 29105 23.0 S
777 775 776 0 3 Myhrman, Mr. Pehr Fabian Oliver Malkolm male 18.0 0 0 347078 7.75 S
778 776 777 0 3 Tobin, Mr. Roger male 0 0 383121 7.75 F38 Q
779 777 778 1 3 Emanuel, Miss. Virginia Ethel female 5.0 0 0 364516 12.475 S
780 778 779 0 3 Kilgannon, Mr. Thomas J male 0 0 36865 7.7375 Q
781 779 780 1 1 Robert, Mrs. Edward Scott (Elisabeth Walton McMillan) female 43.0 0 1 24160 211.3375 B3 S
782 780 781 1 3 Ayoub, Miss. Banoura female 13.0 0 0 2687 7.2292 C
783 781 782 1 1 Dick, Mrs. Albert Adrian (Vera Gillespie) female 17.0 1 0 17474 57.0 B20 S
784 782 783 0 1 Long, Mr. Milton Clyde male 29.0 0 0 113501 30.0 D6 S
785 783 784 0 3 Johnston, Mr. Andrew G male 1 2 W./C. 6607 23.45 S
786 784 785 0 3 Ali, Mr. William male 25.0 0 0 SOTON/O.Q. 3101312 7.05 S
787 785 786 0 3 Harmer, Mr. Abraham (David Lishin) male 25.0 0 0 374887 7.25 S
788 786 787 1 3 Sjoblom, Miss. Anna Sofia female 18.0 0 0 3101265 7.4958 S
789 787 788 0 3 Rice, Master. George Hugh male 8.0 4 1 382652 29.125 Q
790 788 789 1 3 Dean, Master. Bertram Vere male 1.0 1 2 C.A. 2315 20.575 S
791 789 790 0 1 Guggenheim, Mr. Benjamin male 46.0 0 0 PC 17593 79.2 B82 B84 C
792 790 791 0 3 Keane, Mr. Andrew "Andy" male 0 0 12460 7.75 Q
793 791 792 0 2 Gaskell, Mr. Alfred male 16.0 0 0 239865 26.0 S
794 792 793 0 3 Sage, Miss. Stella Anna female 8 2 CA. 2343 69.55 S
795 793 794 0 1 Hoyt, Mr. William Fisher male 0 0 PC 17600 30.6958 C
796 794 795 0 3 Dantcheff, Mr. Ristiu male 25.0 0 0 349203 7.8958 S
797 795 796 0 2 Otter, Mr. Richard male 39.0 0 0 28213 13.0 S
798 796 797 1 1 Leader, Dr. Alice (Farnham) female 49.0 0 0 17465 25.9292 D17 S
799 797 798 1 3 Osman, Mrs. Mara female 31.0 0 0 349244 8.6833 S
800 798 799 0 3 Ibrahim Shawah, Mr. Yousseff male 30.0 0 0 2685 7.2292 C
801 799 800 0 3 Van Impe, Mrs. Jean Baptiste (Rosalie Paula Govaert) female 30.0 1 1 345773 24.15 S
802 800 801 0 2 Ponesell, Mr. Martin male 34.0 0 0 250647 13.0 S
803 801 802 1 2 Collyer, Mrs. Harvey (Charlotte Annie Tate) female 31.0 1 1 C.A. 31921 26.25 S
804 802 803 1 1 Carter, Master. William Thornton II male 11.0 1 2 113760 120.0 B96 B98 S
805 803 804 1 3 Thomas, Master. Assad Alexander male 0.42 0 1 2625 8.5167 C
806 804 805 1 3 Hedman, Mr. Oskar Arvid male 27.0 0 0 347089 6.975 S
807 805 806 0 3 Johansson, Mr. Karl Johan male 31.0 0 0 347063 7.775 S
808 806 807 0 1 Andrews, Mr. Thomas Jr male 39.0 0 0 112050 0.0 A36 S
809 807 808 0 3 Pettersson, Miss. Ellen Natalia female 18.0 0 0 347087 7.775 S
810 808 809 0 2 Meyer, Mr. August male 39.0 0 0 248723 13.0 S
811 809 810 1 1 Chambers, Mrs. Norman Campbell (Bertha Griggs) female 33.0 1 0 113806 53.1 E8 S
812 810 811 0 3 Alexander, Mr. William male 26.0 0 0 3474 7.8875 S
813 811 812 0 3 Lester, Mr. James male 39.0 0 0 A/4 48871 24.15 S
814 812 813 0 2 Slemen, Mr. Richard James male 35.0 0 0 28206 10.5 S
815 813 814 0 3 Andersson, Miss. Ebba Iris Alfrida female 6.0 4 2 347082 31.275 S
816 814 815 0 3 Tomlin, Mr. Ernest Portage male 30.5 0 0 364499 8.05 S
817 815 816 0 1 Fry, Mr. Richard male 0 0 112058 0.0 B102 S
818 816 817 0 3 Heininen, Miss. Wendla Maria female 23.0 0 0 STON/O2. 3101290 7.925 S
819 817 818 0 2 Mallet, Mr. Albert male 31.0 1 1 S.C./PARIS 2079 37.0042 C
820 818 819 0 3 Holm, Mr. John Fredrik Alexander male 43.0 0 0 C 7075 6.45 S
821 819 820 0 3 Skoog, Master. Karl Thorsten male 10.0 3 2 347088 27.9 S
822 820 821 1 1 Hays, Mrs. Charles Melville (Clara Jennings Gregg) female 52.0 1 1 12749 93.5 B69 S
823 821 822 1 3 Lulic, Mr. Nikola male 27.0 0 0 315098 8.6625 S
824 822 823 0 1 Reuchlin, Jonkheer. John George male 38.0 0 0 19972 0.0 S
825 823 824 1 3 Moor, Mrs. (Beila) female 27.0 0 1 392096 12.475 E121 S
826 824 825 0 3 Panula, Master. Urho Abraham male 2.0 4 1 3101295 39.6875 S
827 825 826 0 3 Flynn, Mr. John male 0 0 368323 6.95 Q
828 826 827 0 3 Lam, Mr. Len male 0 0 1601 56.4958 S
829 827 828 1 2 Mallet, Master. Andre male 1.0 0 2 S.C./PARIS 2079 37.0042 C
830 828 829 1 3 McCormack, Mr. Thomas Joseph male 0 0 367228 7.75 Q
831 829 830 1 1 Stone, Mrs. George Nelson (Martha Evelyn) female 62.0 0 0 113572 80.0 B28
832 830 831 1 3 Yasbeck, Mrs. Antoni (Selini Alexander) female 15.0 1 0 2659 14.4542 C
833 831 832 1 2 Richards, Master. George Sibley male 0.83 1 1 29106 18.75 S
834 832 833 0 3 Saad, Mr. Amin male 0 0 2671 7.2292 C
835 833 834 0 3 Augustsson, Mr. Albert male 23.0 0 0 347468 7.8542 S
836 834 835 0 3 Allum, Mr. Owen George male 18.0 0 0 2223 8.3 S
837 835 836 1 1 Compton, Miss. Sara Rebecca female 39.0 1 1 PC 17756 83.1583 E49 C
838 836 837 0 3 Pasic, Mr. Jakob male 21.0 0 0 315097 8.6625 S
839 837 838 0 3 Sirota, Mr. Maurice male 0 0 392092 8.05 S
840 838 839 1 3 Chip, Mr. Chang male 32.0 0 0 1601 56.4958 S
841 839 840 1 1 Marechal, Mr. Pierre male 0 0 11774 29.7 C47 C
842 840 841 0 3 Alhomaki, Mr. Ilmari Rudolf male 20.0 0 0 SOTON/O2 3101287 7.925 S
843 841 842 0 2 Mudd, Mr. Thomas Charles male 16.0 0 0 S.O./P.P. 3 10.5 S
844 842 843 1 1 Serepeca, Miss. Augusta female 30.0 0 0 113798 31.0 C
845 843 844 0 3 Lemberopolous, Mr. Peter L male 34.5 0 0 2683 6.4375 C
846 844 845 0 3 Culumovic, Mr. Jeso male 17.0 0 0 315090 8.6625 S
847 845 846 0 3 Abbing, Mr. Anthony male 42.0 0 0 C.A. 5547 7.55 S
848 846 847 0 3 Sage, Mr. Douglas Bullen male 8 2 CA. 2343 69.55 S
849 847 848 0 3 Markoff, Mr. Marin male 35.0 0 0 349213 7.8958 C
850 848 849 0 2 Harper, Rev. John male 28.0 0 1 248727 33.0 S
851 849 850 1 1 Goldenberg, Mrs. Samuel L (Edwiga Grabowska) female 1 0 17453 89.1042 C92 C
852 850 851 0 3 Andersson, Master. Sigvard Harald Elias male 4.0 4 2 347082 31.275 S
853 851 852 0 3 Svensson, Mr. Johan male 74.0 0 0 347060 7.775 S
854 852 853 0 3 Boulos, Miss. Nourelain female 9.0 1 1 2678 15.2458 C
855 853 854 1 1 Lines, Miss. Mary Conover female 16.0 0 1 PC 17592 39.4 D28 S
856 854 855 0 2 Carter, Mrs. Ernest Courtenay (Lilian Hughes) female 44.0 1 0 244252 26.0 S
857 855 856 1 3 Aks, Mrs. Sam (Leah Rosen) female 18.0 0 1 392091 9.35 S
858 856 857 1 1 Wick, Mrs. George Dennick (Mary Hitchcock) female 45.0 1 1 36928 164.8667 S
859 857 858 1 1 Daly, Mr. Peter Denis male 51.0 0 0 113055 26.55 E17 S
860 858 859 1 3 Baclini, Mrs. Solomon (Latifa Qurban) female 24.0 0 3 2666 19.2583 C
861 859 860 0 3 Razi, Mr. Raihed male 0 0 2629 7.2292 C
862 860 861 0 3 Hansen, Mr. Claus Peter male 41.0 2 0 350026 14.1083 S
863 861 862 0 2 Giles, Mr. Frederick Edward male 21.0 1 0 28134 11.5 S
864 862 863 1 1 Swift, Mrs. Frederick Joel (Margaret Welles Barron) female 48.0 0 0 17466 25.9292 D17 S
865 863 864 0 3 Sage, Miss. Dorothy Edith "Dolly" female 8 2 CA. 2343 69.55 S
866 864 865 0 2 Gill, Mr. John William male 24.0 0 0 233866 13.0 S
867 865 866 1 2 Bystrom, Mrs. (Karolina) female 42.0 0 0 236852 13.0 S
868 866 867 1 2 Duran y More, Miss. Asuncion female 27.0 1 0 SC/PARIS 2149 13.8583 C
869 867 868 0 1 Roebling, Mr. Washington Augustus II male 31.0 0 0 PC 17590 50.4958 A24 S
870 868 869 0 3 van Melkebeke, Mr. Philemon male 0 0 345777 9.5 S
871 869 870 1 3 Johnson, Master. Harold Theodor male 4.0 1 1 347742 11.1333 S
872 870 871 0 3 Balkic, Mr. Cerin male 26.0 0 0 349248 7.8958 S
873 871 872 1 1 Beckwith, Mrs. Richard Leonard (Sallie Monypeny) female 47.0 1 1 11751 52.5542 D35 S
874 872 873 0 1 Carlsson, Mr. Frans Olof male 33.0 0 0 695 5.0 B51 B53 B55 S
875 873 874 0 3 Vander Cruyssen, Mr. Victor male 47.0 0 0 345765 9.0 S
876 874 875 1 2 Abelson, Mrs. Samuel (Hannah Wizosky) female 28.0 1 0 P/PP 3381 24.0 C
877 875 876 1 3 Najib, Miss. Adele Kiamie "Jane" female 15.0 0 0 2667 7.225 C
878 876 877 0 3 Gustafsson, Mr. Alfred Ossian male 20.0 0 0 7534 9.8458 S
879 877 878 0 3 Petroff, Mr. Nedelio male 19.0 0 0 349212 7.8958 S
880 878 879 0 3 Laleff, Mr. Kristo male 0 0 349217 7.8958 S
881 879 880 1 1 Potter, Mrs. Thomas Jr (Lily Alexenia Wilson) female 56.0 0 1 11767 83.1583 C50 C
882 880 881 1 2 Shelley, Mrs. William (Imanita Parrish Hall) female 25.0 0 1 230433 26.0 S
883 881 882 0 3 Markun, Mr. Johann male 33.0 0 0 349257 7.8958 S
884 882 883 0 3 Dahlberg, Miss. Gerda Ulrika female 22.0 0 0 7552 10.5167 S
885 883 884 0 2 Banfield, Mr. Frederick James male 28.0 0 0 C.A./SOTON 34068 10.5 S
886 884 885 0 3 Sutehall, Mr. Henry Jr male 25.0 0 0 SOTON/OQ 392076 7.05 S
887 885 886 0 3 Rice, Mrs. William (Margaret Norton) female 39.0 0 5 382652 29.125 Q
888 886 887 0 2 Montvila, Rev. Juozas male 27.0 0 0 211536 13.0 S
889 887 888 1 1 Graham, Miss. Margaret Edith female 19.0 0 0 112053 30.0 B42 S
890 888 889 0 3 Johnston, Miss. Catherine Helen "Carrie" female 1 2 W./C. 6607 23.45 S
891 889 890 1 1 Behr, Mr. Karl Howell male 26.0 0 0 111369 30.0 C148 C
892 890 891 0 3 Dooley, Mr. Patrick male 32.0 0 0 370376 7.75 Q

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env python
# coding: utf-8
"""Lab: Titanic Survival Exploration with Decision Trees.

Loads the Titanic passenger dataset, preprocesses it (drops the Name
column, one-hot encodes categorical features, fills missing values with
zero), trains a baseline decision tree, then trains a tuned tree with
constrained growth to reduce overfitting, printing the accuracy of each
on the train and test splits.
"""

# Import libraries necessary for this project
import numpy as np
import pandas as pd
# from IPython.display import display # Allows the use of display() for DataFrames
# Pretty display for notebooks
# get_ipython().run_line_magic('matplotlib', 'inline')

# Set a random seed.
# NOTE: seeding only the stdlib `random` module does NOT make sklearn
# reproducible -- sklearn uses numpy's RNG, so seed that as well, and pass
# an explicit `random_state` to each estimator below.
import random
random.seed(42)
np.random.seed(42)

# Load the dataset
in_file = 'titanic_data.csv'
full_data = pd.read_csv(in_file)
# display(full_data.head())

# Features present for each passenger:
# - Survived: Outcome of survival (0 = No; 1 = Yes)
# - Pclass:   Socio-economic class (1 = Upper; 2 = Middle; 3 = Lower)
# - Name:     Name of passenger
# - Sex:      Sex of the passenger
# - Age:      Age of the passenger (some entries contain NaN)
# - SibSp:    Number of siblings and spouses of the passenger aboard
# - Parch:    Number of parents and children of the passenger aboard
# - Ticket:   Ticket number of the passenger
# - Fare:     Fare paid by the passenger
# - Cabin:    Cabin number of the passenger (some entries contain NaN)
# - Embarked: Port of embarkation (C = Cherbourg; Q = Queenstown; S = Southampton)

# Store the 'Survived' feature in a new variable (the prediction target)
# and remove it from the feature set.
outcomes = full_data['Survived']
features_raw = full_data.drop('Survived', axis=1)
# display(features_raw.head())

# Remove the names before one-hot encoding: every name is unique, so
# encoding 'Name' would add one column per passenger -- a huge, useless
# blow-up in dimensionality with no generalizable signal.
features_no_names = features_raw.drop(['Name'], axis=1)

# One-hot encode the remaining categorical features (Sex, Embarked, ...)
features = pd.get_dummies(features_no_names)

# Fill in any blanks (e.g. missing Age values) with zeroes.
features = features.fillna(0.0)
# display(features.head())

# ## Training the model
# Split the data into training and testing sets (80/20).
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    features, outcomes, test_size=0.2, random_state=42)

# Baseline: an unconstrained decision tree (fixed random_state so the
# tie-breaking among equally-good splits is reproducible).
from sklearn.tree import DecisionTreeClassifier
model = DecisionTreeClassifier(random_state=42)
model.fit(X_train, y_train)

# ## Testing the model
# Evaluate on both splits; a large train/test gap indicates overfitting.
y_train_pred = model.predict(X_train)
y_test_pred = model.predict(X_test)

from sklearn.metrics import accuracy_score
train_accuracy = accuracy_score(y_train, y_train_pred)
test_accuracy = accuracy_score(y_test, y_test_pred)
print('The training accuracy is', train_accuracy)
print('The test accuracy is', test_accuracy)

# # Exercise: Improving the model
# High training accuracy with lower testing accuracy suggests overfitting,
# so constrain the tree's growth (max_depth, min_samples_leaf,
# min_samples_split) to improve generalization.
new_model = DecisionTreeClassifier(max_depth=6, min_samples_leaf=6,
                                   min_samples_split=10, random_state=42)
new_model.fit(X_train, y_train)

new_y_train_pred = new_model.predict(X_train)
new_y_test_pred = new_model.predict(X_test)

new_train_accuracy = accuracy_score(y_train, new_y_train_pred)
new_test_accuracy = accuracy_score(y_test, new_y_test_pred)
print(f'The training accuracy on the new model is {new_train_accuracy:.4f}')
print(f'The test accuracy on the new model is {new_test_accuracy:.4f}')

View File

@@ -0,0 +1,243 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Lab: Titanic Survival Exploration with Decision Trees"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Getting Started\n",
"In this lab, you will see how decision trees work by implementing a decision tree in sklearn.\n",
"\n",
"We'll start by loading the dataset and displaying some of its rows."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Import libraries necessary for this project\n",
"import numpy as np\n",
"import pandas as pd\n",
"from IPython.display import display # Allows the use of display() for DataFrames\n",
"\n",
"# Pretty display for notebooks\n",
"%matplotlib inline\n",
"\n",
"# Set a random seed\n",
"import random\n",
"random.seed(42)\n",
"\n",
"# Load the dataset\n",
"in_file = 'titanic_data.csv'\n",
"full_data = pd.read_csv(in_file)\n",
"\n",
"# Print the first few entries of the RMS Titanic data\n",
"display(full_data.head())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Recall that these are the various features present for each passenger on the ship:\n",
"- **Survived**: Outcome of survival (0 = No; 1 = Yes)\n",
"- **Pclass**: Socio-economic class (1 = Upper class; 2 = Middle class; 3 = Lower class)\n",
"- **Name**: Name of passenger\n",
"- **Sex**: Sex of the passenger\n",
"- **Age**: Age of the passenger (Some entries contain `NaN`)\n",
"- **SibSp**: Number of siblings and spouses of the passenger aboard\n",
"- **Parch**: Number of parents and children of the passenger aboard\n",
"- **Ticket**: Ticket number of the passenger\n",
"- **Fare**: Fare paid by the passenger\n",
"- **Cabin** Cabin number of the passenger (Some entries contain `NaN`)\n",
"- **Embarked**: Port of embarkation of the passenger (C = Cherbourg; Q = Queenstown; S = Southampton)\n",
"\n",
"Since we're interested in the outcome of survival for each passenger or crew member, we can remove the **Survived** feature from this dataset and store it as its own separate variable `outcomes`. We will use these outcomes as our prediction targets. \n",
"Run the code cell below to remove **Survived** as a feature of the dataset and store it in `outcomes`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Store the 'Survived' feature in a new variable and remove it from the dataset\n",
"outcomes = full_data['Survived']\n",
"features_raw = full_data.drop('Survived', axis = 1)\n",
"\n",
"# Show the new dataset with 'Survived' removed\n",
"display(features_raw.head())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The very same sample of the RMS Titanic data now shows the **Survived** feature removed from the DataFrame. Note that `data` (the passenger data) and `outcomes` (the outcomes of survival) are now *paired*. That means for any passenger `data.loc[i]`, they have the survival outcome `outcomes[i]`.\n",
"\n",
"## Preprocessing the data\n",
"\n",
"Now, let's do some data preprocessing. First, we'll remove the names of the passengers, and then one-hot encode the features.\n",
"\n",
"**Question:** Why would it be a terrible idea to one-hot encode the data without removing the names?\n",
        "*(Write your answer here.)*"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Removing the names\n",
"features_no_names = features_raw.drop(['Name'], axis=1)\n",
"\n",
"# One-hot encoding\n",
"features = pd.get_dummies(features_no_names)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"And now we'll fill in any blanks with zeroes."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"features = features.fillna(0.0)\n",
"display(features.head())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## (TODO) Training the model\n",
"\n",
"Now we're ready to train a model in sklearn. First, let's split the data into training and testing sets. Then we'll train the model on the training set."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.model_selection import train_test_split\n",
"X_train, X_test, y_train, y_test = train_test_split(features, outcomes, test_size=0.2, random_state=42)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Import the classifier from sklearn\n",
"from sklearn.tree import DecisionTreeClassifier\n",
"\n",
"# TODO: Define the classifier, and fit it to the data\n",
"model = None"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Testing the model\n",
"Now, let's see how our model does, let's calculate the accuracy over both the training and the testing set."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Making predictions\n",
"y_train_pred = model.predict(X_train)\n",
"y_test_pred = model.predict(X_test)\n",
"\n",
"# Calculate the accuracy\n",
"from sklearn.metrics import accuracy_score\n",
"train_accuracy = accuracy_score(y_train, y_train_pred)\n",
"test_accuracy = accuracy_score(y_test, y_test_pred)\n",
"print('The training accuracy is', train_accuracy)\n",
"print('The test accuracy is', test_accuracy)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Exercise: Improving the model\n",
"\n",
"Ok, high training accuracy and a lower testing accuracy. We may be overfitting a bit.\n",
"\n",
"So now it's your turn to shine! Train a new model, and try to specify some parameters in order to improve the testing accuracy, such as:\n",
"- `max_depth`\n",
"- `min_samples_leaf`\n",
"- `min_samples_split`\n",
"\n",
"You can use your intuition, trial and error, or even better, feel free to use Grid Search!\n",
"\n",
"**Challenge:** Try to get to 85% accuracy on the testing set. If you'd like a hint, take a look at the solutions notebook next."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# TODO: Train the model\n",
"\n",
"# TODO: Make predictions\n",
"\n",
"# TODO: Calculate the accuracy"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 1
}

View File

@@ -0,0 +1,764 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Lab: Titanic Survival Exploration with Decision Trees"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Getting Started\n",
"In this lab, you will see how decision trees work by implementing a decision tree in sklearn.\n",
"\n",
"We'll start by loading the dataset and displaying some of its rows."
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>PassengerId</th>\n",
" <th>Survived</th>\n",
" <th>Pclass</th>\n",
" <th>Name</th>\n",
" <th>Sex</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Ticket</th>\n",
" <th>Fare</th>\n",
" <th>Cabin</th>\n",
" <th>Embarked</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>3</td>\n",
" <td>Braund, Mr. Owen Harris</td>\n",
" <td>male</td>\n",
" <td>22.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>A/5 21171</td>\n",
" <td>7.2500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
" <td>female</td>\n",
" <td>38.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>PC 17599</td>\n",
" <td>71.2833</td>\n",
" <td>C85</td>\n",
" <td>C</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>3</td>\n",
" <td>1</td>\n",
" <td>3</td>\n",
" <td>Heikkinen, Miss. Laina</td>\n",
" <td>female</td>\n",
" <td>26.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>STON/O2. 3101282</td>\n",
" <td>7.9250</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
" <td>female</td>\n",
" <td>35.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>113803</td>\n",
" <td>53.1000</td>\n",
" <td>C123</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5</td>\n",
" <td>0</td>\n",
" <td>3</td>\n",
" <td>Allen, Mr. William Henry</td>\n",
" <td>male</td>\n",
" <td>35.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>373450</td>\n",
" <td>8.0500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" PassengerId Survived Pclass \\\n",
"0 1 0 3 \n",
"1 2 1 1 \n",
"2 3 1 3 \n",
"3 4 1 1 \n",
"4 5 0 3 \n",
"\n",
" Name Sex Age SibSp \\\n",
"0 Braund, Mr. Owen Harris male 22.0 1 \n",
"1 Cumings, Mrs. John Bradley (Florence Briggs Th... female 38.0 1 \n",
"2 Heikkinen, Miss. Laina female 26.0 0 \n",
"3 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35.0 1 \n",
"4 Allen, Mr. William Henry male 35.0 0 \n",
"\n",
" Parch Ticket Fare Cabin Embarked \n",
"0 0 A/5 21171 7.2500 NaN S \n",
"1 0 PC 17599 71.2833 C85 C \n",
"2 0 STON/O2. 3101282 7.9250 NaN S \n",
"3 0 113803 53.1000 C123 S \n",
"4 0 373450 8.0500 NaN S "
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Import libraries necessary for this project\n",
"import numpy as np\n",
"import pandas as pd\n",
"from IPython.display import display # Allows the use of display() for DataFrames\n",
"\n",
"# Pretty display for notebooks\n",
"%matplotlib inline\n",
"\n",
"# Set a random seed\n",
"import random\n",
"random.seed(42)\n",
"\n",
"# Load the dataset\n",
"in_file = 'titanic_data.csv'\n",
"full_data = pd.read_csv(in_file)\n",
"\n",
"# Print the first few entries of the RMS Titanic data\n",
"display(full_data.head())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Recall that these are the various features present for each passenger on the ship:\n",
"- **Survived**: Outcome of survival (0 = No; 1 = Yes)\n",
"- **Pclass**: Socio-economic class (1 = Upper class; 2 = Middle class; 3 = Lower class)\n",
"- **Name**: Name of passenger\n",
"- **Sex**: Sex of the passenger\n",
"- **Age**: Age of the passenger (Some entries contain `NaN`)\n",
"- **SibSp**: Number of siblings and spouses of the passenger aboard\n",
"- **Parch**: Number of parents and children of the passenger \n",
"- **Ticket**: Ticket number of the passenger\n",
"- **Fare**: Fare paid by the passenger\n",
"- **Cabin** Cabin number of the passenger (Some entries contain `NaN`)\n",
"- **Embarked**: Port of embarkation of the passenger (C = Cherbourg; Q = Queenstown; S = Southampton)\n",
"\n",
"Since we're interested in the outcome of survival for each passenger or crew member, we can remove the **Survived** feature from this dataset and store it as its own separate variable `outcomes`. We will use these outcomes as our prediction targets. \n",
"Run the code cell below to remove **Survived** as a feature of the dataset and store it in `outcomes`."
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>PassengerId</th>\n",
" <th>Pclass</th>\n",
" <th>Name</th>\n",
" <th>Sex</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Ticket</th>\n",
" <th>Fare</th>\n",
" <th>Cabin</th>\n",
" <th>Embarked</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>3</td>\n",
" <td>Braund, Mr. Owen Harris</td>\n",
" <td>male</td>\n",
" <td>22.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>A/5 21171</td>\n",
" <td>7.2500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>1</td>\n",
" <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
" <td>female</td>\n",
" <td>38.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>PC 17599</td>\n",
" <td>71.2833</td>\n",
" <td>C85</td>\n",
" <td>C</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>Heikkinen, Miss. Laina</td>\n",
" <td>female</td>\n",
" <td>26.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>STON/O2. 3101282</td>\n",
" <td>7.9250</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4</td>\n",
" <td>1</td>\n",
" <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
" <td>female</td>\n",
" <td>35.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>113803</td>\n",
" <td>53.1000</td>\n",
" <td>C123</td>\n",
" <td>S</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5</td>\n",
" <td>3</td>\n",
" <td>Allen, Mr. William Henry</td>\n",
" <td>male</td>\n",
" <td>35.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>373450</td>\n",
" <td>8.0500</td>\n",
" <td>NaN</td>\n",
" <td>S</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" PassengerId Pclass Name \\\n",
"0 1 3 Braund, Mr. Owen Harris \n",
"1 2 1 Cumings, Mrs. John Bradley (Florence Briggs Th... \n",
"2 3 3 Heikkinen, Miss. Laina \n",
"3 4 1 Futrelle, Mrs. Jacques Heath (Lily May Peel) \n",
"4 5 3 Allen, Mr. William Henry \n",
"\n",
" Sex Age SibSp Parch Ticket Fare Cabin Embarked \n",
"0 male 22.0 1 0 A/5 21171 7.2500 NaN S \n",
"1 female 38.0 1 0 PC 17599 71.2833 C85 C \n",
"2 female 26.0 0 0 STON/O2. 3101282 7.9250 NaN S \n",
"3 female 35.0 1 0 113803 53.1000 C123 S \n",
"4 male 35.0 0 0 373450 8.0500 NaN S "
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Store the 'Survived' feature in a new variable and remove it from the dataset\n",
"outcomes = full_data['Survived']\n",
"features_raw = full_data.drop('Survived', axis = 1)\n",
"\n",
"# Show the new dataset with 'Survived' removed\n",
"display(features_raw.head())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The very same sample of the RMS Titanic data now shows the **Survived** feature removed from the DataFrame. Note that `data` (the passenger data) and `outcomes` (the outcomes of survival) are now *paired*. That means for any passenger `data.loc[i]`, they have the survival outcome `outcomes[i]`.\n",
"\n",
"## Preprocessing the data\n",
"\n",
"Now, let's do some data preprocessing. First, we'll remove the names of the passengers, and then one-hot encode the features.\n",
"\n",
"**Question:** Why would it be a terrible idea to one-hot encode the data without removing the names?\n",
    "(Answer: passenger names are essentially unique, so one-hot encoding them would add one mostly-zero column per passenger — hundreds of useless features that invite overfitting.)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"# Removing the names\n",
"features_no_names = features_raw.drop(['Name'], axis=1)\n",
"\n",
"# One-hot encoding\n",
"features = pd.get_dummies(features_no_names)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"And now we'll fill in any blanks with zeroes."
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>PassengerId</th>\n",
" <th>Pclass</th>\n",
" <th>Age</th>\n",
" <th>SibSp</th>\n",
" <th>Parch</th>\n",
" <th>Fare</th>\n",
" <th>Sex_female</th>\n",
" <th>Sex_male</th>\n",
" <th>Ticket_110152</th>\n",
" <th>Ticket_110413</th>\n",
" <th>...</th>\n",
" <th>Cabin_F G73</th>\n",
" <th>Cabin_F2</th>\n",
" <th>Cabin_F33</th>\n",
" <th>Cabin_F38</th>\n",
" <th>Cabin_F4</th>\n",
" <th>Cabin_G6</th>\n",
" <th>Cabin_T</th>\n",
" <th>Embarked_C</th>\n",
" <th>Embarked_Q</th>\n",
" <th>Embarked_S</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>3</td>\n",
" <td>22.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>7.2500</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>1</td>\n",
" <td>38.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>71.2833</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>26.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>7.9250</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4</td>\n",
" <td>1</td>\n",
" <td>35.0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>53.1000</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5</td>\n",
" <td>3</td>\n",
" <td>35.0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>8.0500</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>...</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>0</td>\n",
" <td>1</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>5 rows × 839 columns</p>\n",
"</div>"
],
"text/plain": [
" PassengerId Pclass Age SibSp Parch Fare Sex_female Sex_male \\\n",
"0 1 3 22.0 1 0 7.2500 0 1 \n",
"1 2 1 38.0 1 0 71.2833 1 0 \n",
"2 3 3 26.0 0 0 7.9250 1 0 \n",
"3 4 1 35.0 1 0 53.1000 1 0 \n",
"4 5 3 35.0 0 0 8.0500 0 1 \n",
"\n",
" Ticket_110152 Ticket_110413 ... Cabin_F G73 Cabin_F2 Cabin_F33 \\\n",
"0 0 0 ... 0 0 0 \n",
"1 0 0 ... 0 0 0 \n",
"2 0 0 ... 0 0 0 \n",
"3 0 0 ... 0 0 0 \n",
"4 0 0 ... 0 0 0 \n",
"\n",
" Cabin_F38 Cabin_F4 Cabin_G6 Cabin_T Embarked_C Embarked_Q Embarked_S \n",
"0 0 0 0 0 0 0 1 \n",
"1 0 0 0 0 1 0 0 \n",
"2 0 0 0 0 0 0 1 \n",
"3 0 0 0 0 0 0 1 \n",
"4 0 0 0 0 0 0 1 \n",
"\n",
"[5 rows x 839 columns]"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"features = features.fillna(0.0)\n",
"display(features.head())"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## (TODO) Training the model\n",
"\n",
"Now we're ready to train a model in sklearn. First, let's split the data into training and testing sets. Then we'll train the model on the training set."
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.model_selection import train_test_split\n",
"X_train, X_test, y_train, y_test = train_test_split(features, outcomes, test_size=0.2, random_state=42)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n",
" max_features=None, max_leaf_nodes=None,\n",
" min_impurity_decrease=0.0, min_impurity_split=None,\n",
" min_samples_leaf=1, min_samples_split=2,\n",
" min_weight_fraction_leaf=0.0, presort=False, random_state=None,\n",
" splitter='best')"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Import the classifier from sklearn\n",
"from sklearn.tree import DecisionTreeClassifier\n",
"\n",
"# TODO: Define the classifier, and fit it to the data\n",
"model = DecisionTreeClassifier()\n",
"model.fit(X_train, y_train)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Testing the model\n",
"Now, let's see how our model does, let's calculate the accuracy over both the training and the testing set."
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The training accuracy is 1.0\n",
"The test accuracy is 0.815642458101\n"
]
}
],
"source": [
"# Making predictions\n",
"y_train_pred = model.predict(X_train)\n",
"y_test_pred = model.predict(X_test)\n",
"\n",
"# Calculate the accuracy\n",
"from sklearn.metrics import accuracy_score\n",
"train_accuracy = accuracy_score(y_train, y_train_pred)\n",
"test_accuracy = accuracy_score(y_test, y_test_pred)\n",
"print('The training accuracy is', train_accuracy)\n",
"print('The test accuracy is', test_accuracy)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Exercise: Improving the model\n",
"\n",
"Ok, high training accuracy and a lower testing accuracy. We may be overfitting a bit.\n",
"\n",
"So now it's your turn to shine! Train a new model, and try to specify some parameters in order to improve the testing accuracy, such as:\n",
"- `max_depth`\n",
"- `min_samples_leaf`\n",
"- `min_samples_split`\n",
"\n",
"You can use your intuition, trial and error, or even better, feel free to use Grid Search!\n",
"\n",
"**Challenge:** Try to get to 85% accuracy on the testing set. If you'd like a hint, take a look at the solutions notebook next."
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"The training accuracy on the new model is 0.8820\n",
"The test accuracy on the new model is 0.8603\n"
]
}
],
"source": [
"# TODO: Train the model\n",
"new_model = DecisionTreeClassifier(max_depth=10, min_samples_leaf=6, min_samples_split=8)\n",
"new_model.fit(X_train, y_train)\n",
"\n",
"# TODO: Make predictions\n",
"new_y_train_pred = new_model.predict(X_train)\n",
"new_y_test_pred = new_model.predict(X_test)\n",
"\n",
"# TODO: Calculate the accuracy\n",
"new_train_accuracy = accuracy_score(y_train, new_y_train_pred)\n",
"new_test_accuracy = accuracy_score(y_test, new_y_test_pred)\n",
"\n",
"print(f'The training accuracy on the new model is {new_train_accuracy:.4f}')\n",
"print(f'The test accuracy on the new model is {new_test_accuracy:.4f}')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 1
}

View File

@@ -0,0 +1,92 @@
import numpy as np
# Setting a random seed, feel free to change it and see different solutions.
np.random.seed(42)
# TODO: Fill in code in the function below to implement a gradient descent
# step for linear regression, following a squared error rule. See the docstring
# for parameters and returned variables.
def MSEStep(X, y, W, b, learn_rate=0.005):
    """
    This function implements the gradient descent step for squared error as a
    performance metric.
    Parameters
    X : array of predictor features, shape (n_points, n_features)
    y : array of outcome values, shape (n_points,)
    W : predictor feature coefficients, shape (n_features,)
    b : regression function intercept (scalar)
    learn_rate : learning rate
    Returns
    W_new : predictor feature coefficients following gradient descent step
    b_new : intercept following gradient descent step
    """
    # compute errors: residual between observed outcomes and current fit
    y_pred = np.matmul(X, W) + b
    error = y - y_pred
    # compute steps: the MSE gradient w.r.t. W is -(error @ X), so moving
    # against the gradient means adding learn_rate * (error @ X); likewise
    # the intercept gradient is -sum(error).
    W_new = W + learn_rate * np.matmul(error, X)
    b_new = b + learn_rate * error.sum()
    return W_new, b_new
    # NOTE: the original contained a second, unreachable `return` here;
    # it was dead code and has been removed.
# The parts of the script below will be run when you press the "Test Run"
# button. The gradient descent step will be performed multiple times on
# the provided dataset, and the returned list of regression coefficients
# will be plotted.
def miniBatchGD(X, y, batch_size=20, learn_rate=0.005, num_iter=25):
    """
    This function performs mini-batch gradient descent on a given dataset.
    Parameters
    X : array of predictor features
    y : array of outcome values
    batch_size : how many data points will be sampled for each iteration
    learn_rate : learning rate
    num_iter : number of batches used
    Returns
    regression_coef : array of slopes and intercepts generated by gradient
        descent procedure
    """
    num_rows = X.shape[0]
    weights = np.zeros(X.shape[1])  # slope coefficients, one per feature
    intercept = 0                   # bias term

    # record the starting parameters, then one snapshot per iteration
    regression_coef = [np.hstack((weights, intercept))]
    for _ in range(num_iter):
        # sample a random mini-batch (with replacement) and take one step
        sample_idx = np.random.choice(range(num_rows), batch_size)
        weights, intercept = MSEStep(X[sample_idx, :], y[sample_idx],
                                     weights, intercept, learn_rate)
        regression_coef.append(np.hstack((weights, intercept)))
    return regression_coef
if __name__ == "__main__":
    # perform gradient descent on the CSV dataset (last column = outcome)
    data = np.loadtxt('data.csv', delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    regression_coef = miniBatchGD(X, y)

    # plot the results: earlier fits fade toward white, the final fit is darkest
    import matplotlib.pyplot as plt
    plt.figure()
    X_min, X_max = X.min(), X.max()
    counter = len(regression_coef)
    for W, b in regression_coef:
        counter -= 1
        color = [1 - 0.92 ** counter] * 3
        plt.plot([X_min, X_max], [X_min * W + b, X_max * W + b], color=color)
    plt.scatter(X, y, zorder=3)
    plt.show()

View File

@@ -0,0 +1,100 @@
-0.72407,2.23863
-2.40724,-0.00156
2.64837,3.01665
0.36092,2.31019
0.67312,2.05950
-0.45460,1.24736
2.20168,2.82497
1.15605,2.21802
0.50694,1.43644
-0.85952,1.74980
-0.59970,1.63259
1.46804,2.43461
-1.05659,1.02226
1.29177,3.11769
-0.74565,0.81194
0.15033,2.81910
-1.49627,0.53105
-0.72071,1.64845
0.32924,1.91416
-0.28053,2.11376
-1.36115,1.70969
0.74678,2.92253
0.10621,3.29827
0.03256,1.58565
-0.98290,2.30455
-1.15661,1.79169
0.09024,1.54723
-1.03816,1.06893
-0.00604,1.78802
0.16278,1.84746
-0.69869,1.58732
1.03857,1.94799
-0.11783,3.09324
-0.95409,1.86155
-0.81839,1.88817
-1.28802,1.39474
0.62822,1.71526
-2.29674,1.75695
-0.85601,1.12981
-1.75223,1.67000
-1.19662,0.66711
0.97781,3.11987
-1.17110,0.56924
0.15835,2.28231
-0.58918,1.23798
-1.79678,1.35803
-0.95727,1.75579
0.64556,1.91470
0.24625,2.33029
0.45917,3.25263
1.21036,2.07602
-0.60116,1.54254
0.26851,2.79202
0.49594,1.96178
-2.67877,0.95898
0.49402,1.96690
1.18643,3.06144
-0.17741,1.85984
0.57938,1.82967
-2.14926,0.62285
2.27700,3.63838
-1.05695,1.11807
1.68288,2.91735
-1.53513,1.99668
0.00099,1.76149
0.45520,2.31938
-0.37855,0.90172
1.35638,3.49432
0.01763,1.87838
2.21725,2.61171
-0.44442,2.06623
0.89583,3.04041
1.30499,2.42824
0.10883,0.63190
1.79466,2.95265
-0.00733,1.87546
0.79862,3.44953
-0.12353,1.53740
-1.34999,1.59958
-0.67825,1.57832
-0.17901,1.73312
0.12577,2.00244
1.11943,2.08990
-3.02296,1.09255
0.64965,1.28183
1.05994,2.32358
0.53360,1.75136
-0.73591,1.43076
-0.09569,2.81376
1.04694,2.56597
0.46511,2.36401
-0.75463,2.30161
-0.94159,1.94500
-0.09314,1.87619
-0.98641,1.46602
-0.92159,1.21538
0.76953,2.39377
0.03283,1.55730
-1.07619,0.70874
0.20174,1.76894
1 -0.72407 2.23863
2 -2.40724 -0.00156
3 2.64837 3.01665
4 0.36092 2.31019
5 0.67312 2.05950
6 -0.45460 1.24736
7 2.20168 2.82497
8 1.15605 2.21802
9 0.50694 1.43644
10 -0.85952 1.74980
11 -0.59970 1.63259
12 1.46804 2.43461
13 -1.05659 1.02226
14 1.29177 3.11769
15 -0.74565 0.81194
16 0.15033 2.81910
17 -1.49627 0.53105
18 -0.72071 1.64845
19 0.32924 1.91416
20 -0.28053 2.11376
21 -1.36115 1.70969
22 0.74678 2.92253
23 0.10621 3.29827
24 0.03256 1.58565
25 -0.98290 2.30455
26 -1.15661 1.79169
27 0.09024 1.54723
28 -1.03816 1.06893
29 -0.00604 1.78802
30 0.16278 1.84746
31 -0.69869 1.58732
32 1.03857 1.94799
33 -0.11783 3.09324
34 -0.95409 1.86155
35 -0.81839 1.88817
36 -1.28802 1.39474
37 0.62822 1.71526
38 -2.29674 1.75695
39 -0.85601 1.12981
40 -1.75223 1.67000
41 -1.19662 0.66711
42 0.97781 3.11987
43 -1.17110 0.56924
44 0.15835 2.28231
45 -0.58918 1.23798
46 -1.79678 1.35803
47 -0.95727 1.75579
48 0.64556 1.91470
49 0.24625 2.33029
50 0.45917 3.25263
51 1.21036 2.07602
52 -0.60116 1.54254
53 0.26851 2.79202
54 0.49594 1.96178
55 -2.67877 0.95898
56 0.49402 1.96690
57 1.18643 3.06144
58 -0.17741 1.85984
59 0.57938 1.82967
60 -2.14926 0.62285
61 2.27700 3.63838
62 -1.05695 1.11807
63 1.68288 2.91735
64 -1.53513 1.99668
65 0.00099 1.76149
66 0.45520 2.31938
67 -0.37855 0.90172
68 1.35638 3.49432
69 0.01763 1.87838
70 2.21725 2.61171
71 -0.44442 2.06623
72 0.89583 3.04041
73 1.30499 2.42824
74 0.10883 0.63190
75 1.79466 2.95265
76 -0.00733 1.87546
77 0.79862 3.44953
78 -0.12353 1.53740
79 -1.34999 1.59958
80 -0.67825 1.57832
81 -0.17901 1.73312
82 0.12577 2.00244
83 1.11943 2.08990
84 -3.02296 1.09255
85 0.64965 1.28183
86 1.05994 2.32358
87 0.53360 1.75136
88 -0.73591 1.43076
89 -0.09569 2.81376
90 1.04694 2.56597
91 0.46511 2.36401
92 -0.75463 2.30161
93 -0.94159 1.94500
94 -0.09314 1.87619
95 -0.98641 1.46602
96 -0.92159 1.21538
97 0.76953 2.39377
98 0.03283 1.55730
99 -1.07619 0.70874
100 0.20174 1.76894

View File

@@ -0,0 +1,25 @@
def MSEStep(X, y, W, b, learn_rate = 0.001):
    """
    This function implements the gradient descent step for squared error as a
    performance metric.
    Parameters
    X : array of predictor features
    y : array of outcome values
    W : predictor feature coefficients
    b : regression function intercept
    learn_rate : learning rate
    Returns
    W_new : predictor feature coefficients following gradient descent step
    b_new : intercept following gradient descent step
    """
    # residuals between the observed outcomes and the current prediction
    residual = y - (np.matmul(X, W) + b)
    # move each parameter against its squared-error gradient
    # (note: this file's quiz harness is expected to supply `np` —
    # `import numpy as np` is not visible here; verify in the runner)
    return (W + learn_rate * np.matmul(residual, X),
            b + learn_rate * residual.sum())

View File

@@ -0,0 +1,164 @@
Country,Life expectancy,BMI
Afghanistan,52.8,20.62058
Albania,76.8,26.44657
Algeria,75.5,24.59620
Andorra,84.6,27.63048
Angola,56.7,22.25083
Armenia,72.3,25.355420000000002
Australia,81.6,27.56373
Austria,80.4,26.467409999999997
Azerbaijan,69.2,25.65117
Bahamas,72.2,27.24594
Bangladesh,68.3,20.39742
Barbados,75.3,26.38439
Belarus,70.0,26.16443
Belgium,79.6,26.75915
Belize,70.7,27.02255
Benin,59.7,22.41835
Bhutan,70.7,22.82180
Bolivia,71.2,24.43335
Bosnia and Herzegovina,77.5,26.61163
Botswana,53.2,22.12984
Brazil,73.2,25.78623
Bulgaria,73.2,26.54286
Burkina Faso,58.0,21.27157
Burundi,59.1,21.50291
Cambodia,66.1,20.80496
Cameroon,56.6,23.68173
Canada,80.8,27.45210
Cape Verde,70.4,23.51522
Chad,54.3,21.48569
Chile,78.5,27.01542
China,73.4,22.92176
Colombia,76.2,24.94041
Comoros,67.1,22.06131
"Congo, Dem. Rep.",57.5,19.86692
"Congo, Rep.",58.8,21.87134
Costa Rica,79.8,26.47897
Cote d'Ivoire,55.4,22.56469
Croatia,76.2,26.59629
Cuba,77.6,25.06867
Cyprus,80.0,27.41899
Denmark,78.9,26.13287
Djibouti,61.8,23.38403
Ecuador,74.7,25.58841
Egypt,70.2,26.73243
El Salvador,73.7,26.36751
Eritrea,60.1,20.88509
Estonia,74.2,26.26446
Ethiopia,60.0,20.24700
Fiji,64.9,26.53078
Finland,79.6,26.73339
France,81.1,25.85329
French Polynesia,75.11,30.86752
Gabon,61.7,24.07620
Gambia,65.7,21.65029
Georgia,71.8,25.54942
Germany,80.0,27.16509
Ghana,62.0,22.84247
Greece,80.2,26.33786
Greenland,70.3,26.01359
Grenada,70.8,25.17988
Guatemala,71.2,25.29947
Guinea,57.1,22.52449
Guinea-Bissau,53.6,21.64338
Guyana,65.0,23.68465
Haiti,61.0,23.66302
Honduras,71.8,25.10872
Hungary,73.9,27.11568
Iceland,82.4,27.20687
India,64.7,20.95956
Indonesia,69.4,21.85576
Iran,73.1,25.31003
Iraq,66.6,26.71017
Ireland,80.1,27.65325
Israel,80.6,27.13151
Jamaica,75.1,24.00421
Japan,82.5,23.50004
Jordan,76.9,27.47362
Kazakhstan,67.1,26.29078
Kenya,60.8,21.59258
Kuwait,77.3,29.17211
Latvia,72.4,26.45693
Lesotho,44.5,21.90157
Liberia,59.9,21.89537
Libya,75.6,26.54164
Lithuania,72.1,26.86102
Luxembourg,81.0,27.43404
"Macedonia, FYR",74.5,26.34473
Madagascar,62.2,21.40347
Malawi,52.4,22.03468
Malaysia,74.5,24.73069
Maldives,78.5,23.21991
Mali,58.5,21.78881
Malta,80.7,27.68361
Marshall Islands,65.3,29.37337
Mauritania,67.9,22.62295
Mauritius,72.9,25.15669
Mexico,75.4,27.42468
Moldova,70.4,24.23690
Mongolia,64.8,24.88385
Montenegro,76.0,26.55412
Morocco,73.3,25.63182
Mozambique,54.0,21.93536
Myanmar,59.4,21.44932
Namibia,59.1,22.65008
Nepal,68.4,20.76344
Netherlands,80.3,26.01541
Nicaragua,77.0,25.77291
Niger,58.0,21.21958
Nigeria,59.2,23.03322
Norway,80.8,26.93424
Oman,76.2,26.24109
Pakistan,64.1,22.29914
Panama,77.3,26.26959
Papua New Guinea,58.6,25.01506
Paraguay,74.0,25.54223
Peru,78.2,24.77041
Philippines,69.8,22.87263
Poland,75.4,26.67380
Portugal,79.4,26.68445
Qatar,77.9,28.13138
Romania,73.2,25.41069
Russia,67.9,26.01131
Rwanda,64.1,22.55453
Samoa,72.3,30.42475
Sao Tome and Principe,66.0,23.51233
Senegal,63.5,21.92743
Serbia,74.3,26.51495
Sierra Leone,53.6,22.53139
Singapore,80.6,23.83996
Slovak Republic,74.9,26.92717
Slovenia,78.7,27.43983
Somalia,52.6,21.96917
South Africa,53.4,26.85538
Spain,81.1,27.49975
Sri Lanka,74.0,21.96671
Sudan,65.5,22.40484
Suriname,70.2,25.49887
Swaziland,45.1,23.16969
Sweden,81.1,26.37629
Switzerland,82.0,26.20195
Syria,76.1,26.91969
Tajikistan,69.6,23.77966
Tanzania,60.4,22.47792
Thailand,73.9,23.00803
Timor-Leste,69.9,20.59082
Togo,57.5,21.87875
Tonga,70.3,30.99563
Trinidad and Tobago,71.7,26.39669
Tunisia,76.8,25.15699
Turkey,77.8,26.70371
Turkmenistan,67.2,25.24796
Uganda,56.0,22.35833
Ukraine,67.8,25.42379
United Arab Emirates,75.6,28.05359
United Kingdom,79.7,27.39249
United States,78.3,28.45698
Uruguay,76.0,26.39123
Uzbekistan,69.6,25.32054
Vanuatu,63.4,26.78926
West Bank and Gaza,74.1,26.57750
Vietnam,74.1,20.91630
Zambia,51.1,20.68321
Zimbabwe,47.3,22.02660
1 Country Life expectancy BMI
2 Afghanistan 52.8 20.62058
3 Albania 76.8 26.44657
4 Algeria 75.5 24.59620
5 Andorra 84.6 27.63048
6 Angola 56.7 22.25083
7 Armenia 72.3 25.355420000000002
8 Australia 81.6 27.56373
9 Austria 80.4 26.467409999999997
10 Azerbaijan 69.2 25.65117
11 Bahamas 72.2 27.24594
12 Bangladesh 68.3 20.39742
13 Barbados 75.3 26.38439
14 Belarus 70.0 26.16443
15 Belgium 79.6 26.75915
16 Belize 70.7 27.02255
17 Benin 59.7 22.41835
18 Bhutan 70.7 22.82180
19 Bolivia 71.2 24.43335
20 Bosnia and Herzegovina 77.5 26.61163
21 Botswana 53.2 22.12984
22 Brazil 73.2 25.78623
23 Bulgaria 73.2 26.54286
24 Burkina Faso 58.0 21.27157
25 Burundi 59.1 21.50291
26 Cambodia 66.1 20.80496
27 Cameroon 56.6 23.68173
28 Canada 80.8 27.45210
29 Cape Verde 70.4 23.51522
30 Chad 54.3 21.48569
31 Chile 78.5 27.01542
32 China 73.4 22.92176
33 Colombia 76.2 24.94041
34 Comoros 67.1 22.06131
35 Congo, Dem. Rep. 57.5 19.86692
36 Congo, Rep. 58.8 21.87134
37 Costa Rica 79.8 26.47897
38 Cote d'Ivoire 55.4 22.56469
39 Croatia 76.2 26.59629
40 Cuba 77.6 25.06867
41 Cyprus 80.0 27.41899
42 Denmark 78.9 26.13287
43 Djibouti 61.8 23.38403
44 Ecuador 74.7 25.58841
45 Egypt 70.2 26.73243
46 El Salvador 73.7 26.36751
47 Eritrea 60.1 20.88509
48 Estonia 74.2 26.26446
49 Ethiopia 60.0 20.24700
50 Fiji 64.9 26.53078
51 Finland 79.6 26.73339
52 France 81.1 25.85329
53 French Polynesia 75.11 30.86752
54 Gabon 61.7 24.07620
55 Gambia 65.7 21.65029
56 Georgia 71.8 25.54942
57 Germany 80.0 27.16509
58 Ghana 62.0 22.84247
59 Greece 80.2 26.33786
60 Greenland 70.3 26.01359
61 Grenada 70.8 25.17988
62 Guatemala 71.2 25.29947
63 Guinea 57.1 22.52449
64 Guinea-Bissau 53.6 21.64338
65 Guyana 65.0 23.68465
66 Haiti 61.0 23.66302
67 Honduras 71.8 25.10872
68 Hungary 73.9 27.11568
69 Iceland 82.4 27.20687
70 India 64.7 20.95956
71 Indonesia 69.4 21.85576
72 Iran 73.1 25.31003
73 Iraq 66.6 26.71017
74 Ireland 80.1 27.65325
75 Israel 80.6 27.13151
76 Jamaica 75.1 24.00421
77 Japan 82.5 23.50004
78 Jordan 76.9 27.47362
79 Kazakhstan 67.1 26.29078
80 Kenya 60.8 21.59258
81 Kuwait 77.3 29.17211
82 Latvia 72.4 26.45693
83 Lesotho 44.5 21.90157
84 Liberia 59.9 21.89537
85 Libya 75.6 26.54164
86 Lithuania 72.1 26.86102
87 Luxembourg 81.0 27.43404
88 Macedonia, FYR 74.5 26.34473
89 Madagascar 62.2 21.40347
90 Malawi 52.4 22.03468
91 Malaysia 74.5 24.73069
92 Maldives 78.5 23.21991
93 Mali 58.5 21.78881
94 Malta 80.7 27.68361
95 Marshall Islands 65.3 29.37337
96 Mauritania 67.9 22.62295
97 Mauritius 72.9 25.15669
98 Mexico 75.4 27.42468
99 Moldova 70.4 24.23690
100 Mongolia 64.8 24.88385
101 Montenegro 76.0 26.55412
102 Morocco 73.3 25.63182
103 Mozambique 54.0 21.93536
104 Myanmar 59.4 21.44932
105 Namibia 59.1 22.65008
106 Nepal 68.4 20.76344
107 Netherlands 80.3 26.01541
108 Nicaragua 77.0 25.77291
109 Niger 58.0 21.21958
110 Nigeria 59.2 23.03322
111 Norway 80.8 26.93424
112 Oman 76.2 26.24109
113 Pakistan 64.1 22.29914
114 Panama 77.3 26.26959
115 Papua New Guinea 58.6 25.01506
116 Paraguay 74.0 25.54223
117 Peru 78.2 24.77041
118 Philippines 69.8 22.87263
119 Poland 75.4 26.67380
120 Portugal 79.4 26.68445
121 Qatar 77.9 28.13138
122 Romania 73.2 25.41069
123 Russia 67.9 26.01131
124 Rwanda 64.1 22.55453
125 Samoa 72.3 30.42475
126 Sao Tome and Principe 66.0 23.51233
127 Senegal 63.5 21.92743
128 Serbia 74.3 26.51495
129 Sierra Leone 53.6 22.53139
130 Singapore 80.6 23.83996
131 Slovak Republic 74.9 26.92717
132 Slovenia 78.7 27.43983
133 Somalia 52.6 21.96917
134 South Africa 53.4 26.85538
135 Spain 81.1 27.49975
136 Sri Lanka 74.0 21.96671
137 Sudan 65.5 22.40484
138 Suriname 70.2 25.49887
139 Swaziland 45.1 23.16969
140 Sweden 81.1 26.37629
141 Switzerland 82.0 26.20195
142 Syria 76.1 26.91969
143 Tajikistan 69.6 23.77966
144 Tanzania 60.4 22.47792
145 Thailand 73.9 23.00803
146 Timor-Leste 69.9 20.59082
147 Togo 57.5 21.87875
148 Tonga 70.3 30.99563
149 Trinidad and Tobago 71.7 26.39669
150 Tunisia 76.8 25.15699
151 Turkey 77.8 26.70371
152 Turkmenistan 67.2 25.24796
153 Uganda 56.0 22.35833
154 Ukraine 67.8 25.42379
155 United Arab Emirates 75.6 28.05359
156 United Kingdom 79.7 27.39249
157 United States 78.3 28.45698
158 Uruguay 76.0 26.39123
159 Uzbekistan 69.6 25.32054
160 Vanuatu 63.4 26.78926
161 West Bank and Gaza 74.1 26.57750
162 Vietnam 74.1 20.91630
163 Zambia 51.1 20.68321
164 Zimbabwe 47.3 22.02660

View File

@@ -0,0 +1,19 @@
from sklearn.linear_model import LinearRegression
import pandas as pd

# Load the BMI / life-expectancy dataset into a DataFrame.
bmi_life_data = pd.read_csv('data.csv')

# Fit a simple linear regression: life expectancy as a function of BMI.
bmi_life_model = LinearRegression()
bmi_life_model.fit(bmi_life_data[['BMI']],
                   bmi_life_data[['Life expectancy']])

# Predict life expectancy for a country with BMI 21.07931 (Laos).
laos_life_exp = bmi_life_model.predict([[21.07931]])
print(laos_life_exp)

View File

@@ -0,0 +1,18 @@
from sklearn.linear_model import LinearRegression
from sklearn.datasets import load_boston

# Load the Boston housing dataset.
# NOTE(review): load_boston was removed in scikit-learn 1.2 — this script
# assumes an older scikit-learn version; confirm the pinned dependency.
boston_data = load_boston()
x = boston_data['data']
y = boston_data['target']

# One sample house described by the 13 Boston features, in dataset order.
sample_house = [[2.29690000e-01, 0.00000000e+00, 1.05900000e+01,
                 0.00000000e+00, 4.89000000e-01, 6.32600000e+00, 5.25000000e+01,
                 4.35490000e+00, 4.00000000e+00, 2.77000000e+02, 1.86000000e+01,
                 3.94870000e+02, 1.09700000e+01]]

# Fit a linear model on the full dataset, then predict the sample's price.
model = LinearRegression()
model.fit(x, y)
prediction = model.predict(sample_house)
print(prediction)

View File

@@ -0,0 +1,21 @@
Var_X,Var_Y
-0.33532,6.66854
0.02160,3.86398
-1.19438,5.16161
-0.65046,8.43823
-0.28001,5.57201
1.93258,-11.13270
1.22620,-5.31226
0.74727,-4.63725
3.32853,3.80650
2.87457,-6.06084
-1.48662,7.22328
0.37629,2.38887
1.43918,-7.13415
0.24183,2.00412
-2.79140,4.29794
1.08176,-5.86553
2.81555,-5.20711
0.54924,-3.52863
2.36449,-10.16202
-1.01925,5.31123
1 Var_X Var_Y
2 -0.33532 6.66854
3 0.02160 3.86398
4 -1.19438 5.16161
5 -0.65046 8.43823
6 -0.28001 5.57201
7 1.93258 -11.13270
8 1.22620 -5.31226
9 0.74727 -4.63725
10 3.32853 3.80650
11 2.87457 -6.06084
12 -1.48662 7.22328
13 0.37629 2.38887
14 1.43918 -7.13415
15 0.24183 2.00412
16 -2.79140 4.29794
17 1.08176 -5.86553
18 2.81555 -5.20711
19 0.54924 -3.52863
20 2.36449 -10.16202
21 -1.01925 5.31123

View File

@@ -0,0 +1,23 @@
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

sns.set()

# Load the nonlinear (Var_X, Var_Y) dataset.
df = pd.read_csv('data.csv')
X = df[['Var_X']]
y = df[['Var_Y']]

# Expand the single predictor into polynomial terms up to degree 2; the
# expansion includes a bias column, so the model below drops its own
# intercept (fit_intercept=False).
poly_feat = PolynomialFeatures(degree=2)
X_poly = poly_feat.fit_transform(X)
poly_model = LinearRegression(fit_intercept=False).fit(X_poly, y)
print(poly_model)

View File

@@ -0,0 +1,22 @@
# Quiz solution: polynomial regression on a single predictor.
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

# Load the data and assign predictor and outcome variables; sklearn
# expects a 2-D feature matrix, hence the reshape to a column vector.
train_data = pd.read_csv('data.csv')
X = train_data['Var_X'].values.reshape(-1, 1)
y = train_data['Var_Y'].values

# Expand the predictor into degree-4 polynomial features (with bias column).
poly_feat = PolynomialFeatures(degree=4)
X_poly = poly_feat.fit_transform(X)

# Fit the linear model on the polynomial features; the bias column from
# PolynomialFeatures replaces the intercept, hence fit_intercept=False.
poly_model = LinearRegression(fit_intercept=False).fit(X_poly, y)

View File

@@ -0,0 +1,100 @@
1.25664,2.04978,-6.23640,4.71926,-4.26931,0.20590,12.31798
-3.89012,-0.37511,6.14979,4.94585,-3.57844,0.00640,23.67628
5.09784,0.98120,-0.29939,5.85805,0.28297,-0.20626,-1.53459
0.39034,-3.06861,-5.63488,6.43941,0.39256,-0.07084,-24.68670
5.84727,-0.15922,11.41246,7.52165,1.69886,0.29022,17.54122
-2.86202,-0.84337,-1.08165,0.67115,-2.48911,0.52328,9.39789
-7.09328,-0.07233,6.76632,13.06072,0.12876,-0.01048,11.73565
-7.17614,0.62875,-2.89924,-5.21458,-2.70344,-0.22035,4.42482
8.67430,2.09933,-11.23591,-5.99532,-2.79770,-0.08710,-5.94615
-6.03324,-4.16724,2.42063,-3.61827,1.96815,0.17723,-13.11848
8.67485,1.48271,-1.31205,-1.81154,2.67940,0.04803,-9.25647
4.36248,-2.69788,-4.60562,-0.12849,3.40617,-0.07841,-29.94048
9.97205,-0.61515,2.63039,2.81044,5.68249,-0.04495,-20.46775
-1.44556,0.18337,4.61021,-2.54824,0.86388,0.17696,7.12822
-3.90381,0.53243,2.83416,-5.42397,-0.06367,-0.22810,6.05628
-12.39824,-1.54269,-2.66748,10.82084,5.92054,0.13415,-32.91328
5.75911,-0.82222,10.24701,0.33635,0.26025,-0.02588,17.75036
-7.12657,3.28707,-0.22508,13.42902,2.16708,-0.09153,-2.80277
7.22736,1.27122,0.99188,-8.87118,-6.86533,0.09410,33.98791
-10.31393,2.23819,-7.87166,-3.44388,-1.43267,-0.07893,-3.18407
-8.25971,-0.15799,-1.81740,1.12972,4.24165,-0.01607,-20.57366
13.37454,-0.91051,4.61334,0.93989,4.81350,-0.07428,-12.66661
1.49973,-0.50929,-2.66670,-1.28560,-0.18299,-0.00552,-6.56370
-10.46766,0.73077,3.93791,-1.73489,-3.26768,0.02366,23.19621
-1.15898,3.14709,-4.73329,13.61355,-3.87487,-0.14112,13.89143
4.42275,-2.09867,3.06395,-0.45331,-2.07717,0.22815,10.29282
-3.34113,-0.31138,4.49844,-2.32619,-2.95757,-0.00793,21.21512
-1.85433,-1.32509,8.06274,12.75080,-0.89005,-0.04312,14.54248
0.85474,-0.50002,-3.52152,-4.30405,4.13943,-0.02834,-24.77918
0.33271,-5.28025,-4.95832,22.48546,4.95051,0.17153,-45.01710
-0.07308,0.51247,-1.38120,7.86552,3.31641,0.06808,-12.63583
2.99294,2.85192,5.51751,8.53749,4.30806,-0.17462,0.84415
1.41135,-1.01899,2.27500,5.27479,-4.90004,0.19508,23.54972
3.84816,-0.66249,-1.35364,16.51379,0.32115,0.41051,-2.28650
3.30223,0.23152,-2.16852,0.75257,-0.05749,-0.03427,-4.22022
-6.12524,-2.56204,0.79878,-3.36284,1.00396,0.06219,-9.10749
-7.47524,1.31401,-3.30847,4.83057,1.00104,-0.19851,-7.69059
5.84884,-0.53504,-0.19543,10.27451,6.98704,0.22706,-29.21246
6.44377,0.47687,-0.08731,22.88008,-2.86604,0.03142,10.90274
6.35366,-2.04444,1.98872,-1.45189,-1.24062,0.23626,4.62178
6.85563,-0.94543,5.16637,2.85611,4.64812,0.29535,-7.83647
1.61758,1.31067,-2.16795,8.07492,-0.17166,-0.10273,0.06922
3.80137,1.02276,-3.15429,6.09774,3.18885,-0.00163,-16.11486
-6.81855,-0.15776,-10.69117,8.07818,4.14656,0.10691,-38.47710
-6.43852,4.30120,2.63923,-1.98297,-0.89599,-0.08174,20.77790
-2.35292,1.26425,-6.80877,3.31220,-6.17515,-0.04764,14.92507
9.13580,-1.21425,1.17227,-6.33648,-0.85276,-0.13366,-0.17285
-3.02986,-0.48694,0.24329,-0.38830,-4.70410,-0.18065,15.95300
3.27244,2.22393,-1.96640,17.53694,1.62378,0.11539,-4.29743
-4.44346,-1.96429,0.22209,15.29785,-1.98503,0.40131,4.07647
-2.61294,-0.24905,-4.02974,-23.82024,-5.94171,-0.04932,16.50504
3.65962,1.69832,0.78025,9.88639,-1.61555,-0.18570,9.99506
2.22893,-4.62231,-3.33440,0.07179,0.21983,0.14348,-19.94698
-5.43092,1.39655,-2.79175,0.16622,-2.38112,-0.09009,6.49039
-5.88117,-3.04210,-0.87931,3.96197,-1.01125,0.08132,-6.01714
0.51401,-0.30742,6.01407,-6.85848,-3.61343,-0.15710,24.56965
4.45547,2.34283,0.98094,-4.66298,-3.79507,0.37084,27.19791
0.05320,0.27458,6.95838,7.50119,-5.50256,0.06913,36.21698
4.72057,0.17165,4.83822,-1.03917,4.11211,-0.14773,-6.32623
-11.60674,-1.15594,-10.23150,0.49843,0.32477,-0.14543,-28.54003
-7.55406,0.45765,10.67537,-15.12397,3.49680,0.20350,11.97581
-1.73618,-1.56867,3.98355,-5.16723,-1.20911,0.19377,9.55247
2.01963,-1.12612,1.16531,-2.71553,-5.39782,0.01086,21.83478
-1.68542,-1.08901,-3.55426,3.14201,0.82668,0.04372,-13.11204
-3.09104,-0.23295,-5.62436,-3.03831,0.77772,0.02000,-14.74251
-3.87717,0.74098,-2.88109,-2.88103,3.36945,-0.30445,-18.44363
-0.42754,-0.42819,5.02998,-3.45859,-4.21739,0.25281,29.20439
8.31292,2.30543,-1.52645,-8.39725,-2.65715,-0.30785,12.65607
8.96352,2.15330,7.97777,-2.99501,2.19453,0.11162,13.62118
-0.90896,-0.03845,11.60698,5.39133,1.58423,-0.23637,13.73746
2.03663,-0.49245,4.30331,17.83947,-0.96290,0.10803,10.85762
-1.72766,1.38544,1.88234,-0.58255,-1.55674,0.08176,16.49896
-2.40833,-0.00177,2.32146,-1.06438,2.92114,-0.05635,-8.16292
-1.22998,-1.81632,-2.81740,12.29083,-1.40781,-0.15404,-6.76994
-3.85332,-1.24892,-6.24187,0.95304,-3.66314,0.02746,-0.87206
-7.18419,-0.91048,-2.41759,2.46251,-5.11125,-0.05417,11.48350
5.69279,-0.66299,-3.40195,1.77690,3.70297,-0.02102,-23.71307
5.82082,1.75872,1.50493,-1.14792,-0.66104,0.14593,11.82506
0.98854,-0.91971,11.94650,1.36820,2.53711,0.30359,13.23011
1.55873,0.25462,2.37448,16.04402,-0.06938,-0.36479,-0.67043
-0.66650,-2.27045,6.40325,7.64815,1.58676,-0.11790,-3.12393
4.58728,-2.90732,-0.05803,2.27259,2.29507,0.13907,-16.76419
-11.73607,-2.26595,1.63461,6.21257,0.73723,0.03777,-7.00464
-2.03125,1.83364,1.57590,5.52329,-3.64759,0.06059,23.96407
4.63339,1.37232,-0.62675,13.46151,3.69937,-0.09897,-13.66325
-0.93955,-1.39664,-4.69027,-5.30208,-2.70883,0.07360,-0.26176
3.19531,-1.43186,3.82859,-9.83963,-2.83611,0.09403,14.30309
-0.66991,-0.33925,-0.26224,-6.71810,0.52439,0.00654,-2.45750
3.32705,-0.20431,-0.61940,-5.82014,-3.30832,-0.13399,9.94820
-3.01400,-1.40133,7.13418,-15.85676,3.92442,0.29137,-0.19544
10.75129,-0.08744,4.35843,-9.89202,-0.71794,0.12349,12.68742
4.74271,-1.32895,-2.73218,9.15129,0.93902,-0.17934,-15.58698
3.96678,-1.93074,-1.98368,-12.52082,7.35129,-0.30941,-40.20406
2.98664,1.85034,2.54075,-2.98750,0.37193,0.16048,9.08819
-6.73878,-1.08637,-1.55835,-3.93097,-3.02271,0.11860,6.24185
-4.58240,-1.27825,7.55098,8.83930,-3.80318,0.04386,26.14768
-10.00364,2.66002,-4.26776,-3.73792,-0.72349,-0.24617,0.76214
-4.32624,-2.30314,-8.16044,4.46366,-3.33569,-0.01655,-10.05262
-1.90167,-0.15858,-10.43466,4.89762,-0.64606,-0.14519,-19.63970
2.43213,2.41613,2.49949,-8.03891,-1.64164,-0.63444,12.76193
1 1.25664 2.04978 -6.23640 4.71926 -4.26931 0.20590 12.31798
2 -3.89012 -0.37511 6.14979 4.94585 -3.57844 0.00640 23.67628
3 5.09784 0.98120 -0.29939 5.85805 0.28297 -0.20626 -1.53459
4 0.39034 -3.06861 -5.63488 6.43941 0.39256 -0.07084 -24.68670
5 5.84727 -0.15922 11.41246 7.52165 1.69886 0.29022 17.54122
6 -2.86202 -0.84337 -1.08165 0.67115 -2.48911 0.52328 9.39789
7 -7.09328 -0.07233 6.76632 13.06072 0.12876 -0.01048 11.73565
8 -7.17614 0.62875 -2.89924 -5.21458 -2.70344 -0.22035 4.42482
9 8.67430 2.09933 -11.23591 -5.99532 -2.79770 -0.08710 -5.94615
10 -6.03324 -4.16724 2.42063 -3.61827 1.96815 0.17723 -13.11848
11 8.67485 1.48271 -1.31205 -1.81154 2.67940 0.04803 -9.25647
12 4.36248 -2.69788 -4.60562 -0.12849 3.40617 -0.07841 -29.94048
13 9.97205 -0.61515 2.63039 2.81044 5.68249 -0.04495 -20.46775
14 -1.44556 0.18337 4.61021 -2.54824 0.86388 0.17696 7.12822
15 -3.90381 0.53243 2.83416 -5.42397 -0.06367 -0.22810 6.05628
16 -12.39824 -1.54269 -2.66748 10.82084 5.92054 0.13415 -32.91328
17 5.75911 -0.82222 10.24701 0.33635 0.26025 -0.02588 17.75036
18 -7.12657 3.28707 -0.22508 13.42902 2.16708 -0.09153 -2.80277
19 7.22736 1.27122 0.99188 -8.87118 -6.86533 0.09410 33.98791
20 -10.31393 2.23819 -7.87166 -3.44388 -1.43267 -0.07893 -3.18407
21 -8.25971 -0.15799 -1.81740 1.12972 4.24165 -0.01607 -20.57366
22 13.37454 -0.91051 4.61334 0.93989 4.81350 -0.07428 -12.66661
23 1.49973 -0.50929 -2.66670 -1.28560 -0.18299 -0.00552 -6.56370
24 -10.46766 0.73077 3.93791 -1.73489 -3.26768 0.02366 23.19621
25 -1.15898 3.14709 -4.73329 13.61355 -3.87487 -0.14112 13.89143
26 4.42275 -2.09867 3.06395 -0.45331 -2.07717 0.22815 10.29282
27 -3.34113 -0.31138 4.49844 -2.32619 -2.95757 -0.00793 21.21512
28 -1.85433 -1.32509 8.06274 12.75080 -0.89005 -0.04312 14.54248
29 0.85474 -0.50002 -3.52152 -4.30405 4.13943 -0.02834 -24.77918
30 0.33271 -5.28025 -4.95832 22.48546 4.95051 0.17153 -45.01710
31 -0.07308 0.51247 -1.38120 7.86552 3.31641 0.06808 -12.63583
32 2.99294 2.85192 5.51751 8.53749 4.30806 -0.17462 0.84415
33 1.41135 -1.01899 2.27500 5.27479 -4.90004 0.19508 23.54972
34 3.84816 -0.66249 -1.35364 16.51379 0.32115 0.41051 -2.28650
35 3.30223 0.23152 -2.16852 0.75257 -0.05749 -0.03427 -4.22022
36 -6.12524 -2.56204 0.79878 -3.36284 1.00396 0.06219 -9.10749
37 -7.47524 1.31401 -3.30847 4.83057 1.00104 -0.19851 -7.69059
38 5.84884 -0.53504 -0.19543 10.27451 6.98704 0.22706 -29.21246
39 6.44377 0.47687 -0.08731 22.88008 -2.86604 0.03142 10.90274
40 6.35366 -2.04444 1.98872 -1.45189 -1.24062 0.23626 4.62178
41 6.85563 -0.94543 5.16637 2.85611 4.64812 0.29535 -7.83647
42 1.61758 1.31067 -2.16795 8.07492 -0.17166 -0.10273 0.06922
43 3.80137 1.02276 -3.15429 6.09774 3.18885 -0.00163 -16.11486
44 -6.81855 -0.15776 -10.69117 8.07818 4.14656 0.10691 -38.47710
45 -6.43852 4.30120 2.63923 -1.98297 -0.89599 -0.08174 20.77790
46 -2.35292 1.26425 -6.80877 3.31220 -6.17515 -0.04764 14.92507
47 9.13580 -1.21425 1.17227 -6.33648 -0.85276 -0.13366 -0.17285
48 -3.02986 -0.48694 0.24329 -0.38830 -4.70410 -0.18065 15.95300
49 3.27244 2.22393 -1.96640 17.53694 1.62378 0.11539 -4.29743
50 -4.44346 -1.96429 0.22209 15.29785 -1.98503 0.40131 4.07647
51 -2.61294 -0.24905 -4.02974 -23.82024 -5.94171 -0.04932 16.50504
52 3.65962 1.69832 0.78025 9.88639 -1.61555 -0.18570 9.99506
53 2.22893 -4.62231 -3.33440 0.07179 0.21983 0.14348 -19.94698
54 -5.43092 1.39655 -2.79175 0.16622 -2.38112 -0.09009 6.49039
55 -5.88117 -3.04210 -0.87931 3.96197 -1.01125 0.08132 -6.01714
56 0.51401 -0.30742 6.01407 -6.85848 -3.61343 -0.15710 24.56965
57 4.45547 2.34283 0.98094 -4.66298 -3.79507 0.37084 27.19791
58 0.05320 0.27458 6.95838 7.50119 -5.50256 0.06913 36.21698
59 4.72057 0.17165 4.83822 -1.03917 4.11211 -0.14773 -6.32623
60 -11.60674 -1.15594 -10.23150 0.49843 0.32477 -0.14543 -28.54003
61 -7.55406 0.45765 10.67537 -15.12397 3.49680 0.20350 11.97581
62 -1.73618 -1.56867 3.98355 -5.16723 -1.20911 0.19377 9.55247
63 2.01963 -1.12612 1.16531 -2.71553 -5.39782 0.01086 21.83478
64 -1.68542 -1.08901 -3.55426 3.14201 0.82668 0.04372 -13.11204
65 -3.09104 -0.23295 -5.62436 -3.03831 0.77772 0.02000 -14.74251
66 -3.87717 0.74098 -2.88109 -2.88103 3.36945 -0.30445 -18.44363
67 -0.42754 -0.42819 5.02998 -3.45859 -4.21739 0.25281 29.20439
68 8.31292 2.30543 -1.52645 -8.39725 -2.65715 -0.30785 12.65607
69 8.96352 2.15330 7.97777 -2.99501 2.19453 0.11162 13.62118
70 -0.90896 -0.03845 11.60698 5.39133 1.58423 -0.23637 13.73746
71 2.03663 -0.49245 4.30331 17.83947 -0.96290 0.10803 10.85762
72 -1.72766 1.38544 1.88234 -0.58255 -1.55674 0.08176 16.49896
73 -2.40833 -0.00177 2.32146 -1.06438 2.92114 -0.05635 -8.16292
74 -1.22998 -1.81632 -2.81740 12.29083 -1.40781 -0.15404 -6.76994
75 -3.85332 -1.24892 -6.24187 0.95304 -3.66314 0.02746 -0.87206
76 -7.18419 -0.91048 -2.41759 2.46251 -5.11125 -0.05417 11.48350
77 5.69279 -0.66299 -3.40195 1.77690 3.70297 -0.02102 -23.71307
78 5.82082 1.75872 1.50493 -1.14792 -0.66104 0.14593 11.82506
79 0.98854 -0.91971 11.94650 1.36820 2.53711 0.30359 13.23011
80 1.55873 0.25462 2.37448 16.04402 -0.06938 -0.36479 -0.67043
81 -0.66650 -2.27045 6.40325 7.64815 1.58676 -0.11790 -3.12393
82 4.58728 -2.90732 -0.05803 2.27259 2.29507 0.13907 -16.76419
83 -11.73607 -2.26595 1.63461 6.21257 0.73723 0.03777 -7.00464
84 -2.03125 1.83364 1.57590 5.52329 -3.64759 0.06059 23.96407
85 4.63339 1.37232 -0.62675 13.46151 3.69937 -0.09897 -13.66325
86 -0.93955 -1.39664 -4.69027 -5.30208 -2.70883 0.07360 -0.26176
87 3.19531 -1.43186 3.82859 -9.83963 -2.83611 0.09403 14.30309
88 -0.66991 -0.33925 -0.26224 -6.71810 0.52439 0.00654 -2.45750
89 3.32705 -0.20431 -0.61940 -5.82014 -3.30832 -0.13399 9.94820
90 -3.01400 -1.40133 7.13418 -15.85676 3.92442 0.29137 -0.19544
91 10.75129 -0.08744 4.35843 -9.89202 -0.71794 0.12349 12.68742
92 4.74271 -1.32895 -2.73218 9.15129 0.93902 -0.17934 -15.58698
93 3.96678 -1.93074 -1.98368 -12.52082 7.35129 -0.30941 -40.20406
94 2.98664 1.85034 2.54075 -2.98750 0.37193 0.16048 9.08819
95 -6.73878 -1.08637 -1.55835 -3.93097 -3.02271 0.11860 6.24185
96 -4.58240 -1.27825 7.55098 8.83930 -3.80318 0.04386 26.14768
97 -10.00364 2.66002 -4.26776 -3.73792 -0.72349 -0.24617 0.76214
98 -4.32624 -2.30314 -8.16044 4.46366 -3.33569 -0.01655 -10.05262
99 -1.90167 -0.15858 -10.43466 4.89762 -0.64606 -0.14519 -19.63970
100 2.43213 2.41613 2.49949 -8.03891 -1.64164 -0.63444 12.76193

View File

@@ -0,0 +1,15 @@
import pandas as pd
import numpy as np
from sklearn.linear_model import Lasso

# Load the assignment data (no header row: six feature columns, one target).
train_data = pd.read_csv('data.csv', header=None)

# Split into features (all but the last column) and target (last column).
X = train_data.iloc[:, :-1]
# Use iloc[:, -1] (a 1-D Series), not iloc[:, -1:] (a one-column DataFrame):
# a 2-D target makes sklearn warn and return coef_ with shape
# (1, n_features) instead of a flat vector. This also matches the
# companion standardized-Lasso script in this lesson.
y = train_data.iloc[:, -1]

# Fit a Lasso-regularized linear regression (default alpha=1.0).
lasso_reg = Lasso()
lasso_reg.fit(X, y)

# Inspect which coefficients the L1 penalty has driven to zero.
reg_coef = lasso_reg.coef_
print(reg_coef)

View File

@@ -0,0 +1,100 @@
1.25664,2.04978,-6.23640,4.71926,-4.26931,0.20590,12.31798
-3.89012,-0.37511,6.14979,4.94585,-3.57844,0.00640,23.67628
5.09784,0.98120,-0.29939,5.85805,0.28297,-0.20626,-1.53459
0.39034,-3.06861,-5.63488,6.43941,0.39256,-0.07084,-24.68670
5.84727,-0.15922,11.41246,7.52165,1.69886,0.29022,17.54122
-2.86202,-0.84337,-1.08165,0.67115,-2.48911,0.52328,9.39789
-7.09328,-0.07233,6.76632,13.06072,0.12876,-0.01048,11.73565
-7.17614,0.62875,-2.89924,-5.21458,-2.70344,-0.22035,4.42482
8.67430,2.09933,-11.23591,-5.99532,-2.79770,-0.08710,-5.94615
-6.03324,-4.16724,2.42063,-3.61827,1.96815,0.17723,-13.11848
8.67485,1.48271,-1.31205,-1.81154,2.67940,0.04803,-9.25647
4.36248,-2.69788,-4.60562,-0.12849,3.40617,-0.07841,-29.94048
9.97205,-0.61515,2.63039,2.81044,5.68249,-0.04495,-20.46775
-1.44556,0.18337,4.61021,-2.54824,0.86388,0.17696,7.12822
-3.90381,0.53243,2.83416,-5.42397,-0.06367,-0.22810,6.05628
-12.39824,-1.54269,-2.66748,10.82084,5.92054,0.13415,-32.91328
5.75911,-0.82222,10.24701,0.33635,0.26025,-0.02588,17.75036
-7.12657,3.28707,-0.22508,13.42902,2.16708,-0.09153,-2.80277
7.22736,1.27122,0.99188,-8.87118,-6.86533,0.09410,33.98791
-10.31393,2.23819,-7.87166,-3.44388,-1.43267,-0.07893,-3.18407
-8.25971,-0.15799,-1.81740,1.12972,4.24165,-0.01607,-20.57366
13.37454,-0.91051,4.61334,0.93989,4.81350,-0.07428,-12.66661
1.49973,-0.50929,-2.66670,-1.28560,-0.18299,-0.00552,-6.56370
-10.46766,0.73077,3.93791,-1.73489,-3.26768,0.02366,23.19621
-1.15898,3.14709,-4.73329,13.61355,-3.87487,-0.14112,13.89143
4.42275,-2.09867,3.06395,-0.45331,-2.07717,0.22815,10.29282
-3.34113,-0.31138,4.49844,-2.32619,-2.95757,-0.00793,21.21512
-1.85433,-1.32509,8.06274,12.75080,-0.89005,-0.04312,14.54248
0.85474,-0.50002,-3.52152,-4.30405,4.13943,-0.02834,-24.77918
0.33271,-5.28025,-4.95832,22.48546,4.95051,0.17153,-45.01710
-0.07308,0.51247,-1.38120,7.86552,3.31641,0.06808,-12.63583
2.99294,2.85192,5.51751,8.53749,4.30806,-0.17462,0.84415
1.41135,-1.01899,2.27500,5.27479,-4.90004,0.19508,23.54972
3.84816,-0.66249,-1.35364,16.51379,0.32115,0.41051,-2.28650
3.30223,0.23152,-2.16852,0.75257,-0.05749,-0.03427,-4.22022
-6.12524,-2.56204,0.79878,-3.36284,1.00396,0.06219,-9.10749
-7.47524,1.31401,-3.30847,4.83057,1.00104,-0.19851,-7.69059
5.84884,-0.53504,-0.19543,10.27451,6.98704,0.22706,-29.21246
6.44377,0.47687,-0.08731,22.88008,-2.86604,0.03142,10.90274
6.35366,-2.04444,1.98872,-1.45189,-1.24062,0.23626,4.62178
6.85563,-0.94543,5.16637,2.85611,4.64812,0.29535,-7.83647
1.61758,1.31067,-2.16795,8.07492,-0.17166,-0.10273,0.06922
3.80137,1.02276,-3.15429,6.09774,3.18885,-0.00163,-16.11486
-6.81855,-0.15776,-10.69117,8.07818,4.14656,0.10691,-38.47710
-6.43852,4.30120,2.63923,-1.98297,-0.89599,-0.08174,20.77790
-2.35292,1.26425,-6.80877,3.31220,-6.17515,-0.04764,14.92507
9.13580,-1.21425,1.17227,-6.33648,-0.85276,-0.13366,-0.17285
-3.02986,-0.48694,0.24329,-0.38830,-4.70410,-0.18065,15.95300
3.27244,2.22393,-1.96640,17.53694,1.62378,0.11539,-4.29743
-4.44346,-1.96429,0.22209,15.29785,-1.98503,0.40131,4.07647
-2.61294,-0.24905,-4.02974,-23.82024,-5.94171,-0.04932,16.50504
3.65962,1.69832,0.78025,9.88639,-1.61555,-0.18570,9.99506
2.22893,-4.62231,-3.33440,0.07179,0.21983,0.14348,-19.94698
-5.43092,1.39655,-2.79175,0.16622,-2.38112,-0.09009,6.49039
-5.88117,-3.04210,-0.87931,3.96197,-1.01125,0.08132,-6.01714
0.51401,-0.30742,6.01407,-6.85848,-3.61343,-0.15710,24.56965
4.45547,2.34283,0.98094,-4.66298,-3.79507,0.37084,27.19791
0.05320,0.27458,6.95838,7.50119,-5.50256,0.06913,36.21698
4.72057,0.17165,4.83822,-1.03917,4.11211,-0.14773,-6.32623
-11.60674,-1.15594,-10.23150,0.49843,0.32477,-0.14543,-28.54003
-7.55406,0.45765,10.67537,-15.12397,3.49680,0.20350,11.97581
-1.73618,-1.56867,3.98355,-5.16723,-1.20911,0.19377,9.55247
2.01963,-1.12612,1.16531,-2.71553,-5.39782,0.01086,21.83478
-1.68542,-1.08901,-3.55426,3.14201,0.82668,0.04372,-13.11204
-3.09104,-0.23295,-5.62436,-3.03831,0.77772,0.02000,-14.74251
-3.87717,0.74098,-2.88109,-2.88103,3.36945,-0.30445,-18.44363
-0.42754,-0.42819,5.02998,-3.45859,-4.21739,0.25281,29.20439
8.31292,2.30543,-1.52645,-8.39725,-2.65715,-0.30785,12.65607
8.96352,2.15330,7.97777,-2.99501,2.19453,0.11162,13.62118
-0.90896,-0.03845,11.60698,5.39133,1.58423,-0.23637,13.73746
2.03663,-0.49245,4.30331,17.83947,-0.96290,0.10803,10.85762
-1.72766,1.38544,1.88234,-0.58255,-1.55674,0.08176,16.49896
-2.40833,-0.00177,2.32146,-1.06438,2.92114,-0.05635,-8.16292
-1.22998,-1.81632,-2.81740,12.29083,-1.40781,-0.15404,-6.76994
-3.85332,-1.24892,-6.24187,0.95304,-3.66314,0.02746,-0.87206
-7.18419,-0.91048,-2.41759,2.46251,-5.11125,-0.05417,11.48350
5.69279,-0.66299,-3.40195,1.77690,3.70297,-0.02102,-23.71307
5.82082,1.75872,1.50493,-1.14792,-0.66104,0.14593,11.82506
0.98854,-0.91971,11.94650,1.36820,2.53711,0.30359,13.23011
1.55873,0.25462,2.37448,16.04402,-0.06938,-0.36479,-0.67043
-0.66650,-2.27045,6.40325,7.64815,1.58676,-0.11790,-3.12393
4.58728,-2.90732,-0.05803,2.27259,2.29507,0.13907,-16.76419
-11.73607,-2.26595,1.63461,6.21257,0.73723,0.03777,-7.00464
-2.03125,1.83364,1.57590,5.52329,-3.64759,0.06059,23.96407
4.63339,1.37232,-0.62675,13.46151,3.69937,-0.09897,-13.66325
-0.93955,-1.39664,-4.69027,-5.30208,-2.70883,0.07360,-0.26176
3.19531,-1.43186,3.82859,-9.83963,-2.83611,0.09403,14.30309
-0.66991,-0.33925,-0.26224,-6.71810,0.52439,0.00654,-2.45750
3.32705,-0.20431,-0.61940,-5.82014,-3.30832,-0.13399,9.94820
-3.01400,-1.40133,7.13418,-15.85676,3.92442,0.29137,-0.19544
10.75129,-0.08744,4.35843,-9.89202,-0.71794,0.12349,12.68742
4.74271,-1.32895,-2.73218,9.15129,0.93902,-0.17934,-15.58698
3.96678,-1.93074,-1.98368,-12.52082,7.35129,-0.30941,-40.20406
2.98664,1.85034,2.54075,-2.98750,0.37193,0.16048,9.08819
-6.73878,-1.08637,-1.55835,-3.93097,-3.02271,0.11860,6.24185
-4.58240,-1.27825,7.55098,8.83930,-3.80318,0.04386,26.14768
-10.00364,2.66002,-4.26776,-3.73792,-0.72349,-0.24617,0.76214
-4.32624,-2.30314,-8.16044,4.46366,-3.33569,-0.01655,-10.05262
-1.90167,-0.15858,-10.43466,4.89762,-0.64606,-0.14519,-19.63970
2.43213,2.41613,2.49949,-8.03891,-1.64164,-0.63444,12.76193
1 1.25664 2.04978 -6.23640 4.71926 -4.26931 0.20590 12.31798
2 -3.89012 -0.37511 6.14979 4.94585 -3.57844 0.00640 23.67628
3 5.09784 0.98120 -0.29939 5.85805 0.28297 -0.20626 -1.53459
4 0.39034 -3.06861 -5.63488 6.43941 0.39256 -0.07084 -24.68670
5 5.84727 -0.15922 11.41246 7.52165 1.69886 0.29022 17.54122
6 -2.86202 -0.84337 -1.08165 0.67115 -2.48911 0.52328 9.39789
7 -7.09328 -0.07233 6.76632 13.06072 0.12876 -0.01048 11.73565
8 -7.17614 0.62875 -2.89924 -5.21458 -2.70344 -0.22035 4.42482
9 8.67430 2.09933 -11.23591 -5.99532 -2.79770 -0.08710 -5.94615
10 -6.03324 -4.16724 2.42063 -3.61827 1.96815 0.17723 -13.11848
11 8.67485 1.48271 -1.31205 -1.81154 2.67940 0.04803 -9.25647
12 4.36248 -2.69788 -4.60562 -0.12849 3.40617 -0.07841 -29.94048
13 9.97205 -0.61515 2.63039 2.81044 5.68249 -0.04495 -20.46775
14 -1.44556 0.18337 4.61021 -2.54824 0.86388 0.17696 7.12822
15 -3.90381 0.53243 2.83416 -5.42397 -0.06367 -0.22810 6.05628
16 -12.39824 -1.54269 -2.66748 10.82084 5.92054 0.13415 -32.91328
17 5.75911 -0.82222 10.24701 0.33635 0.26025 -0.02588 17.75036
18 -7.12657 3.28707 -0.22508 13.42902 2.16708 -0.09153 -2.80277
19 7.22736 1.27122 0.99188 -8.87118 -6.86533 0.09410 33.98791
20 -10.31393 2.23819 -7.87166 -3.44388 -1.43267 -0.07893 -3.18407
21 -8.25971 -0.15799 -1.81740 1.12972 4.24165 -0.01607 -20.57366
22 13.37454 -0.91051 4.61334 0.93989 4.81350 -0.07428 -12.66661
23 1.49973 -0.50929 -2.66670 -1.28560 -0.18299 -0.00552 -6.56370
24 -10.46766 0.73077 3.93791 -1.73489 -3.26768 0.02366 23.19621
25 -1.15898 3.14709 -4.73329 13.61355 -3.87487 -0.14112 13.89143
26 4.42275 -2.09867 3.06395 -0.45331 -2.07717 0.22815 10.29282
27 -3.34113 -0.31138 4.49844 -2.32619 -2.95757 -0.00793 21.21512
28 -1.85433 -1.32509 8.06274 12.75080 -0.89005 -0.04312 14.54248
29 0.85474 -0.50002 -3.52152 -4.30405 4.13943 -0.02834 -24.77918
30 0.33271 -5.28025 -4.95832 22.48546 4.95051 0.17153 -45.01710
31 -0.07308 0.51247 -1.38120 7.86552 3.31641 0.06808 -12.63583
32 2.99294 2.85192 5.51751 8.53749 4.30806 -0.17462 0.84415
33 1.41135 -1.01899 2.27500 5.27479 -4.90004 0.19508 23.54972
34 3.84816 -0.66249 -1.35364 16.51379 0.32115 0.41051 -2.28650
35 3.30223 0.23152 -2.16852 0.75257 -0.05749 -0.03427 -4.22022
36 -6.12524 -2.56204 0.79878 -3.36284 1.00396 0.06219 -9.10749
37 -7.47524 1.31401 -3.30847 4.83057 1.00104 -0.19851 -7.69059
38 5.84884 -0.53504 -0.19543 10.27451 6.98704 0.22706 -29.21246
39 6.44377 0.47687 -0.08731 22.88008 -2.86604 0.03142 10.90274
40 6.35366 -2.04444 1.98872 -1.45189 -1.24062 0.23626 4.62178
41 6.85563 -0.94543 5.16637 2.85611 4.64812 0.29535 -7.83647
42 1.61758 1.31067 -2.16795 8.07492 -0.17166 -0.10273 0.06922
43 3.80137 1.02276 -3.15429 6.09774 3.18885 -0.00163 -16.11486
44 -6.81855 -0.15776 -10.69117 8.07818 4.14656 0.10691 -38.47710
45 -6.43852 4.30120 2.63923 -1.98297 -0.89599 -0.08174 20.77790
46 -2.35292 1.26425 -6.80877 3.31220 -6.17515 -0.04764 14.92507
47 9.13580 -1.21425 1.17227 -6.33648 -0.85276 -0.13366 -0.17285
48 -3.02986 -0.48694 0.24329 -0.38830 -4.70410 -0.18065 15.95300
49 3.27244 2.22393 -1.96640 17.53694 1.62378 0.11539 -4.29743
50 -4.44346 -1.96429 0.22209 15.29785 -1.98503 0.40131 4.07647
51 -2.61294 -0.24905 -4.02974 -23.82024 -5.94171 -0.04932 16.50504
52 3.65962 1.69832 0.78025 9.88639 -1.61555 -0.18570 9.99506
53 2.22893 -4.62231 -3.33440 0.07179 0.21983 0.14348 -19.94698
54 -5.43092 1.39655 -2.79175 0.16622 -2.38112 -0.09009 6.49039
55 -5.88117 -3.04210 -0.87931 3.96197 -1.01125 0.08132 -6.01714
56 0.51401 -0.30742 6.01407 -6.85848 -3.61343 -0.15710 24.56965
57 4.45547 2.34283 0.98094 -4.66298 -3.79507 0.37084 27.19791
58 0.05320 0.27458 6.95838 7.50119 -5.50256 0.06913 36.21698
59 4.72057 0.17165 4.83822 -1.03917 4.11211 -0.14773 -6.32623
60 -11.60674 -1.15594 -10.23150 0.49843 0.32477 -0.14543 -28.54003
61 -7.55406 0.45765 10.67537 -15.12397 3.49680 0.20350 11.97581
62 -1.73618 -1.56867 3.98355 -5.16723 -1.20911 0.19377 9.55247
63 2.01963 -1.12612 1.16531 -2.71553 -5.39782 0.01086 21.83478
64 -1.68542 -1.08901 -3.55426 3.14201 0.82668 0.04372 -13.11204
65 -3.09104 -0.23295 -5.62436 -3.03831 0.77772 0.02000 -14.74251
66 -3.87717 0.74098 -2.88109 -2.88103 3.36945 -0.30445 -18.44363
67 -0.42754 -0.42819 5.02998 -3.45859 -4.21739 0.25281 29.20439
68 8.31292 2.30543 -1.52645 -8.39725 -2.65715 -0.30785 12.65607
69 8.96352 2.15330 7.97777 -2.99501 2.19453 0.11162 13.62118
70 -0.90896 -0.03845 11.60698 5.39133 1.58423 -0.23637 13.73746
71 2.03663 -0.49245 4.30331 17.83947 -0.96290 0.10803 10.85762
72 -1.72766 1.38544 1.88234 -0.58255 -1.55674 0.08176 16.49896
73 -2.40833 -0.00177 2.32146 -1.06438 2.92114 -0.05635 -8.16292
74 -1.22998 -1.81632 -2.81740 12.29083 -1.40781 -0.15404 -6.76994
75 -3.85332 -1.24892 -6.24187 0.95304 -3.66314 0.02746 -0.87206
76 -7.18419 -0.91048 -2.41759 2.46251 -5.11125 -0.05417 11.48350
77 5.69279 -0.66299 -3.40195 1.77690 3.70297 -0.02102 -23.71307
78 5.82082 1.75872 1.50493 -1.14792 -0.66104 0.14593 11.82506
79 0.98854 -0.91971 11.94650 1.36820 2.53711 0.30359 13.23011
80 1.55873 0.25462 2.37448 16.04402 -0.06938 -0.36479 -0.67043
81 -0.66650 -2.27045 6.40325 7.64815 1.58676 -0.11790 -3.12393
82 4.58728 -2.90732 -0.05803 2.27259 2.29507 0.13907 -16.76419
83 -11.73607 -2.26595 1.63461 6.21257 0.73723 0.03777 -7.00464
84 -2.03125 1.83364 1.57590 5.52329 -3.64759 0.06059 23.96407
85 4.63339 1.37232 -0.62675 13.46151 3.69937 -0.09897 -13.66325
86 -0.93955 -1.39664 -4.69027 -5.30208 -2.70883 0.07360 -0.26176
87 3.19531 -1.43186 3.82859 -9.83963 -2.83611 0.09403 14.30309
88 -0.66991 -0.33925 -0.26224 -6.71810 0.52439 0.00654 -2.45750
89 3.32705 -0.20431 -0.61940 -5.82014 -3.30832 -0.13399 9.94820
90 -3.01400 -1.40133 7.13418 -15.85676 3.92442 0.29137 -0.19544
91 10.75129 -0.08744 4.35843 -9.89202 -0.71794 0.12349 12.68742
92 4.74271 -1.32895 -2.73218 9.15129 0.93902 -0.17934 -15.58698
93 3.96678 -1.93074 -1.98368 -12.52082 7.35129 -0.30941 -40.20406
94 2.98664 1.85034 2.54075 -2.98750 0.37193 0.16048 9.08819
95 -6.73878 -1.08637 -1.55835 -3.93097 -3.02271 0.11860 6.24185
96 -4.58240 -1.27825 7.55098 8.83930 -3.80318 0.04386 26.14768
97 -10.00364 2.66002 -4.26776 -3.73792 -0.72349 -0.24617 0.76214
98 -4.32624 -2.30314 -8.16044 4.46366 -3.33569 -0.01655 -10.05262
99 -1.90167 -0.15858 -10.43466 4.89762 -0.64606 -0.14519 -19.63970
100 2.43213 2.41613 2.49949 -8.03891 -1.64164 -0.63444 12.76193

View File

@@ -0,0 +1,25 @@
import pandas as pd
import numpy as np
from sklearn.linear_model import Lasso
from sklearn.preprocessing import StandardScaler

# Load the data: six unnamed feature columns plus a target column.
train_data = pd.read_csv('data.csv', header=None)
X = train_data.iloc[:, :-1]
y = train_data.iloc[:, -1]

# Standardize the features (zero mean, unit variance) so the L1 penalty
# weighs every coefficient on the same scale.
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X)

# Fit the Lasso-regularized linear regression on the scaled features.
lasso_reg = Lasso()
lasso_reg.fit(X_scaled, y)

# Report the regression coefficients.
reg_coef = lasso_reg.coef_
print(reg_coef)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,878 @@
#!/usr/bin/env python
# coding: utf-8
# ## Our Mission ##
#
# Spam detection is one of the major applications of Machine Learning in the interwebs today. Pretty much all of the major email service providers have spam detection systems built in and automatically classify such mail as 'Junk Mail'.
#
# In this mission we will be using the Naive Bayes algorithm to create a model that can classify SMS messages as spam or not spam, based on the training we give to the model. It is important to have some level of intuition as to what a spammy text message might look like. Usually they have words like 'free', 'win', 'winner', 'cash', 'prize' and the like in them as these texts are designed to catch your eye and in some sense tempt you to open them. Also, spam messages tend to have words written in all capitals and also tend to use a lot of exclamation marks. To the recipient, it is usually pretty straightforward to identify a spam text and our objective here is to train a model to do that for us!
#
# Being able to identify spam messages is a binary classification problem as messages are classified as either 'Spam' or 'Not Spam' and nothing else. Also, this is a supervised learning problem, as we will be feeding a labelled dataset into the model, that it can learn from, to make future predictions.
#
# # Overview
#
# This project has been broken down in to the following steps:
#
# - Step 0: Introduction to the Naive Bayes Theorem
# - Step 1.1: Understanding our dataset
# - Step 1.2: Data Preprocessing
# - Step 2.1: Bag of Words(BoW)
# - Step 2.2: Implementing BoW from scratch
# - Step 2.3: Implementing Bag of Words in scikit-learn
# - Step 3.1: Training and testing sets
# - Step 3.2: Applying Bag of Words processing to our dataset.
# - Step 4.1: Bayes Theorem implementation from scratch
# - Step 4.2: Naive Bayes implementation from scratch
# - Step 5: Naive Bayes implementation using scikit-learn
# - Step 6: Evaluating our model
# - Step 7: Conclusion
#
# ### Step 0: Introduction to the Naive Bayes Theorem ###
#
# Bayes Theorem is one of the earliest probabilistic inference algorithms. It was developed by Reverend Bayes (which he used to try and infer the existence of God no less), and still performs extremely well for certain use cases.
#
# It's best to understand this theorem using an example. Let's say you are a member of the Secret Service and you have been deployed to protect the Democratic presidential nominee during one of his/her campaign speeches. Being a public event that is open to all, your job is not easy and you have to be on the constant lookout for threats. So one place to start is to put a certain threat-factor for each person. So based on the features of an individual, like the age, sex, and other smaller factors like whether the person is carrying a bag, looks nervous, etc., you can make a judgment call as to whether that person is a viable threat.
#
# If an individual ticks all the boxes up to a level where it crosses a threshold of doubt in your mind, you can take action and remove that person from the vicinity. Bayes Theorem works in the same way, as we are computing the probability of an event (a person being a threat) based on the probabilities of certain related events (age, sex, presence of bag or not, nervousness of the person, etc.).
#
# One thing to consider is the independence of these features amongst each other. For example if a child looks nervous at the event then the likelihood of that person being a threat is not as much as say if it was a grown man who was nervous. To break this down a bit further, here there are two features we are considering, age AND nervousness. Say we look at these features individually, we could design a model that flags ALL persons that are nervous as potential threats. However, it is likely that we will have a lot of false positives as there is a strong chance that minors present at the event will be nervous. Hence by considering the age of a person along with the 'nervousness' feature we would definitely get a more accurate result as to who are potential threats and who aren't.
#
# This is the 'Naive' bit of the theorem where it considers each feature to be independent of each other which may not always be the case and hence that can affect the final judgement.
#
# In short, Bayes Theorem calculates the probability of a certain event happening (in our case, a message being spam) based on the joint probabilistic distributions of certain other events (in our case, the appearance of certain words in a message). We will dive into the workings of Bayes Theorem later in the mission, but first, let us understand the data we are going to work with.
# ### Step 1.1: Understanding our dataset ###
#
#
# We will be using a dataset originally compiled and posted on the UCI Machine Learning repository which has a very good collection of datasets for experimental research purposes. If you're interested, you can review the [abstract](https://archive.ics.uci.edu/ml/datasets/SMS+Spam+Collection) and the original [compressed data file](https://archive.ics.uci.edu/ml/machine-learning-databases/00228/) on the UCI site. For this exercise, however, we've gone ahead and downloaded the data for you.
#
#
# **Here's a preview of the data:**
#
# <img src="images/dqnb.png" height="1242" width="1242">
#
# The columns in the data set are currently not named and as you can see, there are 2 columns.
#
# The first column takes two values, 'ham' which signifies that the message is not spam, and 'spam' which signifies that the message is spam.
#
# The second column is the text content of the SMS message that is being classified.
# >**Instructions:**
# * Import the dataset into a pandas dataframe using the **read_table** method. The file has already been downloaded, and you can access it using the filepath 'smsspamcollection/SMSSpamCollection'. Because this is a tab separated dataset we will be using '\\t' as the value for the 'sep' argument which specifies this format.
# * Also, rename the column names by specifying a list ['label', 'sms_message'] to the 'names' argument of read_table().
# * Print the first five values of the dataframe with the new column names.
# In[12]:
# '!' allows you to run bash commands from jupyter notebook.
# The shell commands below are commented out so this script also runs
# outside a Jupyter session, where get_ipython() is unavailable.
print("List all the files in the current directory\n")
# get_ipython().system('ls')
# The required data table could be found under smsspamcollection/SMSSpamCollection
print("\n List all the files inside the smsspamcollection directory\n")
# get_ipython().system('ls smsspamcollection')
# >**Instructions:**
# * Convert the values in the 'label' column to numerical values using map method as follows:
# {'ham':0, 'spam':1} This maps the 'ham' value to 0 and the 'spam' value to 1.
# * Also, to get an idea of the size of the dataset we are dealing with, print out number of rows and columns using
# 'shape'.
# In[23]:
import pandas as pd
# The UCI SMS spam collection is a headerless, tab-separated file, so we
# supply our own column names when loading it.
column_names = ['label', 'sms_message']
df = pd.read_csv('smsspamcollection_SMSSpamCollection', sep='\t',
                 names=column_names)
# Preview the first five rows if desired:
# df.head()
# ### Step 1.2: Data Preprocessing ###
#
# Now that we have a basic understanding of what our dataset looks like, let's convert our labels to binary variables, 0 to represent 'ham'(i.e. not spam) and 1 to represent 'spam' for ease of computation.
#
# You might be wondering why do we need to do this step? The answer to this lies in how scikit-learn handles inputs. Scikit-learn only deals with numerical values and hence if we were to leave our label values as strings, scikit-learn would do the conversion internally(more specifically, the string labels will be cast to unknown float values).
#
# Our model would still be able to make predictions if we left our labels as strings but we could have issues later when calculating performance metrics, for example when calculating our precision and recall scores. Hence, to avoid unexpected 'gotchas' later, it is good practice to have our categorical values be fed into our model as integers.
# In[24]:
'''
Solution
'''
print(df)
# Encode the string labels as integers for scikit-learn: 'ham' -> 0, 'spam' -> 1.
# Series.map accepts the mapping dict directly.  The previous code passed a
# second positional argument (a list) which lands in Series.map's `na_action`
# parameter — invalid, and a ValueError on current pandas versions.
df['label'] = df.label.map({'ham': 0, 'spam': 1})
df.head()
print(f'Our data has {df.shape[0]} rows and {df.shape[1]} cols.')
# In[27]:
df.head()
# ### Step 2.1: Bag of Words ###
#
# What we have here in our data set is a large collection of text data (5,572 rows of data). Most ML algorithms rely on numerical data to be fed into them as input, and email/sms messages are usually text heavy.
#
# Here we'd like to introduce the Bag of Words (BoW) concept which is a term used to specify the problems that have a 'bag of words' or a collection of text data that needs to be worked with. The basic idea of BoW is to take a piece of text and count the frequency of the words in that text. It is important to note that the BoW concept treats each word individually and the order in which the words occur does not matter.
#
# Using a process which we will go through now, we can convert a collection of documents to a matrix, with each document being a row and each word (token) being the column, and the corresponding (row, column) values being the frequency of occurrence of each word or token in that document.
#
# For example:
#
# Let's say we have 4 documents, which are text messages
# in our case, as follows:
#
# `['Hello, how are you!',
# 'Win money, win from home.',
# 'Call me now',
# 'Hello, Call you tomorrow?']`
#
# Our objective here is to convert this set of texts to a frequency distribution matrix, as follows:
#
# <img src="images/countvectorizer.png" height="542" width="542">
#
# Here as we can see, the documents are numbered in the rows, and each word is a column name, with the corresponding value being the frequency of that word in the document.
#
# Let's break this down and see how we can do this conversion using a small set of documents.
#
# To handle this, we will be using sklearn's
# [count vectorizer](http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.CountVectorizer.html#sklearn.feature_extraction.text.CountVectorizer) method which does the following:
#
# * It tokenizes the string (separates the string into individual words) and gives an integer ID to each token.
# * It counts the occurrence of each of those tokens.
#
# **Please Note:**
#
# * The CountVectorizer method automatically converts all tokenized words to their lower case form so that it does not treat words like 'He' and 'he' differently. It does this using the `lowercase` parameter which is by default set to `True`.
#
# * It also ignores all punctuation so that words followed by a punctuation mark (for example: 'hello!') are not treated differently than the same words not prefixed or suffixed by a punctuation mark (for example: 'hello'). It does this using the `token_pattern` parameter which has a default regular expression which selects tokens of 2 or more alphanumeric characters.
#
# * The third parameter to take note of is the `stop_words` parameter. Stop words refer to the most commonly used words in a language. They include words like 'am', 'an', 'and', 'the' etc. By setting this parameter value to `english`, CountVectorizer will automatically ignore all words(from our input text) that are found in the built in list of english stop words in scikit-learn. This is extremely helpful as stop words can skew our calculations when we are trying to find certain key words that are indicative of spam.
#
# We will dive into the application of each of these into our model in a later step, but for now it is important to be aware of such preprocessing techniques available to us when dealing with textual data.
# ### Step 2.2: Implementing Bag of Words from scratch ###
#
# Before we dive into scikit-learn's Bag of Words (BoW) library to do the dirty work for us, let's implement it ourselves first so that we can understand what's happening behind the scenes.
#
# **Step 1: Convert all strings to their lower case form.**
#
# Let's say we have a document set:
#
# ```
# documents = ['Hello, how are you!',
# 'Win money, win from home.',
# 'Call me now.',
# 'Hello, Call hello you tomorrow?']
# ```
# >>**Instructions:**
# * Convert all the strings in the documents set to their lower case. Save them into a list called 'lower_case_documents'. You can convert strings to their lower case in python by using the lower() method.
#
# In[48]:
'''
Solution:
'''
documents = ['Hello, how are you!',
             'Win money, win from home.',
             'Call me now.',
             'Hello, Call hello you tomorrow?']
# Normalise case up front so that e.g. 'Hello' and 'hello' become the same token.
lower_case_documents = [doc.lower() for doc in documents]
print(lower_case_documents)
# **Step 2: Removing all punctuation**
#
# >>**Instructions:**
# Remove all punctuation from the strings in the document set. Save the strings into a list called
# 'sans_punctuation_documents'.
# In[73]:
'''
Solution:
'''
import string
# Strip the punctuation characters that occur in our sample documents so
# that e.g. 'now.' and 'now' tokenize identically.  str.translate deletes
# all four characters in a single pass over each string, instead of one
# chained .replace() call per character as before.
_punct_table = str.maketrans('', '', ',!.?')
sans_punctuation_documents = [item.translate(_punct_table)
                              for item in lower_case_documents]
print(sans_punctuation_documents)
# **Step 3: Tokenization**
#
# Tokenizing a sentence in a document set means splitting up the sentence into individual words using a delimiter. The delimiter specifies what character we will use to identify the beginning and end of a word. Most commonly, we use a single space as the delimiter character for identifying words, and this is true in our documents in this case also.
# >>**Instructions:**
# Tokenize the strings stored in 'sans_punctuation_documents' using the split() method. Store the final document set
# in a list called 'preprocessed_documents'.
#
# In[75]:
'''
Solution:
'''
# Tokenize on whitespace: each document becomes a list of its words.
preprocessed_documents = [doc.split() for doc in sans_punctuation_documents]
print(preprocessed_documents)
# **Step 4: Count frequencies**
#
# Now that we have our document set in the required format, we can proceed to counting the occurrence of each word in each document of the document set. We will use the `Counter` method from the Python `collections` library for this purpose.
#
# `Counter` counts the occurrence of each item in the list and returns a dictionary with the key as the item being counted and the corresponding value being the count of that item in the list.
# >>**Instructions:**
# Using the Counter() method and preprocessed_documents as the input, create a dictionary with the keys being each word in each document and the corresponding values being the frequency of occurrence of that word. Save each Counter dictionary as an item in a list called 'frequency_list'.
#
# In[78]:
'''
Solution
'''
import pprint
from collections import Counter
# One Counter per document: word -> number of occurrences in that document.
frequency_list = [Counter(tokens) for tokens in preprocessed_documents]
pprint.pprint(frequency_list)
# Congratulations! You have implemented the Bag of Words process from scratch! As we can see in our previous output, we have a frequency distribution dictionary which gives a clear view of the text that we are dealing with.
#
# We should now have a solid understanding of what is happening behind the scenes in the `sklearn.feature_extraction.text.CountVectorizer` method of scikit-learn.
#
# We will now implement `sklearn.feature_extraction.text.CountVectorizer` method in the next step.
# ### Step 2.3: Implementing Bag of Words in scikit-learn ###
#
# Now that we have implemented the BoW concept from scratch, let's go ahead and use scikit-learn to do this process in a clean and succinct way. We will use the same document set as we used in the previous step.
# In[ ]:
'''
Here we will look to create a frequency matrix on a smaller document set to make sure we understand how the
document-term matrix generation happens. We have created a sample document set 'documents'.
'''
from sklearn.feature_extraction.text import CountVectorizer

# The same four sample messages used in the from-scratch implementation above.
documents = ['Hello, how are you!',
             'Win money, win from home.',
             'Call me now.',
             'Hello, Call hello you tomorrow?']
# >>**Instructions:**
# Import the sklearn.feature_extraction.text.CountVectorizer method and create an instance of it called 'count_vector'.
# In[82]:
'''
Solution
'''
# Default settings: lowercasing on, punctuation treated as delimiters,
# no stop-word filtering (see the notes above).
count_vector = CountVectorizer()
# **Data preprocessing with CountVectorizer()**
#
# In Step 2.2, we implemented a version of the CountVectorizer() method from scratch that entailed cleaning our data first. This cleaning involved converting all of our data to lower case and removing all punctuation marks. CountVectorizer() has certain parameters which take care of these steps for us. They are:
#
# * `lowercase = True`
#
# The `lowercase` parameter has a default value of `True` which converts all of our text to its lower case form.
#
#
# * `token_pattern = (?u)\\b\\w\\w+\\b`
#
# The `token_pattern` parameter has a default regular expression value of `(?u)\\b\\w\\w+\\b` which ignores all punctuation marks and treats them as delimiters, while accepting alphanumeric strings of length greater than or equal to 2, as individual tokens or words.
#
#
# * `stop_words`
#
# The `stop_words` parameter, if set to `english` will remove all words from our document set that match a list of English stop words defined in scikit-learn. Considering the small size of our dataset and the fact that we are dealing with SMS messages and not larger text sources like e-mail, we will not use stop words, and we won't be setting this parameter value.
#
# You can take a look at all the parameter values of your `count_vector` object by simply printing out the object as follows:
# In[83]:
'''
Practice node:
Print the 'count_vector' object which is an instance of 'CountVectorizer()'
'''
# No need to revise this code
# Printing the vectorizer displays its current configuration (parameter values).
print(count_vector)
# >>**Instructions:**
# Fit your document dataset to the CountVectorizer object you have created using fit(), and get the list of words
# which have been categorized as features using the get_feature_names() method.
# In[84]:
'''
Solution:
'''
# Learn the vocabulary of the sample document set.
count_vector.fit(documents)
# 'get_feature_names' was removed in scikit-learn 1.2 in favour of
# 'get_feature_names_out'; fall back so this runs on either version.
try:
    count_vector.get_feature_names_out()
except AttributeError:
    count_vector.get_feature_names()
# The `get_feature_names()` method returns our feature names for this dataset, which is the set of words that make up our vocabulary for 'documents'.
# >>**Instructions:**
# Create a matrix with each row representing one of the 4 documents, and each column representing a word (feature name).
# Each value in the matrix will represent the frequency of the word in that column occurring in the particular document in that row.
# You can do this using the transform() method of CountVectorizer, passing in the document data set as the argument. The transform() method returns a matrix of NumPy integers, which you can convert to an array using
# toarray(). Call the array 'doc_array'.
#
# In[100]:
'''
Solution
'''
# Count word occurrences for every document, then densify the sparse
# result into a plain array for easy viewing.
doc_array = count_vector.transform(documents).toarray()
print(documents,'\n', doc_array)
# Now we have a clean representation of the documents in terms of the frequency distribution of the words in them. To make it easier to understand our next step is to convert this array into a dataframe and name the columns appropriately.
# >>**Instructions:**
# Convert the 'doc_array' we created into a dataframe, with the column names as the words (feature names). Call the dataframe 'frequency_matrix'.
#
# In[105]:
'''
Solution
'''
# Label each column with its word so the matrix is readable.
# 'get_feature_names' was removed in scikit-learn 1.2; prefer its
# replacement 'get_feature_names_out' when available.
try:
    names = count_vector.get_feature_names_out()
except AttributeError:
    names = count_vector.get_feature_names()
frequency_matrix = pd.DataFrame(doc_array, columns=names)
frequency_matrix
# Congratulations! You have successfully implemented a Bag of Words problem for a document dataset that we created.
#
# One potential issue that can arise from using this method is that if our dataset of text is extremely large (say if we have a large collection of news articles or email data), there will be certain values that are more common than others simply due to the structure of the language itself. For example, words like 'is', 'the', 'an', pronouns, grammatical constructs, etc., could skew our matrix and affect our analysis.
#
# There are a couple of ways to mitigate this. One way is to use the `stop_words` parameter and set its value to `english`. This will automatically ignore all the words in our input text that are found in a built-in list of English stop words in scikit-learn.
#
# Another way of mitigating this is by using the [tfidf](http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.TfidfVectorizer.html#sklearn.feature_extraction.text.TfidfVectorizer) method. This method is out of scope for the context of this lesson.
# ### Step 3.1: Training and testing sets ###
#
# Now that we understand how to use the Bag of Words approach, we can return to our original, larger UCI dataset and proceed with our analysis. Our first step is to split our dataset into a training set and a testing set so we can first train, and then test our model.
#
# >>**Instructions:**
# Split the dataset into a training and testing set using the train_test_split method in sklearn, and print out the number of rows we have in each of our training and testing data. Split the data
# using the following variables:
# * `X_train` is our training data for the 'sms_message' column.
# * `y_train` is our training data for the 'label' column
# * `X_test` is our testing data for the 'sms_message' column.
# * `y_test` is our testing data for the 'label' column.
#
# In[106]:
'''
Solution
NOTE: sklearn.cross_validation was used here previously but it has been
deprecated in favor of sklearn.model_selection, and will be removed in v0.20.
'''
from sklearn.model_selection import train_test_split

# Hold out a test split, seeding the shuffle so the split is reproducible.
X_train, X_test, y_train, y_test = train_test_split(
    df['sms_message'], df['label'], random_state=1)
print(f'Number of rows in the total set: {df.shape[0]}')
print(f'Number of rows in the training set: {X_train.shape[0]}')
print(f'Number of rows in the test set: {X_test.shape[0]}')
# ### Step 3.2: Applying Bag of Words processing to our dataset. ###
#
# Now that we have split the data, our next objective is to follow the steps from Step 2: Bag of words and convert our data into the desired matrix format. To do this we will be using CountVectorizer() as we did before. There are two steps to consider here:
#
# * First, we have to fit our training data (`X_train`) into `CountVectorizer()` and return the matrix.
# * Secondly, we have to transform our testing data (`X_test`) to return the matrix.
#
# Note that `X_train` is our training data for the 'sms_message' column in our dataset and we will be using this to train our model.
#
# `X_test` is our testing data for the 'sms_message' column and this is the data we will be using (after transformation to a matrix) to make predictions on. We will then compare those predictions with `y_test` in a later step.
#
# For now, we have provided the code that does the matrix transformations for you!
# In[ ]:
'''
[Practice Node]
The code for this segment is in 2 parts. First, we are learning a vocabulary dictionary for the training data
and then transforming the data into a document-term matrix; secondly, for the testing data we are only
transforming the data into a document-term matrix.
This is similar to the process we followed in Step 2.3.
We will provide the transformed data to students in the variables 'training_data' and 'testing_data'.
'''
# In[107]:
'''
Solution
'''
# Instantiate the CountVectorizer method
count_vector = CountVectorizer()
# Fit the training data and then return the matrix
# (the vocabulary is learned from X_train only, so no test data leaks into it)
training_data = count_vector.fit_transform(X_train)
# Transform testing data and return the matrix. Note we are not fitting the testing data into the CountVectorizer()
testing_data = count_vector.transform(X_test)
# ### Step 4.1: Bayes Theorem implementation from scratch ###
#
# Now that we have our dataset in the format that we need, we can move onto the next portion of our mission which is the algorithm we will use to make our predictions to classify a message as spam or not spam. Remember that at the start of the mission we briefly discussed the Bayes theorem but now we shall go into a little more detail. In layman's terms, the Bayes theorem calculates the probability of an event occurring, based on certain other probabilities that are related to the event in question. It is composed of a prior(the probabilities that we are aware of or that is given to us) and the posterior(the probabilities we are looking to compute using the priors).
#
# Let us implement the Bayes Theorem from scratch using a simple example. Let's say we are trying to find the odds of an individual having diabetes, given that he or she was tested for it and got a positive result.
# In the medical field, such probabilities play a very important role as it usually deals with life and death situations.
#
# We assume the following:
#
# `P(D)` is the probability of a person having Diabetes. Its value is `0.01`, or in other words, 1% of the general population has diabetes (Disclaimer: these values are assumptions and are not reflective of any medical study).
#
# `P(Pos)` is the probability of getting a positive test result.
#
# `P(Neg)` is the probability of getting a negative test result.
#
# `P(Pos|D)` is the probability of getting a positive result on a test done for detecting diabetes, given that you have diabetes. This has a value `0.9`. In other words the test is correct 90% of the time. This is also called the Sensitivity or True Positive Rate.
#
# `P(Neg|~D)` is the probability of getting a negative result on a test done for detecting diabetes, given that you do not have diabetes. This also has a value of `0.9` and is therefore correct, 90% of the time. This is also called the Specificity or True Negative Rate.
#
# The Bayes formula is as follows:
#
# <img src="images/bayes_formula.png" height="242" width="242">
#
# * `P(A)` is the prior probability of A occurring independently. In our example this is `P(D)`. This value is given to us.
#
# * `P(B)` is the prior probability of B occurring independently. In our example this is `P(Pos)`.
#
# * `P(A|B)` is the posterior probability that A occurs given B. In our example this is `P(D|Pos)`. That is, **the probability of an individual having diabetes, given that, that individual got a positive test result. This is the value that we are looking to calculate.**
#
# * `P(B|A)` is the likelihood probability of B occurring, given A. In our example this is `P(Pos|D)`. This value is given to us.
# Putting our values into the formula for Bayes theorem we get:
#
# `P(D|Pos) = P(D) * P(Pos|D) / P(Pos)`
#
# The probability of getting a positive test result `P(Pos)` can be calculated using the Sensitivity and Specificity as follows:
#
# `P(Pos) = [P(D) * Sensitivity] + [P(~D) * (1-Specificity))]`
# In[ ]:
'''
Instructions:
Calculate probability of getting a positive test result, P(Pos)
'''
# In[108]:
'''
Solution (skeleton code will be provided)
'''
# P(D)
p_diabetes = 0.01
# P(~D)
p_no_diabetes = 0.99
# Sensitivity or P(Pos|D)
p_pos_diabetes = 0.9
# Specificity or P(Neg|~D)
p_neg_no_diabetes = 0.9
# P(Pos) by the law of total probability:
# P(Pos) = P(D)*P(Pos|D) + P(~D)*P(Pos|~D), with P(Pos|~D) = 1 - specificity.
p_pos = (p_diabetes * p_pos_diabetes) + (p_no_diabetes * (1 - p_neg_no_diabetes))
# Previous code wrote "'... {}',format(p_pos)" — a comma instead of a dot —
# which printed a literal '{}' followed by the value.  Use .format properly.
print('The probability of getting a positive test result P(Pos) is: {}'.format(p_pos))
# **Using all of this information we can calculate our posteriors as follows:**
#
# The probability of an individual having diabetes, given that, that individual got a positive test result:
#
# `P(D|Pos) = (P(D) * Sensitivity)) / P(Pos)`
#
# The probability of an individual not having diabetes, given that, that individual got a positive test result:
#
# `P(~D|Pos) = (P(~D) * (1-Specificity)) / P(Pos)`
#
# The sum of our posteriors will always equal `1`.
# In[ ]:
'''
Instructions:
Compute the probability of an individual having diabetes, given that, that individual got a positive test result.
In other words, compute P(D|Pos).
The formula is: P(D|Pos) = (P(D) * P(Pos|D) / P(Pos)
'''
# In[109]:
'''
Solution
'''
# Posterior P(D|Pos) via Bayes' rule: prior times likelihood, over evidence.
p_diabetes_pos = p_diabetes * p_pos_diabetes / p_pos
print(f'Probability of an individual having diabetes, given that that individual got a positive test result is: {p_diabetes_pos}')
# In[ ]:
'''
Instructions:
Compute the probability of an individual not having diabetes, given that, that individual got a positive test result.
In other words, compute P(~D|Pos).
The formula is: P(~D|Pos) = P(~D) * P(Pos|~D) / P(Pos)
Note that P(Pos|~D) can be computed as 1 - P(Neg|~D).
Therefore:
P(Pos|~D) = p_pos_no_diabetes = 1 - 0.9 = 0.1
'''
# In[113]:
'''
Solution
'''
# P(Pos|~D) is the false-positive rate: 1 - specificity = 1 - 0.9.
p_pos_no_diabetes = 0.1
# Posterior P(~D|Pos): prior of no diabetes times false-positive rate, over evidence.
p_no_diabetes_pos = p_no_diabetes * p_pos_no_diabetes / p_pos
print(f'Probability of an individual not having diabetes, given that that individual got a positive test result is: {p_no_diabetes_pos}')
# Congratulations! You have implemented Bayes Theorem from scratch. Your analysis shows that even if you get a positive test result, there is only an 8.3% chance that you actually have diabetes and a 91.67% chance that you do not have diabetes. This is of course assuming that only 1% of the entire population has diabetes which is only an assumption.
# **What does the term 'Naive' in 'Naive Bayes' mean ?**
#
# The term 'Naive' in Naive Bayes comes from the fact that the algorithm considers the features that it is using to make the predictions to be independent of each other, which may not always be the case. So in our Diabetes example, we are considering only one feature, that is the test result. Say we added another feature, 'exercise'. Let's say this feature has a binary value of `0` and `1`, where the former signifies that the individual exercises less than or equal to 2 days a week and the latter signifies that the individual exercises greater than or equal to 3 days a week. If we had to use both of these features, namely the test result and the value of the 'exercise' feature, to compute our final probabilities, Bayes' theorem would fail. Naive Bayes' is an extension of Bayes' theorem that assumes that all the features are independent of each other.
# ### Step 4.2: Naive Bayes implementation from scratch ###
#
#
# Now that you have understood the ins and outs of Bayes Theorem, we will extend it to consider cases where we have more than one feature.
#
# Let's say that we have two political parties' candidates, 'Jill Stein' of the Green Party and 'Gary Johnson' of the Libertarian Party and we have the probabilities of each of these candidates saying the words 'freedom', 'immigration' and 'environment' when they give a speech:
#
# * Probability that Jill Stein says 'freedom': 0.1 ---------> `P(F|J)`
# * Probability that Jill Stein says 'immigration': 0.1 -----> `P(I|J)`
# * Probability that Jill Stein says 'environment': 0.8 -----> `P(E|J)`
#
#
# * Probability that Gary Johnson says 'freedom': 0.7 -------> `P(F|G)`
# * Probability that Gary Johnson says 'immigration': 0.2 ---> `P(I|G)`
# * Probability that Gary Johnson says 'environment': 0.1 ---> `P(E|G)`
#
#
# And let us also assume that the probability of Jill Stein giving a speech, `P(J)` is `0.5` and the same for Gary Johnson, `P(G) = 0.5`.
#
#
# Given this, what if we had to find the probabilities of Jill Stein saying the words 'freedom' and 'immigration'? This is where the Naive Bayes' theorem comes into play as we are considering two features, 'freedom' and 'immigration'.
#
# Now we are at a place where we can define the formula for the Naive Bayes' theorem:
#
# <img src="images/naivebayes.png" height="342" width="342">
#
# Here, `y` is the class variable (in our case the name of the candidate) and `x1` through `xn` are the feature vectors (in our case the individual words). The theorem makes the assumption that each of the feature vectors or words (`xi`) are independent of each other.
# To break this down, we have to compute the following posterior probabilities:
#
# * `P(J|F,I)`: Given the words freedom and immigration were said, what's the probability that it was said by Jill?
#
# Using the formula and our knowledge of Bayes' theorem, we can compute this as follows: `P(J|F,I)` = `(P(J) * P(F|J) * P(I|J)) / P(F,I)`. Here `P(F,I)` is the probability of the words 'freedom' and 'immigration' being said in a speech.
#
#
# * `P(G|F,I)`: Probability that words Freedom and Immigration are said by Gary Johnson `
#
# Using the formula, we can compute this as follows: `P(G|F,I)` = `(P(G) * P(F|G) * P(I|G)) / P(F,I)`
# In[ ]:
'''
Instructions: Compute the probability of the words 'freedom' and 'immigration' being said in a speech, or
P(F,I).
The first step is multiplying the probabilities of Jill Stein giving a speech with her individual
probabilities of saying the words 'freedom' and 'immigration'. Store this in a variable called p_j_text.
The second step is multiplying the probabilities of Gary Johnson giving a speech with his individual
probabilities of saying the words 'freedom' and 'immigration'. Store this in a variable called p_g_text.
The third step is to add both of these probabilities and you will get P(F,I).
'''
# In[143]:
'''
Solution: Step 1
'''
# Prior probability that a speech is given by Jill Stein.
# P(J)
p_j = 0.5
# Probability that Jill says 'freedom'.
# P(F|J)
p_f_j = 0.1
# Probability that Jill says 'immigration'.
# P(I|J)
p_i_j = 0.1
# Naive Bayes numerator for Jill: P(J) * P(F|J) * P(I|J). Multiplying the
# two word probabilities is valid only under the naive assumption that the
# words are conditionally independent given the speaker.
p_j_text = p_f_j * p_i_j * p_j
print(p_j_text)
# In[144]:
'''
Solution: Step 2
'''
# Prior probability that a speech is given by Gary Johnson.
# P(G)
p_g = 0.5
# Probability that Gary says 'freedom'.
# P(F|G)
p_f_g = 0.7
# Probability that Gary says 'immigration'.
# P(I|G)
p_i_g = 0.2
# Naive Bayes numerator for Gary: P(G) * P(F|G) * P(I|G).
p_g_text = p_f_g * p_i_g * p_g
print(p_g_text)
# In[145]:
'''
Solution: Step 3: Compute P(F,I) and store in p_f_i
'''
# Marginal probability of both words being said, summed over the two
# possible speakers: P(F,I) = P(J)P(F|J)P(I|J) + P(G)P(F|G)P(I|G).
p_f_i = p_j_text + p_g_text
print(f'Probability of words freedom and immigration being said are: {p_f_i:.2%}')
# Now we can compute the probability of `P(J|F,I)`, that is the probability of Jill Stein saying the words Freedom and Immigration and `P(G|F,I)`, that is the probability of Gary Johnson saying the words Freedom and Immigration.
# In[146]:
'''
Instructions:
Compute P(J|F,I) using the formula P(J|F,I) = (P(J) * P(F|J) * P(I|J)) / P(F,I) and store it in a variable p_j_fi
'''
# In[147]:
'''
Solution
'''
# Posterior for Jill via Bayes' rule: her numerator divided by the marginal.
p_j_fi = (p_j * p_f_j * p_i_j) / p_f_i
print(f'The probability of Jill Stein saying the words Freedom and Immigration: {p_j_fi:.2%}')
# In[148]:
'''
Instructions:
Compute P(G|F,I) using the formula P(G|F,I) = (P(G) * P(F|G) * P(I|G)) / P(F,I) and store it in a variable p_g_fi
'''
# In[149]:
'''
Solution
'''
# Posterior for Gary; together with p_j_fi the two posteriors sum to 1.
p_g_fi = (p_g * p_f_g * p_i_g) / p_f_i
print(f'The probability of Gary Johnson saying the words Freedom and Immigration: {p_g_fi:.2%}')
# And as we can see, just like in the Bayes' theorem case, the sum of our posteriors is equal to 1. Congratulations! You have implemented the Naive Bayes' theorem from scratch. Our analysis shows that there is only a 6.6% chance that Jill Stein of the Green Party uses the words 'freedom' and 'immigration' in her speech as compared to the 93.3% chance for Gary Johnson of the Libertarian party.
# Another more generic example of Naive Bayes' in action is as when we search for the term 'Sacramento Kings' in a search engine. In order for us to get the results pertaining to the Sacramento Kings NBA basketball team, the search engine needs to be able to associate the two words together and not treat them individually, in which case we would get results of images tagged with 'Sacramento' like pictures of city landscapes and images of 'Kings' which could be pictures of crowns or kings from history when what we are looking to get are images of the basketball team. This is a classic case of the search engine treating the words as independent entities and hence being 'naive' in its approach.
#
#
# Applying this to our problem of classifying messages as spam, the Naive Bayes algorithm *looks at each word individually and not as associated entities* with any kind of link between them. In the case of spam detectors, this usually works as there are certain red flag words which can almost guarantee its classification as spam, for example emails with words like 'viagra' are usually classified as spam.
# ### Step 5: Naive Bayes implementation using scikit-learn ###
#
# Thankfully, sklearn has several Naive Bayes implementations that we can use and so we do not have to do the math from scratch. We will be using sklearn's `sklearn.naive_bayes` method to make predictions on our dataset.
#
# Specifically, we will be using the multinomial Naive Bayes implementation. This particular classifier is suitable for classification with discrete features (such as in our case, word counts for text classification). It takes in integer word counts as its input. On the other hand Gaussian Naive Bayes is better suited for continuous data as it assumes that the input data has a Gaussian(normal) distribution.
# In[ ]:
'''
Instructions:
We have loaded the training data into the variable 'training_data' and the testing data into the
variable 'testing_data'.
Import the MultinomialNB classifier and fit the training data into the classifier using fit(). Name your classifier
'naive_bayes'. You will be training the classifier using 'training_data' and y_train' from our split earlier.
'''
# In[156]:
'''
Solution
'''
# Multinomial Naive Bayes suits discrete features such as integer word
# counts (see the note above comparing it with Gaussian Naive Bayes).
from sklearn.naive_bayes import MultinomialNB
# NOTE(review): 'training_data' and 'y_train' are assumed to come from the
# train/test split performed earlier in this notebook -- confirm upstream.
naive_bayes = MultinomialNB()
naive_bayes.fit(training_data, y_train)
# In[ ]:
'''
Instructions:
Now that our algorithm has been trained using the training data set we can now make some predictions on the test data
stored in 'testing_data' using predict(). Save your predictions into the 'predictions' variable.
'''
# In[159]:
'''
Solution
'''
# One predicted class label per message in the held-out test set.
predictions = naive_bayes.predict(testing_data)
print(predictions)
# Now that predictions have been made on our test set, we need to check the accuracy of our predictions.
# ### Step 6: Evaluating our model ###
#
# Now that we have made predictions on our test set, our next goal is to evaluate how well our model is doing. There are various mechanisms for doing so, but first let's do quick recap of them.
#
# **Accuracy** measures how often the classifier makes the correct prediction. It is the ratio of the number of correct predictions to the total number of predictions (the number of test data points).
#
# **Precision** tells us what proportion of messages we classified as spam, actually were spam.
# It is a ratio of true positives(words classified as spam, and which are actually spam) to all positives(all words classified as spam, irrespective of whether that was the correct classification), in other words it is the ratio of
#
# `[True Positives/(True Positives + False Positives)]`
#
# **Recall(sensitivity)** tells us what proportion of messages that actually were spam were classified by us as spam.
# It is a ratio of true positives(words classified as spam, and which are actually spam) to all the words that were actually spam, in other words it is the ratio of
#
# `[True Positives/(True Positives + False Negatives)]`
#
# For classification problems that are skewed in their classification distributions like in our case, for example if we had a 100 text messages and only 2 were spam and the rest 98 weren't, accuracy by itself is not a very good metric. We could classify 90 messages as not spam (including the 2 that were spam but we classify them as not spam, hence they would be false negatives) and 10 as spam (all 10 false positives) and still get a reasonably good accuracy score. For such cases, precision and recall come in very handy. These two metrics can be combined to get the F1 score, which is the weighted average of the precision and recall scores. This score can range from 0 to 1, with 1 being the best possible F1 score.
# We will be using all 4 metrics to make sure our model does well. For all 4 metrics whose values can range from 0 to 1, having a score as close to 1 as possible is a good indicator of how well our model is doing.
# In[ ]:
'''
Instructions:
Compute the accuracy, precision, recall and F1 scores of your model using your test data 'y_test' and the predictions
you made earlier stored in the 'predictions' variable.
'''
# In[161]:
'''
Solution
'''
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
# NOTE(review): 'y_test' and 'predictions' are assumed to come from the
# train/test split and predict() call earlier in this notebook.
# precision/recall/f1 use their default binary averaging, which presumably
# treats label 1 (spam) as the positive class -- confirm the label encoding.
print('Accuracy score: ', format(accuracy_score(y_test, predictions)))
print('Precision score: ', format(precision_score(y_test, predictions)))
print('Recall score: ', format(recall_score(y_test, predictions)))
print('F1 score: ', format(f1_score(y_test, predictions)))
# ### Step 7: Conclusion ###
#
# One of the major advantages that Naive Bayes has over other classification algorithms is its ability to handle an extremely large number of features. In our case, each word is treated as a feature and there are thousands of different words. Also, it performs well even with the presence of irrelevant features and is relatively unaffected by them. The other major advantage it has is its relative simplicity. Naive Bayes' works well right out of the box and tuning its parameters is rarely ever necessary, except usually in cases where the distribution of the data is known.
# It rarely ever overfits the data. Another important advantage is that its model training and prediction times are very fast for the amount of data it can handle. All in all, Naive Bayes' really is a gem of an algorithm!
#
# Congratulations! You have successfully designed a model that can efficiently predict if an SMS message is spam or not!
#
# Thank you for learning with us!

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,29 @@
import pandas as pd

# TODO: Set weight1, weight2, and bias
weight1 = 0.0
weight2 = 0.0
bias = 0.0

# DON'T CHANGE ANYTHING BELOW
# Truth table for AND: only the input (1, 1) should fire the perceptron.
test_inputs = [(0, 0), (0, 1), (1, 0), (1, 1)]
correct_outputs = [False, False, False, True]
outputs = []

# Evaluate the perceptron on every row of the truth table.
for (in_1, in_2), expected in zip(test_inputs, correct_outputs):
    linear_combination = weight1 * in_1 + weight2 * in_2 + bias
    # Step activation: fire (1) whenever the linear combination is non-negative.
    activation = 1 if linear_combination >= 0 else 0
    verdict = 'Yes' if activation == expected else 'No'
    outputs.append([in_1, in_2, linear_combination, activation, verdict])

# Summarise and report the results as a table.
num_wrong = sum(1 for row in outputs if row[4] == 'No')
output_frame = pd.DataFrame(outputs, columns=['Input 1', ' Input 2', ' Linear Combination', ' Activation Output', ' Is Correct'])
if not num_wrong:
    print('Nice! You got it all correct.\n')
else:
    print('You got {} wrong. Keep trying!\n'.format(num_wrong))
print(output_frame.to_string(index=False))

View File

@@ -0,0 +1,29 @@
import pandas as pd

# TODO: Set weight1, weight2, and bias
# These values place the decision boundary so that only (1, 1) is classified
# as True, which is exactly the AND truth table.
weight1 = 1.0
weight2 = 1.0
bias = -1.25

# DON'T CHANGE ANYTHING BELOW
# Truth table inputs and the expected AND outputs.
test_inputs = [(0, 0), (0, 1), (1, 0), (1, 1)]
correct_outputs = [False, False, False, True]
outputs = []

# Run the perceptron over each truth-table row and record the result.
for (in_1, in_2), expected in zip(test_inputs, correct_outputs):
    linear_combination = weight1 * in_1 + weight2 * in_2 + bias
    activation = 1 if linear_combination >= 0 else 0  # step activation
    verdict = 'Yes' if activation == expected else 'No'
    outputs.append([in_1, in_2, linear_combination, activation, verdict])

# Count the misclassifications and render everything as a table.
num_wrong = sum(1 for row in outputs if row[4] == 'No')
output_frame = pd.DataFrame(outputs, columns=['Input 1', ' Input 2', ' Linear Combination', ' Activation Output', ' Is Correct'])
if not num_wrong:
    print('Nice! You got it all correct.\n')
else:
    print('You got {} wrong. Keep trying!\n'.format(num_wrong))
print(output_frame.to_string(index=False))

View File

@@ -0,0 +1,29 @@
import pandas as pd

# TODO: Set weight1, weight2, and bias
# NOT only depends on the second input: a negative weight on input 2 and a
# positive bias invert it, while input 1 is ignored (weight 0).
weight1 = 0.0
weight2 = -1.0
bias = 0.8

# DON'T CHANGE ANYTHING BELOW
# Truth-table inputs; the expected output is NOT of the second input.
test_inputs = [(0, 0), (0, 1), (1, 0), (1, 1)]
correct_outputs = [True, False, True, False]
outputs = []

# Score each truth-table row with the perceptron.
for (in_1, in_2), expected in zip(test_inputs, correct_outputs):
    linear_combination = weight1 * in_1 + weight2 * in_2 + bias
    activation = 1 if linear_combination >= 0 else 0  # step activation
    verdict = 'Yes' if activation == expected else 'No'
    outputs.append([in_1, in_2, linear_combination, activation, verdict])

# Tally mistakes and print the results table.
num_wrong = sum(1 for row in outputs if row[4] == 'No')
output_frame = pd.DataFrame(outputs, columns=['Input 1', ' Input 2', ' Linear Combination', ' Activation Output', ' Is Correct'])
if not num_wrong:
    print('Nice! You got it all correct.\n')
else:
    print('You got {} wrong. Keep trying!\n'.format(num_wrong))
print(output_frame.to_string(index=False))

View File

@@ -0,0 +1,100 @@
0.78051,-0.063669,1
0.28774,0.29139,1
0.40714,0.17878,1
0.2923,0.4217,1
0.50922,0.35256,1
0.27785,0.10802,1
0.27527,0.33223,1
0.43999,0.31245,1
0.33557,0.42984,1
0.23448,0.24986,1
0.0084492,0.13658,1
0.12419,0.33595,1
0.25644,0.42624,1
0.4591,0.40426,1
0.44547,0.45117,1
0.42218,0.20118,1
0.49563,0.21445,1
0.30848,0.24306,1
0.39707,0.44438,1
0.32945,0.39217,1
0.40739,0.40271,1
0.3106,0.50702,1
0.49638,0.45384,1
0.10073,0.32053,1
0.69907,0.37307,1
0.29767,0.69648,1
0.15099,0.57341,1
0.16427,0.27759,1
0.33259,0.055964,1
0.53741,0.28637,1
0.19503,0.36879,1
0.40278,0.035148,1
0.21296,0.55169,1
0.48447,0.56991,1
0.25476,0.34596,1
0.21726,0.28641,1
0.67078,0.46538,1
0.3815,0.4622,1
0.53838,0.32774,1
0.4849,0.26071,1
0.37095,0.38809,1
0.54527,0.63911,1
0.32149,0.12007,1
0.42216,0.61666,1
0.10194,0.060408,1
0.15254,0.2168,1
0.45558,0.43769,1
0.28488,0.52142,1
0.27633,0.21264,1
0.39748,0.31902,1
0.5533,1,0
0.44274,0.59205,0
0.85176,0.6612,0
0.60436,0.86605,0
0.68243,0.48301,0
1,0.76815,0
0.72989,0.8107,0
0.67377,0.77975,0
0.78761,0.58177,0
0.71442,0.7668,0
0.49379,0.54226,0
0.78974,0.74233,0
0.67905,0.60921,0
0.6642,0.72519,0
0.79396,0.56789,0
0.70758,0.76022,0
0.59421,0.61857,0
0.49364,0.56224,0
0.77707,0.35025,0
0.79785,0.76921,0
0.70876,0.96764,0
0.69176,0.60865,0
0.66408,0.92075,0
0.65973,0.66666,0
0.64574,0.56845,0
0.89639,0.7085,0
0.85476,0.63167,0
0.62091,0.80424,0
0.79057,0.56108,0
0.58935,0.71582,0
0.56846,0.7406,0
0.65912,0.71548,0
0.70938,0.74041,0
0.59154,0.62927,0
0.45829,0.4641,0
0.79982,0.74847,0
0.60974,0.54757,0
0.68127,0.86985,0
0.76694,0.64736,0
0.69048,0.83058,0
0.68122,0.96541,0
0.73229,0.64245,0
0.76145,0.60138,0
0.58985,0.86955,0
0.73145,0.74516,0
0.77029,0.7014,0
0.73156,0.71782,0
0.44556,0.57991,0
0.85275,0.85987,0
0.51912,0.62359,0
1 0.78051 -0.063669 1
2 0.28774 0.29139 1
3 0.40714 0.17878 1
4 0.2923 0.4217 1
5 0.50922 0.35256 1
6 0.27785 0.10802 1
7 0.27527 0.33223 1
8 0.43999 0.31245 1
9 0.33557 0.42984 1
10 0.23448 0.24986 1
11 0.0084492 0.13658 1
12 0.12419 0.33595 1
13 0.25644 0.42624 1
14 0.4591 0.40426 1
15 0.44547 0.45117 1
16 0.42218 0.20118 1
17 0.49563 0.21445 1
18 0.30848 0.24306 1
19 0.39707 0.44438 1
20 0.32945 0.39217 1
21 0.40739 0.40271 1
22 0.3106 0.50702 1
23 0.49638 0.45384 1
24 0.10073 0.32053 1
25 0.69907 0.37307 1
26 0.29767 0.69648 1
27 0.15099 0.57341 1
28 0.16427 0.27759 1
29 0.33259 0.055964 1
30 0.53741 0.28637 1
31 0.19503 0.36879 1
32 0.40278 0.035148 1
33 0.21296 0.55169 1
34 0.48447 0.56991 1
35 0.25476 0.34596 1
36 0.21726 0.28641 1
37 0.67078 0.46538 1
38 0.3815 0.4622 1
39 0.53838 0.32774 1
40 0.4849 0.26071 1
41 0.37095 0.38809 1
42 0.54527 0.63911 1
43 0.32149 0.12007 1
44 0.42216 0.61666 1
45 0.10194 0.060408 1
46 0.15254 0.2168 1
47 0.45558 0.43769 1
48 0.28488 0.52142 1
49 0.27633 0.21264 1
50 0.39748 0.31902 1
51 0.5533 1 0
52 0.44274 0.59205 0
53 0.85176 0.6612 0
54 0.60436 0.86605 0
55 0.68243 0.48301 0
56 1 0.76815 0
57 0.72989 0.8107 0
58 0.67377 0.77975 0
59 0.78761 0.58177 0
60 0.71442 0.7668 0
61 0.49379 0.54226 0
62 0.78974 0.74233 0
63 0.67905 0.60921 0
64 0.6642 0.72519 0
65 0.79396 0.56789 0
66 0.70758 0.76022 0
67 0.59421 0.61857 0
68 0.49364 0.56224 0
69 0.77707 0.35025 0
70 0.79785 0.76921 0
71 0.70876 0.96764 0
72 0.69176 0.60865 0
73 0.66408 0.92075 0
74 0.65973 0.66666 0
75 0.64574 0.56845 0
76 0.89639 0.7085 0
77 0.85476 0.63167 0
78 0.62091 0.80424 0
79 0.79057 0.56108 0
80 0.58935 0.71582 0
81 0.56846 0.7406 0
82 0.65912 0.71548 0
83 0.70938 0.74041 0
84 0.59154 0.62927 0
85 0.45829 0.4641 0
86 0.79982 0.74847 0
87 0.60974 0.54757 0
88 0.68127 0.86985 0
89 0.76694 0.64736 0
90 0.69048 0.83058 0
91 0.68122 0.96541 0
92 0.73229 0.64245 0
93 0.76145 0.60138 0
94 0.58985 0.86955 0
95 0.73145 0.74516 0
96 0.77029 0.7014 0
97 0.73156 0.71782 0
98 0.44556 0.57991 0
99 0.85275 0.85987 0
100 0.51912 0.62359 0

View File

@@ -0,0 +1,66 @@
import numpy as np
# Setting the random seed, feel free to change it and see different solutions.
# A fixed seed makes the random weight/bias initialisation below reproducible.
np.random.seed(42)
import pandas as pd
def stepFunction(t):
    """Heaviside step activation: return 1 when t is non-negative, else 0."""
    return 1 if t >= 0 else 0
def prediction(X, W, b):
    """Classify sample X (0 or 1) using weights W and bias b via the step activation."""
    score = np.matmul(X, W) + b
    return stepFunction(score[0])
# TODO: Fill in the code below to implement the perceptron trick.
# The function should receive as inputs the data X, the labels y,
# the weights W (as an array), and the bias b,
# update the weights and bias W, b, according to the perceptron algorithm,
# and return W and b.
def perceptronStep(X, y, W, b, learn_rate=0.01):
    """Apply one perceptron-trick pass over every sample in (X, y).

    Misclassified points nudge the weights W and bias b toward the point
    (false negative) or away from it (false positive), scaled by
    learn_rate. Returns the updated (W, b).
    """
    for point, label in zip(X, y):
        error = label - prediction(point, W, b)
        if error == 1:
            # Labelled 1 but predicted 0: move the boundary toward the point.
            W[0] += point[0] * learn_rate
            W[1] += point[1] * learn_rate
            b += learn_rate
        elif error == -1:
            # Labelled 0 but predicted 1: move the boundary away from the point.
            W[0] -= point[0] * learn_rate
            W[1] -= point[1] * learn_rate
            b -= learn_rate
    return W, b
# This function runs the perceptron algorithm repeatedly on the dataset,
# and returns a few of the boundary lines obtained in the iterations,
# for plotting purposes.
# Feel free to play with the learning rate and the num_epochs,
# and see your results plotted below.
def trainPerceptronAlgorithm(X, y, learn_rate=0.01, num_epochs=25):
    """Run the perceptron algorithm for num_epochs passes over (X, y).

    Returns a list of (slope, intercept) pairs -- one boundary line per
    epoch -- for plotting how the separator evolves.
    """
    # Only the maximum x-coordinate is needed: it offsets the random bias so
    # the initial boundary starts to the right of the data. (The original
    # code also computed x_min/y_min/y_max, which were never used.)
    x_max = max(X.T[0])
    W = np.array(np.random.rand(2, 1))
    b = np.random.rand(1)[0] + x_max
    # These are the solution lines that get plotted below.
    boundary_lines = []
    for _ in range(num_epochs):
        # In each epoch, we apply the perceptron step.
        W, b = perceptronStep(X, y, W, b, learn_rate)
        # Rewrite W[0]*x + W[1]*y + b = 0 as slope/intercept form y = m*x + c.
        boundary_lines.append((-W[0] / W[1], -b / W[1]))
    return boundary_lines
# Load the dataset: columns 0 and 1 are the point coordinates; the last
# column is the binary class label (see data.csv).
df = pd.read_csv('data.csv', header=None)
X = df.iloc[:, :-1]
X = np.array(X)
# Bug fix: the labels live in the LAST column. The previous code read
# column 1 (the second coordinate), so the perceptron was trained against
# a feature instead of the class labels.
y = df.iloc[:, -1]
y = np.array(y)
print(trainPerceptronAlgorithm(X, y))

View File

@@ -0,0 +1,96 @@
0.24539,0.81725,0
0.21774,0.76462,0
0.20161,0.69737,0
0.20161,0.58041,0
0.2477,0.49561,0
0.32834,0.44883,0
0.39516,0.48099,0
0.39286,0.57164,0
0.33525,0.62135,0
0.33986,0.71199,0
0.34447,0.81433,0
0.28226,0.82602,0
0.26613,0.75,0
0.26613,0.63596,0
0.32604,0.54825,0
0.28917,0.65643,0
0.80069,0.71491,0
0.80069,0.64181,0
0.80069,0.50146,0
0.79839,0.36988,0
0.73157,0.25,0
0.63249,0.18275,0
0.60023,0.27047,0
0.66014,0.34649,0
0.70161,0.42251,0
0.70853,0.53947,0
0.71544,0.63304,0
0.74309,0.72076,0
0.75,0.63596,0
0.75,0.46345,0
0.72235,0.35526,0
0.66935,0.28509,0
0.20622,0.94298,1
0.26613,0.8962,1
0.38134,0.8962,1
0.42051,0.94591,1
0.49885,0.86404,1
0.31452,0.93421,1
0.53111,0.72076,1
0.45276,0.74415,1
0.53571,0.6038,1
0.60484,0.71491,1
0.60945,0.58333,1
0.51267,0.47807,1
0.50806,0.59211,1
0.46198,0.30556,1
0.5288,0.41082,1
0.38594,0.35819,1
0.31682,0.31433,1
0.29608,0.20906,1
0.36982,0.27632,1
0.42972,0.18275,1
0.51498,0.10965,1
0.53111,0.20906,1
0.59793,0.095029,1
0.73848,0.086257,1
0.83065,0.18275,1
0.8629,0.10965,1
0.88364,0.27924,1
0.93433,0.30848,1
0.93433,0.19444,1
0.92512,0.43421,1
0.87903,0.43421,1
0.87903,0.58626,1
0.9182,0.71491,1
0.85138,0.8348,1
0.85599,0.94006,1
0.70853,0.94298,1
0.70853,0.87281,1
0.59793,0.93129,1
0.61175,0.83187,1
0.78226,0.82895,1
0.78917,0.8962,1
0.90668,0.89912,1
0.14862,0.92251,1
0.15092,0.85819,1
0.097926,0.85819,1
0.079493,0.91374,1
0.079493,0.77632,1
0.10945,0.79678,1
0.12327,0.67982,1
0.077189,0.6886,1
0.081797,0.58626,1
0.14862,0.58041,1
0.14862,0.5307,1
0.14171,0.41959,1
0.08871,0.49269,1
0.095622,0.36696,1
0.24539,0.3962,1
0.1947,0.29678,1
0.16935,0.22368,1
0.15553,0.13596,1
0.23848,0.12427,1
0.33065,0.12427,1
0.095622,0.2617,1
0.091014,0.20322,1
1 0.24539 0.81725 0
2 0.21774 0.76462 0
3 0.20161 0.69737 0
4 0.20161 0.58041 0
5 0.2477 0.49561 0
6 0.32834 0.44883 0
7 0.39516 0.48099 0
8 0.39286 0.57164 0
9 0.33525 0.62135 0
10 0.33986 0.71199 0
11 0.34447 0.81433 0
12 0.28226 0.82602 0
13 0.26613 0.75 0
14 0.26613 0.63596 0
15 0.32604 0.54825 0
16 0.28917 0.65643 0
17 0.80069 0.71491 0
18 0.80069 0.64181 0
19 0.80069 0.50146 0
20 0.79839 0.36988 0
21 0.73157 0.25 0
22 0.63249 0.18275 0
23 0.60023 0.27047 0
24 0.66014 0.34649 0
25 0.70161 0.42251 0
26 0.70853 0.53947 0
27 0.71544 0.63304 0
28 0.74309 0.72076 0
29 0.75 0.63596 0
30 0.75 0.46345 0
31 0.72235 0.35526 0
32 0.66935 0.28509 0
33 0.20622 0.94298 1
34 0.26613 0.8962 1
35 0.38134 0.8962 1
36 0.42051 0.94591 1
37 0.49885 0.86404 1
38 0.31452 0.93421 1
39 0.53111 0.72076 1
40 0.45276 0.74415 1
41 0.53571 0.6038 1
42 0.60484 0.71491 1
43 0.60945 0.58333 1
44 0.51267 0.47807 1
45 0.50806 0.59211 1
46 0.46198 0.30556 1
47 0.5288 0.41082 1
48 0.38594 0.35819 1
49 0.31682 0.31433 1
50 0.29608 0.20906 1
51 0.36982 0.27632 1
52 0.42972 0.18275 1
53 0.51498 0.10965 1
54 0.53111 0.20906 1
55 0.59793 0.095029 1
56 0.73848 0.086257 1
57 0.83065 0.18275 1
58 0.8629 0.10965 1
59 0.88364 0.27924 1
60 0.93433 0.30848 1
61 0.93433 0.19444 1
62 0.92512 0.43421 1
63 0.87903 0.43421 1
64 0.87903 0.58626 1
65 0.9182 0.71491 1
66 0.85138 0.8348 1
67 0.85599 0.94006 1
68 0.70853 0.94298 1
69 0.70853 0.87281 1
70 0.59793 0.93129 1
71 0.61175 0.83187 1
72 0.78226 0.82895 1
73 0.78917 0.8962 1
74 0.90668 0.89912 1
75 0.14862 0.92251 1
76 0.15092 0.85819 1
77 0.097926 0.85819 1
78 0.079493 0.91374 1
79 0.079493 0.77632 1
80 0.10945 0.79678 1
81 0.12327 0.67982 1
82 0.077189 0.6886 1
83 0.081797 0.58626 1
84 0.14862 0.58041 1
85 0.14862 0.5307 1
86 0.14171 0.41959 1
87 0.08871 0.49269 1
88 0.095622 0.36696 1
89 0.24539 0.3962 1
90 0.1947 0.29678 1
91 0.16935 0.22368 1
92 0.15553 0.13596 1
93 0.23848 0.12427 1
94 0.33065 0.12427 1
95 0.095622 0.2617 1
96 0.091014 0.20322 1

View File

@@ -0,0 +1,30 @@
# Import statements
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

# Read the data: two feature columns followed by a label column.
data = np.asarray(pd.read_csv('data.csv', header=None))

# Assign the features to the variable X, and the labels to the variable y.
X = data[:, :2]
y = data[:, 2]

# TODO: Create the model and assign it to the variable model.
# An RBF kernel with a large gamma fits the boundary tightly around the
# training points, achieving 100% accuracy on this dataset.
model = SVC(kernel='rbf', gamma=27)

# TODO: Fit the model.
model.fit(X, y)

# TODO: Make predictions. Store them in the variable y_pred.
y_pred = model.predict(X)

# TODO: Calculate the accuracy and assign it to the variable acc.
acc = accuracy_score(y, y_pred)