ds: r7 octave

This commit is contained in:
2025-12-25 14:29:00 +03:00
parent 01eccd02ea
commit 1fe3bbbe2e
17 changed files with 2151 additions and 1 deletions

View File

@ -24,7 +24,7 @@ summary(lmmod)
# mr = sd / sqrt(n): вариация
# t = (xm1 - xm2) / sqrt(mr1 ^ 2 + mr2 ^ 2)
# f = (n1 + n2) - 2: степень свободы
# p: по таблице: Отличаются с вероятностью ошибки ...
# p = 0.03: the probability of obtaining test results at least as extreme as the result actually observed
ggplot(df, aes(x = education, y = log_income)) +
geom_point() +

48
ds/25-1/r/7.R Normal file
View File

@ -0,0 +1,48 @@
# Lab 7: logistic regression on the willingness-to-buy survey data.
# NOTE(review): setwd() inside a script is fragile -- the path breaks on any
# other machine; prefer running R from the project directory.
setwd("/home/sek1ro/git/public/lab/ds/25-1/r")
survey <- read.csv("survey.csv")
head(survey)

# Dummy-encode the Price level; the remaining price level is the baseline.
survey$price20 <- ifelse(survey$Price == 20, 1, 0)
survey$price30 <- ifelse(survey$Price == 30, 1, 0)
head(survey)
survey$one <- 1

# https://stats.stackexchange.com/questions/48178/how-to-interpret-the-intercept-term-in-a-glm
# Named arguments (family=, data=) make the glm() call self-documenting.
model <- glm(
  MYDEPV ~ Income + Age + price20 + price30,
  family = binomial(link = "logit"),
  data = survey
)
summary(model)
quantile(residuals(model))
# https://library.virginia.edu/data/articles/understanding-deviance-residuals
# Residuals are the differences between what we observe and what our model
# predicts. Residuals greater than 3 in absolute value lie in the tails of a
# standard normal distribution and usually indicate strain in the model.

# Percent change in the odds of MYDEPV per one-unit increase in Income.
beta_income <- coef(model)["Income"]
pct_income <- (exp(beta_income) - 1) * 100
pct_income

# NOTE(review): the "* 20" scaling below looks suspicious -- price30 is a 0/1
# dummy, so multiplying its coefficient by 20 has no obvious interpretation;
# confirm the intended contrast before trusting this number.
beta_price30 <- coef(model)["price30"]
pct_price30 <- (exp(beta_price30 * 20) - 1) * 100
pct_price30

# Fitted odds and probabilities for every respondent
# (predict() on the link scale gives log-odds; exp() gives odds).
survey$odds_ratio <- exp(predict(model))
survey$prediction <- survey$odds_ratio / (1 + survey$odds_ratio)
head(survey)
sum(survey$MYDEPV)     # observed number of buyers
sum(survey$prediction) # expected number of buyers under the model

# Predicted purchase probability for a new respondent offered price 20.
new_person <- data.frame(
  Income = 58,
  Age = 25,
  price20 = 1,
  price30 = 0
)
prob <- predict(model, new_person, type = "response")
prob

BIN
ds/25-1/r/7/ex2.pdf Normal file

Binary file not shown.

View File

@ -0,0 +1,30 @@
function [J, grad] = costFunction(theta, X, y)
%COSTFUNCTION Compute cost and gradient for logistic regression
%   [J, grad] = COSTFUNCTION(theta, X, y) returns the mean cross-entropy
%   cost J of the hypothesis sigmoid(X * theta) against the 0/1 labels y,
%   together with the gradient of that cost w.r.t. theta.
%   grad has the same dimensions as theta.

m = length(y); % number of training examples

% Hypothesis value for every training example at once.
h = sigmoid(X * theta);

% Per-example cross-entropy, then average over the m examples.
losses = - y .* log(h) - (1 - y) .* log(1 - h);
J = 1 / m * sum(losses, 1);

% Fully vectorized gradient: (1/m) * X' * (h - y).
grad = 1 / m * (X' * (h - y));
end

View File

@ -0,0 +1,27 @@
function [J, grad] = costFunctionReg(theta, X, y, lambda)
%COSTFUNCTIONREG Compute cost and gradient for logistic regression with regularization
%   [J, grad] = COSTFUNCTIONREG(theta, X, y, lambda) computes the cost of
%   using theta as the parameter for regularized logistic regression and
%   the gradient of the cost w.r.t. the parameters.
%
%   Bug fix: the regularization term must NOT penalize the intercept
%   theta(1). The original added lambda/(2m) * theta' * theta, which
%   includes theta(1)^2 in J even though the gradient correctly left
%   grad(1) unregularized, making J inconsistent with grad.

m = length(y); % number of training examples

% Unregularized cost and gradient.
[J_unreg, grad_unreg] = costFunction(theta, X, y);

% Zero out the intercept so it contributes nothing to the penalty
% (in either the cost or the gradient).
theta_reg = theta;
theta_reg(1) = 0;

J = J_unreg + lambda / (2 * m) * (theta_reg' * theta_reg);
grad = grad_unreg + lambda / m * theta_reg;
end

View File

@ -0,0 +1,135 @@
%% Machine Learning Online Class - Exercise 2: Logistic Regression
%
% Instructions
% ------------
%
% This file contains code that helps you get started on the logistic
% regression exercise. You will need to complete the following functions
% in this exercise:
%
% sigmoid.m
% costFunction.m
% predict.m
% costFunctionReg.m
%
% For this exercise, you will not need to change any code in this file,
% or any other files other than those mentioned above.
%
%% Initialization
clear ; close all; clc
%% Load Data
% The first two columns contain the exam scores and the third column
% contains the label.
data = load('ex2data1.txt');
X = data(:, [1, 2]); y = data(:, 3);
%% ==================== Part 1: Plotting ====================
% We start the exercise by first plotting the data to understand the
% problem we are working with.
fprintf(['Plotting data with + indicating (y = 1) examples and o ' ...
'indicating (y = 0) examples.\n']);
plotData(X, y);
% Put some labels
hold on;
% Labels and Legend
xlabel('Exam 1 score')
ylabel('Exam 2 score')
% Specified in plot order
legend('Admitted', 'Not admitted')
hold off;
fprintf('\nProgram paused. Press enter to continue.\n');
pause;
%% ============ Part 2: Compute Cost and Gradient ============
% In this part of the exercise, you will implement the cost and gradient
% for logistic regression. You need to complete the code in
% costFunction.m
% Setup the data matrix appropriately, and add ones for the intercept term
[m, n] = size(X);
% Add intercept term to x and X_test
X = [ones(m, 1) X];
% Initialize fitting parameters
initial_theta = zeros(n + 1, 1);
% Compute and display initial cost and gradient
[cost, grad] = costFunction(initial_theta, X, y);
fprintf('Cost at initial theta (zeros): %f\n', cost);
fprintf('Gradient at initial theta (zeros): \n');
fprintf(' %f \n', grad);
fprintf('\nProgram paused. Press enter to continue.\n');
pause;
%% ============= Part 3: Optimizing using fminunc =============
% In this exercise, you will use a built-in function (fminunc) to find the
% optimal parameters theta.
% Set options for fminunc: use the user-supplied gradient, cap iterations
options = optimset('GradObj', 'on', 'MaxIter', 400);
% Run fminunc to obtain the optimal theta
% This function will return theta and the cost
[theta, cost] = ...
fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);
% Print theta to screen
fprintf('Cost at theta found by fminunc: %f\n', cost);
fprintf('theta: \n');
fprintf(' %f \n', theta);
% Plot Boundary
plotDecisionBoundary(theta, X, y);
% Put some labels
hold on;
% Labels and Legend
xlabel('Exam 1 score')
ylabel('Exam 2 score')
% Specified in plot order
legend('Admitted', 'Not admitted')
hold off;
fprintf('\nProgram paused. Press enter to continue.\n');
pause;
%% ============== Part 4: Predict and Accuracies ==============
% After learning the parameters, you'd like to use them to predict the
% outcomes on unseen data. In this part, you will use the logistic
% regression model to predict the probability that a student with score
% 45 on exam 1 and score 85 on exam 2 will be admitted (the original
% comment said 20/80, which did not match the code below).
%
% Furthermore, you will compute the training and test set accuracies of
% our model.
%
% Your task is to complete the code in predict.m
% Predict probability for a student with score 45 on exam 1
% and score 85 on exam 2
prob = sigmoid([1 45 85] * theta);
fprintf(['For a student with scores 45 and 85, we predict an admission ' ...
'probability of %f\n\n'], prob);
% Compute accuracy on our training set
p = predict(theta, X);
fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
fprintf('\nProgram paused. Press enter to continue.\n');
pause;

View File

@ -0,0 +1,124 @@
%% Machine Learning Online Class - Exercise 2: Logistic Regression
%
% Instructions
% ------------
%
% This file contains code that helps you get started on the second part
% of the exercise which covers regularization with logistic regression.
%
% You will need to complete the following functions in this exercise:
%
% sigmoid.m
% costFunction.m
% predict.m
% costFunctionReg.m
%
% For this exercise, you will not need to change any code in this file,
% or any other files other than those mentioned above.
%
%% Initialization
clear ; close all; clc
%% Load Data
% The first two columns contain the test scores and the third column
% contains the label.
data = load('ex2data2.txt');
X = data(:, [1, 2]); y = data(:, 3);
plotData(X, y);
% Put some labels
hold on;
% Labels and Legend
xlabel('Microchip Test 1')
ylabel('Microchip Test 2')
% Specified in plot order
legend('y = 1', 'y = 0')
hold off;
%% =========== Part 1: Regularized Logistic Regression ============
% In this part, you are given a dataset with data points that are not
% linearly separable. However, you would still like to use logistic
% regression to classify the data points.
%
% To do so, you introduce more features to use -- in particular, you add
% polynomial features to our data matrix (similar to polynomial
% regression).
%
% Add Polynomial Features
% Note that mapFeature also adds a column of ones for us, so the intercept
% term is handled
X = mapFeature(X(:,1), X(:,2));
% Initialize fitting parameters
initial_theta = zeros(size(X, 2), 1);
% Set regularization parameter lambda to 1
lambda = 1;
% Compute and display initial cost and gradient for regularized logistic
% regression
[cost, grad] = costFunctionReg(initial_theta, X, y, lambda);
fprintf('Cost at initial theta (zeros): %f\n', cost);
fprintf('\nProgram paused. Press enter to continue.\n');
pause;
%% ============= Part 2: Regularization and Accuracies =============
% Optional Exercise:
% In this part, you will get to try different values of lambda and
% see how regularization affects the decision boundary
%
% Try the following values of lambda (0, 1, 10, 100).
%
% How does the decision boundary change when you vary lambda? How does
% the training set accuracy vary?
%
% Initialize fitting parameters
initial_theta = zeros(size(X, 2), 1);
% Set regularization parameter lambda to 1 (you should vary this)
lambda = 1;
% Set Options
options = optimset('GradObj', 'on', 'MaxIter', 400);
% Baseline fit WITHOUT regularization (plain costFunction) for comparison
[theta, J, exit_flag] = ...
fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);
plotDecisionBoundary(theta, X, y);
p = predict(theta, X);
fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
% Optimize with regularization (costFunctionReg, using the lambda above)
[theta, J, exit_flag] = ...
fminunc(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);
% Plot Boundary
plotDecisionBoundary(theta, X, y);
hold on;
title(sprintf('lambda = %g', lambda))
% Labels and Legend
xlabel('Microchip Test 1')
ylabel('Microchip Test 2')
legend('y = 1', 'y = 0', 'Decision boundary')
hold off;
% Compute accuracy on our training set
p = predict(theta, X);
fprintf('Train Accuracy Reg: %f\n', mean(double(p == y)) * 100);

View File

@ -0,0 +1,100 @@
34.62365962451697,78.0246928153624,0
30.28671076822607,43.89499752400101,0
35.84740876993872,72.90219802708364,0
60.18259938620976,86.30855209546826,1
79.0327360507101,75.3443764369103,1
45.08327747668339,56.3163717815305,0
61.10666453684766,96.51142588489624,1
75.02474556738889,46.55401354116538,1
76.09878670226257,87.42056971926803,1
84.43281996120035,43.53339331072109,1
95.86155507093572,38.22527805795094,0
75.01365838958247,30.60326323428011,0
82.30705337399482,76.48196330235604,1
69.36458875970939,97.71869196188608,1
39.53833914367223,76.03681085115882,0
53.9710521485623,89.20735013750205,1
69.07014406283025,52.74046973016765,1
67.94685547711617,46.67857410673128,0
70.66150955499435,92.92713789364831,1
76.97878372747498,47.57596364975532,1
67.37202754570876,42.83843832029179,0
89.67677575072079,65.79936592745237,1
50.534788289883,48.85581152764205,0
34.21206097786789,44.20952859866288,0
77.9240914545704,68.9723599933059,1
62.27101367004632,69.95445795447587,1
80.1901807509566,44.82162893218353,1
93.114388797442,38.80067033713209,0
61.83020602312595,50.25610789244621,0
38.78580379679423,64.99568095539578,0
61.379289447425,72.80788731317097,1
85.40451939411645,57.05198397627122,1
52.10797973193984,63.12762376881715,0
52.04540476831827,69.43286012045222,1
40.23689373545111,71.16774802184875,0
54.63510555424817,52.21388588061123,0
33.91550010906887,98.86943574220611,0
64.17698887494485,80.90806058670817,1
74.78925295941542,41.57341522824434,0
34.1836400264419,75.2377203360134,0
83.90239366249155,56.30804621605327,1
51.54772026906181,46.85629026349976,0
94.44336776917852,65.56892160559052,1
82.36875375713919,40.61825515970618,0
51.04775177128865,45.82270145776001,0
62.22267576120188,52.06099194836679,0
77.19303492601364,70.45820000180959,1
97.77159928000232,86.7278223300282,1
62.07306379667647,96.76882412413983,1
91.56497449807442,88.69629254546599,1
79.94481794066932,74.16311935043758,1
99.2725269292572,60.99903099844988,1
90.54671411399852,43.39060180650027,1
34.52451385320009,60.39634245837173,0
50.2864961189907,49.80453881323059,0
49.58667721632031,59.80895099453265,0
97.64563396007767,68.86157272420604,1
32.57720016809309,95.59854761387875,0
74.24869136721598,69.82457122657193,1
71.79646205863379,78.45356224515052,1
75.3956114656803,85.75993667331619,1
35.28611281526193,47.02051394723416,0
56.25381749711624,39.26147251058019,0
30.05882244669796,49.59297386723685,0
44.66826172480893,66.45008614558913,0
66.56089447242954,41.09209807936973,0
40.45755098375164,97.53518548909936,1
49.07256321908844,51.88321182073966,0
80.27957401466998,92.11606081344084,1
66.74671856944039,60.99139402740988,1
32.72283304060323,43.30717306430063,0
64.0393204150601,78.03168802018232,1
72.34649422579923,96.22759296761404,1
60.45788573918959,73.09499809758037,1
58.84095621726802,75.85844831279042,1
99.82785779692128,72.36925193383885,1
47.26426910848174,88.47586499559782,1
50.45815980285988,75.80985952982456,1
60.45555629271532,42.50840943572217,0
82.22666157785568,42.71987853716458,0
88.9138964166533,69.80378889835472,1
94.83450672430196,45.69430680250754,1
67.31925746917527,66.58935317747915,1
57.23870631569862,59.51428198012956,1
80.36675600171273,90.96014789746954,1
68.46852178591112,85.59430710452014,1
42.0754545384731,78.84478600148043,0
75.47770200533905,90.42453899753964,1
78.63542434898018,96.64742716885644,1
52.34800398794107,60.76950525602592,0
94.09433112516793,77.15910509073893,1
90.44855097096364,87.50879176484702,1
55.48216114069585,35.57070347228866,0
74.49269241843041,84.84513684930135,1
89.84580670720979,45.35828361091658,1
83.48916274498238,48.38028579728175,1
42.2617008099817,87.10385094025457,1
99.31500880510394,68.77540947206617,1
55.34001756003703,64.9319380069486,1
74.77589300092767,89.52981289513276,1

View File

@ -0,0 +1,118 @@
0.051267,0.69956,1
-0.092742,0.68494,1
-0.21371,0.69225,1
-0.375,0.50219,1
-0.51325,0.46564,1
-0.52477,0.2098,1
-0.39804,0.034357,1
-0.30588,-0.19225,1
0.016705,-0.40424,1
0.13191,-0.51389,1
0.38537,-0.56506,1
0.52938,-0.5212,1
0.63882,-0.24342,1
0.73675,-0.18494,1
0.54666,0.48757,1
0.322,0.5826,1
0.16647,0.53874,1
-0.046659,0.81652,1
-0.17339,0.69956,1
-0.47869,0.63377,1
-0.60541,0.59722,1
-0.62846,0.33406,1
-0.59389,0.005117,1
-0.42108,-0.27266,1
-0.11578,-0.39693,1
0.20104,-0.60161,1
0.46601,-0.53582,1
0.67339,-0.53582,1
-0.13882,0.54605,1
-0.29435,0.77997,1
-0.26555,0.96272,1
-0.16187,0.8019,1
-0.17339,0.64839,1
-0.28283,0.47295,1
-0.36348,0.31213,1
-0.30012,0.027047,1
-0.23675,-0.21418,1
-0.06394,-0.18494,1
0.062788,-0.16301,1
0.22984,-0.41155,1
0.2932,-0.2288,1
0.48329,-0.18494,1
0.64459,-0.14108,1
0.46025,0.012427,1
0.6273,0.15863,1
0.57546,0.26827,1
0.72523,0.44371,1
0.22408,0.52412,1
0.44297,0.67032,1
0.322,0.69225,1
0.13767,0.57529,1
-0.0063364,0.39985,1
-0.092742,0.55336,1
-0.20795,0.35599,1
-0.20795,0.17325,1
-0.43836,0.21711,1
-0.21947,-0.016813,1
-0.13882,-0.27266,1
0.18376,0.93348,0
0.22408,0.77997,0
0.29896,0.61915,0
0.50634,0.75804,0
0.61578,0.7288,0
0.60426,0.59722,0
0.76555,0.50219,0
0.92684,0.3633,0
0.82316,0.27558,0
0.96141,0.085526,0
0.93836,0.012427,0
0.86348,-0.082602,0
0.89804,-0.20687,0
0.85196,-0.36769,0
0.82892,-0.5212,0
0.79435,-0.55775,0
0.59274,-0.7405,0
0.51786,-0.5943,0
0.46601,-0.41886,0
0.35081,-0.57968,0
0.28744,-0.76974,0
0.085829,-0.75512,0
0.14919,-0.57968,0
-0.13306,-0.4481,0
-0.40956,-0.41155,0
-0.39228,-0.25804,0
-0.74366,-0.25804,0
-0.69758,0.041667,0
-0.75518,0.2902,0
-0.69758,0.68494,0
-0.4038,0.70687,0
-0.38076,0.91886,0
-0.50749,0.90424,0
-0.54781,0.70687,0
0.10311,0.77997,0
0.057028,0.91886,0
-0.10426,0.99196,0
-0.081221,1.1089,0
0.28744,1.087,0
0.39689,0.82383,0
0.63882,0.88962,0
0.82316,0.66301,0
0.67339,0.64108,0
1.0709,0.10015,0
-0.046659,-0.57968,0
-0.23675,-0.63816,0
-0.15035,-0.36769,0
-0.49021,-0.3019,0
-0.46717,-0.13377,0
-0.28859,-0.060673,0
-0.61118,-0.067982,0
-0.66302,-0.21418,0
-0.59965,-0.41886,0
-0.72638,-0.082602,0
-0.83007,0.31213,0
-0.72062,0.53874,0
-0.59389,0.49488,0
-0.48445,0.99927,0
-0.0063364,0.99927,0
0.63265,-0.030612,0

View File

@ -0,0 +1,21 @@
function out = mapFeature(X1, X2)
% MAPFEATURE Feature mapping function to polynomial features
%
%   MAPFEATURE(X1, X2) maps the two input features to all polynomial
%   terms up to the sixth degree: a leading column of ones (intercept)
%   followed by every monomial X1.^(i-j) .* X2.^j for i = 1..degree,
%   j = 0..i (i.e. 1, X1, X2, X1.^2, X1.*X2, X2.^2, ...).
%
%   Inputs X1, X2 must be the same size (column vectors or scalars);
%   the output has one row per input element.
%
degree = 6;
% Preallocate instead of growing out(:, end+1) inside the loop:
% total columns = 1 + sum_{i=1}^{degree} (i + 1) = (degree+1)(degree+2)/2.
num_terms = (degree + 1) * (degree + 2) / 2;
m = size(X1(:, 1), 1);
out = ones(m, num_terms); % column 1 stays all-ones (intercept)
col = 2;
for i = 1:degree
    for j = 0:i
        out(:, col) = (X1.^(i-j)) .* (X2.^j);
        col = col + 1;
    end
end
end

View File

@ -0,0 +1,27 @@
function plotData(X, y)
%PLOTDATA Scatter-plot a binary-labelled 2-D dataset in a new figure.
%   PLOTDATA(X, y) draws the rows of the Mx2 matrix X, using a black '+'
%   (LineWidth 2) for examples with y == 1 and a yellow-filled black 'o'
%   for examples with y == 0.

% Create New Figure
figure; hold on;

% Logical masks replace the original find()-based index vectors;
% the selected rows (and their order) are identical.
is_pos = (y == 1);
is_neg = (y == 0);

plot(X(is_pos, 1), X(is_pos, 2), 'k+', 'LineWidth', 2, 'MarkerSize', 7);
plot(X(is_neg, 1), X(is_neg, 2), 'ko', 'MarkerFaceColor', 'y', 'MarkerSize', 7);

hold off;
end

View File

@ -0,0 +1,48 @@
function plotDecisionBoundary(theta, X, y)
%PLOTDECISIONBOUNDARY Plots the data points X and y into a new figure with
%the decision boundary defined by theta
%   PLOTDECISIONBOUNDARY(theta, X, y) plots the data points with + for the
%   positive examples and o for the negative examples. X is assumed to be
%   either
%   1) Mx3 matrix, where the first column is an all-ones column for the
%      intercept (boundary drawn as a straight line), or
%   2) MxN, N>3 matrix, where the first column is all-ones (boundary drawn
%      as the zero contour of the polynomial feature mapping).
% Plot Data (skip the all-ones intercept column; plotData expects Mx2)
plotData(X(:,2:3), y);
hold on
if size(X, 2) <= 3
% Only need 2 points to define a line, so choose two endpoints
% just outside the observed range of the first feature
plot_x = [min(X(:,2))-2, max(X(:,2))+2];
% Boundary is where theta(1) + theta(2)*x2 + theta(3)*x3 = 0,
% i.e. x3 = -(theta(2)*x2 + theta(1)) / theta(3)
plot_y = (-1./theta(3)).*(theta(2).*plot_x + theta(1));
% Plot, and adjust axes for better viewing
plot(plot_x, plot_y)
% Legend, specific for the exercise
legend('Admitted', 'Not admitted', 'Decision Boundary')
axis([30, 100, 30, 100])
else
% Here is the grid range
u = linspace(-1, 1.5, 50);
v = linspace(-1, 1.5, 50);
z = zeros(length(u), length(v));
% Evaluate z = theta*x over the grid; mapFeature expands each grid
% point into the full polynomial feature row vector
for i = 1:length(u)
for j = 1:length(v)
z(i,j) = mapFeature(u(i), v(j))*theta;
end
end
z = z'; % important to transpose z before calling contour
% Plot the z = 0 level set, i.e. the decision boundary
% Notice you need to specify the range [0, 0]
contour(u, v, z, [0, 0], 'LineWidth', 2)
end
hold off
end

View File

@ -0,0 +1,23 @@
function p = predict(theta, X)
%PREDICT Predict whether the label is 0 or 1 using learned logistic
%regression parameters theta
%   p = PREDICT(theta, X) computes the predictions for the rows of X
%   using a threshold at 0.5 (i.e., if sigmoid(theta'*x) >= 0.5,
%   predict 1, otherwise 0).

% Score every example, then threshold the predicted probability at 0.5.
scores = X * theta;
p = sigmoid(scores) >= 0.5;
end

View File

@ -0,0 +1,16 @@
function g = sigmoid(z)
%SIGMOID Compute the logistic sigmoid function
%   g = SIGMOID(z) returns 1 ./ (1 + exp(-z)) elementwise; z may be a
%   scalar, vector, or matrix, and g has the same shape as z.

% Elementwise logistic function; this form stays finite for large
% positive z because exp(-z) underflows to 0.
g = 1 ./ (1 + exp(-z));
end

View File

@ -0,0 +1,333 @@
function submit(partId)
%SUBMIT Submit your code and output to the ml-class servers
%   SUBMIT() will connect to the ml-class server and submit your solution.
%   SUBMIT(partId) submits only the given part; when partId equals
%   numel(validParts()) + 1, every part is submitted in turn.
fprintf('==\n== [ml-class] Submitting Solutions | Programming Exercise %s\n==\n', ...
homework_id());
% Prompt interactively when no part was supplied.
if ~exist('partId', 'var') || isempty(partId)
partId = promptPart();
end
% Check valid partId
partNames = validParts();
if ~isValidPartId(partId)
fprintf('!! Invalid homework part selected.\n');
fprintf('!! Expected an integer from 1 to %d.\n', numel(partNames) + 1);
fprintf('!! Submission Cancelled\n');
return
end
[login password] = loginPrompt();
if isempty(login)
fprintf('!! Submission Cancelled\n');
return
end
fprintf('\n== Connecting to ml-class ... ');
% Flush stdout under Octave so the progress line shows before the network call.
if exist('OCTAVE_VERSION')
fflush(stdout);
end
% Setup submit list
if partId == numel(partNames) + 1
submitParts = 1:numel(partNames);
else
submitParts = [partId];
end
for s = 1:numel(submitParts)
% Submit this part
partId = submitParts(s);
% Get Challenge
[login, ch, signature] = getChallenge(login);
if isempty(login) || isempty(ch) || isempty(signature)
% Some error occurred, error string in first return element.
fprintf('\n!! Error: %s\n\n', login);
return
end
% Attempt Submission with Challenge
ch_resp = challengeResponse(login, password, ch);
[result, str] = submitSolution(login, ch_resp, partId, output(partId), ...
source(partId), signature);
fprintf('\n== [ml-class] Submitted Homework %s - Part %d - %s\n', ...
homework_id(), partId, partNames{partId});
fprintf('== %s\n', strtrim(str));
if exist('OCTAVE_VERSION')
fflush(stdout);
end
end
end
% ================== CONFIGURABLES FOR EACH HOMEWORK ==================
function id = homework_id()
% Identifier of this programming exercise (exercise 2).
id = '2';
end
function [partNames] = validParts()
% Human-readable names of the gradeable parts, in submission order.
partNames = { 'Sigmoid Function ', ...
'Logistic Regression Cost', ...
'Logistic Regression Gradient', ...
'Predict', ...
'Regularized Logistic Regression Cost' ...
'Regularized Logistic Regression Gradient' ...
};
end
function srcs = sources()
% Source files uploaded for each part; cost and gradient parts share a file.
% Separated by part
srcs = { { 'sigmoid.m' }, ...
{ 'costFunction.m' }, ...
{ 'costFunction.m' }, ...
{ 'predict.m' }, ...
{ 'costFunctionReg.m' }, ...
{ 'costFunctionReg.m' } };
end
function out = output(partId)
% Evaluate the student's solution for the given part on fixed test data
% and format the result as a space-separated string of %0.5f numbers.
% Random Test Cases
X = [ones(20,1) (exp(1) * sin(1:1:20))' (exp(0.5) * cos(1:1:20))'];
y = sin(X(:,1) + X(:,2)) > 0;
if partId == 1
out = sprintf('%0.5f ', sigmoid(X));
elseif partId == 2
out = sprintf('%0.5f ', costFunction([0.25 0.5 -0.5]', X, y));
elseif partId == 3
[cost, grad] = costFunction([0.25 0.5 -0.5]', X, y);
out = sprintf('%0.5f ', grad);
elseif partId == 4
out = sprintf('%0.5f ', predict([0.25 0.5 -0.5]', X));
elseif partId == 5
out = sprintf('%0.5f ', costFunctionReg([0.25 0.5 -0.5]', X, y, 0.1));
elseif partId == 6
[cost, grad] = costFunctionReg([0.25 0.5 -0.5]', X, y, 0.1);
out = sprintf('%0.5f ', grad);
end
end
function url = challenge_url()
% Server endpoint that issues a login challenge.
url = 'http://www.ml-class.org/course/homework/challenge';
end
function url = submit_url()
% Server endpoint that receives the submission.
url = 'http://www.ml-class.org/course/homework/submit';
end
% ========================= CHALLENGE HELPERS =========================
function src = source(partId)
% Concatenate the source file(s) for a part into one string; each file is
% followed by the '||||||||' delimiter expected by the server.
src = '';
src_files = sources();
if partId <= numel(src_files)
flist = src_files{partId};
for i = 1:numel(flist)
fid = fopen(flist{i});
while ~feof(fid)
line = fgets(fid);
src = [src line];
end
fclose(fid);
src = [src '||||||||'];
end
end
end
function ret = isValidPartId(partId)
% A part id is valid if it names a real part or the extra "all parts" choice.
partNames = validParts();
ret = (~isempty(partId)) && (partId >= 1) && (partId <= numel(partNames) + 1);
end
function partId = promptPart()
% Interactively ask which part(s) to submit; returns -1 on invalid input.
% NOTE(review): the homework_id() argument below is unused by the format
% string -- harmless, but presumably left over from an earlier message.
fprintf('== Select which part(s) to submit:\n', ...
homework_id());
partNames = validParts();
srcFiles = sources();
for i = 1:numel(partNames)
fprintf('== %d) %s [', i, partNames{i});
fprintf(' %s ', srcFiles{i}{:});
fprintf(']\n');
end
fprintf('== %d) All of the above \n==\nEnter your choice [1-%d]: ', ...
numel(partNames) + 1, numel(partNames) + 1);
selPart = input('', 's');
partId = str2num(selPart);
if ~isValidPartId(partId)
partId = -1;
end
end
function [email,ch,signature] = getChallenge(email)
% Request a challenge string and signature for this email from the server.
% The reply is pipe-delimited: email|challenge|signature. On error the
% first return value presumably carries the server's error string instead
% (see the caller's isempty checks) -- TODO confirm against server behavior.
str = urlread(challenge_url(), 'post', {'email_address', email});
str = strtrim(str);
[email, str] = strtok (str, '|');
[ch, str] = strtok (str, '|');
[signature, str] = strtok (str, '|');
end
function [result, str] = submitSolution(email, ch_resp, part, output, ...
source, signature)
% POST one part's output and source to the server; str is the raw reply.
params = {'homework', homework_id(), ...
'part', num2str(part), ...
'email', email, ...
'output', output, ...
'source', source, ...
'challenge_response', ch_resp, ...
'signature', signature};
str = urlread(submit_url(), 'post', params);
% Parse str to read for success / failure
% NOTE(review): result is always 0 -- the server reply is never parsed.
result = 0;
end
% =========================== LOGIN HELPERS ===========================
function [login password] = loginPrompt()
% Prompt for login and password; both are emptied if either is missing,
% so the caller only has to check one of them.
[login password] = basicPrompt();
if isempty(login) || isempty(password)
login = []; password = [];
end
end
function [login password] = basicPrompt()
% Read the email address and password from standard input.
login = input('Login (Email address): ', 's');
password = input('Password: ', 's');
end
function [str] = challengeResponse(email, passwd, challenge)
% Hash the challenge together with a salted credential digest, then return
% 16 characters of the hash picked at random positions (kept in order).
% NOTE(review): randperm makes the response nondeterministic; presumably
% the server validates it as a subsequence of the full hash -- confirm.
salt = ')~/|]QMB3[!W`?OVt7qC"@+}';
str = sha1([challenge sha1([salt email passwd])]);
sel = randperm(numel(str));
sel = sort(sel(1:16));
str = str(sel);
end
% =============================== SHA-1 ================================
function hash = sha1(str)
% Self-contained SHA-1 over a character string; returns the 40-character
% lowercase hex digest. Implemented in plain M-code so submission does
% not depend on an external hashing library.
% NOTE(review): padding below assumes the message layout works out for
% the input lengths used here; verify edge cases near a 512-bit boundary.
% Initialize variables (the five standard SHA-1 state words, in decimal)
h0 = uint32(1732584193);
h1 = uint32(4023233417);
h2 = uint32(2562383102);
h3 = uint32(271733878);
h4 = uint32(3285377520);
% Convert to word array
strlen = numel(str);
% Break string into chars and append the bit 1 to the message (0x80 byte)
mC = [double(str) 128];
mC = [mC zeros(1, 4-mod(numel(mC), 4), 'uint8')];
numB = strlen * 8;
% Number of 512-bit chunks after padding (+65 = 1 marker bit + 64-bit length)
if exist('idivide')
numC = idivide(uint32(numB + 65), 512, 'ceil');
else
numC = ceil(double(numB + 65)/512);
end
numW = numC * 16;
mW = zeros(numW, 1, 'uint32');
idx = 1;
% Pack four bytes (big-endian) into each 32-bit word
for i = 1:4:strlen + 1
mW(idx) = bitor(bitor(bitor( ...
bitshift(uint32(mC(i)), 24), ...
bitshift(uint32(mC(i+1)), 16)), ...
bitshift(uint32(mC(i+2)), 8)), ...
uint32(mC(i+3)));
idx = idx + 1;
end
% Append length of message (in bits) as a trailing big-endian 64-bit value
mW(numW - 1) = uint32(bitshift(uint64(numB), -32));
mW(numW) = uint32(bitshift(bitshift(uint64(numB), 32), -32));
% Process the message in successive 512-bit chs
for cId = 1 : double(numC)
cSt = (cId - 1) * 16 + 1;
cEnd = cId * 16;
ch = mW(cSt : cEnd);
% Extend the sixteen 32-bit words into eighty 32-bit words
for j = 17 : 80
ch(j) = ch(j - 3);
ch(j) = bitxor(ch(j), ch(j - 8));
ch(j) = bitxor(ch(j), ch(j - 14));
ch(j) = bitxor(ch(j), ch(j - 16));
ch(j) = bitrotate(ch(j), 1);
end
% Initialize hash value for this ch
a = h0;
b = h1;
c = h2;
d = h3;
e = h4;
% Main loop: four rounds of twenty steps, each with its own f and k
for i = 1 : 80
if(i >= 1 && i <= 20)
f = bitor(bitand(b, c), bitand(bitcmp(b), d));
k = uint32(1518500249);
elseif(i >= 21 && i <= 40)
f = bitxor(bitxor(b, c), d);
k = uint32(1859775393);
elseif(i >= 41 && i <= 60)
f = bitor(bitor(bitand(b, c), bitand(b, d)), bitand(c, d));
k = uint32(2400959708);
elseif(i >= 61 && i <= 80)
f = bitxor(bitxor(b, c), d);
k = uint32(3395469782);
end
t = bitrotate(a, 5);
t = bitadd(t, f);
t = bitadd(t, e);
t = bitadd(t, k);
t = bitadd(t, ch(i));
e = d;
d = c;
c = bitrotate(b, 30);
b = a;
a = t;
end
% Fold this chunk's result into the running state (mod 2^32 adds)
h0 = bitadd(h0, a);
h1 = bitadd(h1, b);
h2 = bitadd(h2, c);
h3 = bitadd(h3, d);
h4 = bitadd(h4, e);
end
hash = reshape(dec2hex(double([h0 h1 h2 h3 h4]), 8)', [1 40]);
hash = lower(hash);
end
function ret = bitadd(iA, iB)
% Sum of two uint32 words modulo 2^32 (the carry out of bit 32 is dropped).
total = double(iA) + double(iB);
total = bitset(total, 33, 0); % clear bit 33, i.e. reduce mod 2^32
ret = uint32(total);
end
function ret = bitrotate(iA, places)
% Rotate a uint32 word left by `places` bits.
wrapped = bitshift(iA, places - 32); % bits that fall off the top
shifted = bitshift(iA, places);      % shifted body, overflow discarded
ret = bitor(shifted, wrapped);
end

View File

@ -0,0 +1,349 @@
function submitWeb(partId)
%SUBMITWEB Generates a base64 encoded string for web-based submissions
%   SUBMITWEB() will generate a base64 encoded string so that you can
%   submit your solutions via a web form instead of a direct connection.
%   The encoded blob is written to submit_ex<id>_part<part>.txt.
%
%   Bug fix: fopen returns -1 (a truthy value) on failure, so the original
%   `if (fid)` could never take the error branch; test `fid ~= -1` instead.
fprintf('==\n== [ml-class] Submitting Solutions | Programming Exercise %s\n==\n', ...
homework_id());
if ~exist('partId', 'var') || isempty(partId)
partId = promptPart();
end
% Check valid partId
partNames = validParts();
if ~isValidPartId(partId)
fprintf('!! Invalid homework part selected.\n');
fprintf('!! Expected an integer from 1 to %d.\n', numel(partNames));
fprintf('!! Submission Cancelled\n');
return
end
% Only the login (email) is needed for web submission; no password.
[login] = loginPrompt();
if isempty(login)
fprintf('!! Submission Cancelled\n');
return
end
% Serialize the solution and base64-encode it for the web form.
[result] = submitSolution(login, partId, output(partId), ...
source(partId));
result = base64encode(result);
fprintf('\nSave as submission file [submit_ex%s_part%d.txt]: ', ...
homework_id(), partId);
saveAsFile = input('', 's');
if (isempty(saveAsFile))
saveAsFile = sprintf('submit_ex%s_part%d.txt', homework_id(), partId);
end
fid = fopen(saveAsFile, 'w');
if (fid ~= -1)
fwrite(fid, result);
fclose(fid);
fprintf('\nSaved your solutions to %s.\n\n', saveAsFile);
fprintf(['You can now submit your solutions through the web \n' ...
'form in the programming exercises. Select the corresponding \n' ...
'programming exercise to access the form.\n']);
else
fprintf('Unable to save to %s\n\n', saveAsFile);
fprintf(['You can create a submission file by saving the \n' ...
'following text in a file: (press enter to continue)\n\n']);
pause;
fprintf(result);
end
end
% ================== CONFIGURABLES FOR EACH HOMEWORK ==================
% NOTE(review): this section duplicates the configuration in submit.m;
% keep the two in sync when the part list changes.
function id = homework_id()
% Identifier of this programming exercise (exercise 2).
id = '2';
end
function [partNames] = validParts()
% Human-readable names of the gradeable parts, in submission order.
partNames = { 'Sigmoid Function ', ...
'Logistic Regression Cost', ...
'Logistic Regression Gradient', ...
'Predict', ...
'Regularized Logistic Regression Cost' ...
'Regularized Logistic Regression Gradient' ...
};
end
function srcs = sources()
% Source files uploaded for each part; cost and gradient parts share a file.
% Separated by part
srcs = { { 'sigmoid.m' }, ...
{ 'costFunction.m' }, ...
{ 'costFunction.m' }, ...
{ 'predict.m' }, ...
{ 'costFunctionReg.m' }, ...
{ 'costFunctionReg.m' } };
end
function out = output(partId)
% Evaluate the student's solution for the given part on fixed test data
% and format the result as a space-separated string of %0.5f numbers.
% Random Test Cases
X = [ones(20,1) (exp(1) * sin(1:1:20))' (exp(0.5) * cos(1:1:20))'];
y = sin(X(:,1) + X(:,2)) > 0;
if partId == 1
out = sprintf('%0.5f ', sigmoid(X));
elseif partId == 2
out = sprintf('%0.5f ', costFunction([0.25 0.5 -0.5]', X, y));
elseif partId == 3
[cost, grad] = costFunction([0.25 0.5 -0.5]', X, y);
out = sprintf('%0.5f ', grad);
elseif partId == 4
out = sprintf('%0.5f ', predict([0.25 0.5 -0.5]', X));
elseif partId == 5
out = sprintf('%0.5f ', costFunctionReg([0.25 0.5 -0.5]', X, y, 0.1));
elseif partId == 6
[cost, grad] = costFunctionReg([0.25 0.5 -0.5]', X, y, 0.1);
out = sprintf('%0.5f ', grad);
end
end
% ========================= SUBMIT HELPERS =========================
function src = source(partId)
% Concatenate the source file(s) for a part into one string; each file is
% followed by the '||||||||' delimiter expected by the grader.
src = '';
src_files = sources();
if partId <= numel(src_files)
flist = src_files{partId};
for i = 1:numel(flist)
fid = fopen(flist{i});
while ~feof(fid)
line = fgets(fid);
src = [src line];
end
fclose(fid);
src = [src '||||||||'];
end
end
end
function ret = isValidPartId(partId)
% Valid ids name a single part only (no "all parts" option in web mode).
partNames = validParts();
ret = (~isempty(partId)) && (partId >= 1) && (partId <= numel(partNames));
end
function partId = promptPart()
% Interactively ask which part to submit; returns -1 on invalid input.
% NOTE(review): the homework_id() argument below is unused by the format
% string -- harmless, but presumably left over from an earlier message.
fprintf('== Select which part(s) to submit:\n', ...
homework_id());
partNames = validParts();
srcFiles = sources();
for i = 1:numel(partNames)
fprintf('== %d) %s [', i, partNames{i});
fprintf(' %s ', srcFiles{i}{:});
fprintf(']\n');
end
fprintf('\nEnter your choice [1-%d]: ', ...
numel(partNames));
selPart = input('', 's');
partId = str2num(selPart);
if ~isValidPartId(partId)
partId = -1;
end
end
function [result, str] = submitSolution(email, part, output, source)
  % Build the PHP-serialized associative array the grader expects:
  % a 5-field map of homework id, part, email, output, and source text.
  %
  % result : the serialized payload string.
  % str    : secondary output; the original declared it but never
  %          assigned it, so any two-output call errored. Assigned an
  %          empty string here for interface compatibility.
  result = ['a:5:{' ...
            p_s('homework') p_s64(homework_id()) ...
            p_s('part') p_s64(part) ...
            p_s('email') p_s64(email) ...
            p_s('output') p_s64(output) ...
            p_s('source') p_s64(source) ...
            '}'];
  str = '';
end
function s = p_s(str)
  % PHP-serialize a plain string: s:<length>:"<str>";
  s = sprintf('s:%d:"%s";', numel(str), str);
end
function s = p_s64(str)
  % PHP-serialize the base64 encoding of str (single line: empty EOL
  % suppresses line breaking in base64encode).
  encoded = base64encode(str, '');
  s = sprintf('s:%d:"%s";', numel(encoded), encoded);
end
% =========================== LOGIN HELPERS ===========================
function login = loginPrompt()
  % Thin wrapper kept for call-site compatibility: delegates the login
  % prompt to basicPrompt().
  login = basicPrompt();
end
function [login] = basicPrompt()
% Read the user's login (email address) from stdin as a raw string.
% No validation is performed here; callers receive exactly what was typed.
login = input('Login (Email address): ', 's');
end
% =========================== Base64 Encoder ============================
% Thanks to Peter John Acklam
%
function y = base64encode(x, eol)
%BASE64ENCODE Perform base64 encoding on a string.
%
% BASE64ENCODE(STR, EOL) encode the given string STR. EOL is the line ending
% sequence to use; it is optional and defaults to '\n' (ASCII decimal 10).
% The returned encoded string is broken into lines of no more than 76
% characters each, and each line will end with EOL unless it is empty. Let
% EOL be empty if you do not want the encoded string broken into lines.
%
% STR and EOL don't have to be strings (i.e., char arrays). The only
% requirement is that they are vectors containing values in the range 0-255.
%
% This function may be used to encode strings into the Base64 encoding
% specified in RFC 2045 - MIME (Multipurpose Internet Mail Extensions). The
% Base64 encoding is designed to represent arbitrary sequences of octets in a
% form that need not be humanly readable. A 65-character subset
% ([A-Za-z0-9+/=]) of US-ASCII is used, enabling 6 bits to be represented per
% printable character.
%
% Examples
% --------
%
% If you want to encode a large file, you should encode it in chunks that are
% a multiple of 57 bytes. This ensures that the base64 lines line up and
% that you do not end up with padding in the middle. 57 bytes of data fills
% one complete base64 line (76 == 57*4/3):
%
% If ifid and ofid are two file identifiers opened for reading and writing,
% respectively, then you can base64 encode the data with
%
% while ~feof(ifid)
% fwrite(ofid, base64encode(fread(ifid, 60*57)));
% end
%
% or, if you have enough memory,
%
% fwrite(ofid, base64encode(fread(ifid)));
%
% See also BASE64DECODE.
% Author: Peter John Acklam
% Time-stamp: 2004-02-03 21:36:56 +0100
% E-mail: pjacklam@online.no
% URL: http://home.online.no/~pjacklam
% NOTE(review): numeric input is converted to its DECIMAL STRING form
% (e.g. 65 -> '65'), not treated as raw byte values — confirm callers
% expect this before reusing the function elsewhere.
if isnumeric(x)
x = num2str(x);
end
% make sure we have the EOL value
if nargin < 2
eol = sprintf('\n');
else
if sum(size(eol) > 1) > 1
error('EOL must be a vector.');
end
if any(eol(:) > 255)
error('EOL can not contain values larger than 255.');
end
end
if sum(size(x) > 1) > 1
error('STR must be a vector.');
end
% coerce both inputs to bytes for the bit manipulation below
x = uint8(x);
eol = uint8(eol);
ndbytes = length(x); % number of decoded bytes
nchunks = ceil(ndbytes / 3); % number of chunks/groups
nebytes = 4 * nchunks; % number of encoded bytes
% add padding if necessary, to make the length of x a multiple of 3
if rem(ndbytes, 3)
x(end+1 : 3*nchunks) = 0;
end
x = reshape(x, [3, nchunks]); % reshape the data
y = repmat(uint8(0), 4, nchunks); % for the encoded data
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Split up every 3 bytes into 4 pieces
%
% aaaaaabb bbbbcccc ccdddddd
%
% to form
%
% 00aaaaaa 00bbbbbb 00cccccc 00dddddd
%
y(1,:) = bitshift(x(1,:), -2); % 6 highest bits of x(1,:)
y(2,:) = bitshift(bitand(x(1,:), 3), 4); % 2 lowest bits of x(1,:)
y(2,:) = bitor(y(2,:), bitshift(x(2,:), -4)); % 4 highest bits of x(2,:)
y(3,:) = bitshift(bitand(x(2,:), 15), 2); % 4 lowest bits of x(2,:)
y(3,:) = bitor(y(3,:), bitshift(x(3,:), -6)); % 2 highest bits of x(3,:)
y(4,:) = bitand(x(3,:), 63); % 6 lowest bits of x(3,:)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Now perform the following mapping
%
% 0 - 25 -> A-Z
% 26 - 51 -> a-z
% 52 - 61 -> 0-9
% 62 -> +
% 63 -> /
%
% We could use a mapping vector like
%
% ['A':'Z', 'a':'z', '0':'9', '+/']
%
% but that would require an index vector of class double.
%
z = repmat(uint8(0), size(y));
i = y <= 25; z(i) = 'A' + double(y(i));
i = 26 <= y & y <= 51; z(i) = 'a' - 26 + double(y(i));
i = 52 <= y & y <= 61; z(i) = '0' - 52 + double(y(i));
i = y == 62; z(i) = '+';
i = y == 63; z(i) = '/';
y = z;
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Add padding if necessary.
%
% npbytes counts the zero bytes appended above; each maps to one '='.
npbytes = 3 * nchunks - ndbytes; % number of padding bytes
if npbytes
y(end-npbytes+1 : end) = '='; % '=' is used for padding
end
if isempty(eol)
% reshape to a row vector
y = reshape(y, [1, nebytes]);
else
nlines = ceil(nebytes / 76); % number of lines
neolbytes = length(eol); % number of bytes in eol string
% pad data so it becomes a multiple of 76 elements
y = [y(:) ; zeros(76 * nlines - numel(y), 1)];
y(nebytes + 1 : 76 * nlines) = 0;
y = reshape(y, 76, nlines);
% insert eol strings
eol = eol(:);
y(end + 1 : end + neolbytes, :) = eol(:, ones(1, nlines));
% remove padding, but keep the last eol string
m = nebytes + neolbytes * (nlines - 1);
n = (76+neolbytes)*nlines - neolbytes;
y(m+1 : n) = '';
% extract and reshape to row vector
y = reshape(y, 1, m+neolbytes);
end
% output is a character array
y = char(y);
end

751
ds/25-1/r/survey.csv Normal file
View File

@ -0,0 +1,751 @@
MYDEPV,Price,Income,Age
1,10,33,37
0,20,21,55
1,30,59,55
1,20,76,44
0,30,24,37
0,20,22,32
1,10,28,32
1,10,49,38
0,30,76,43
1,20,59,55
0,30,45,32
0,30,21,46
0,30,49,44
0,10,23,30
1,10,55,55
0,20,29,32
1,10,49,44
0,20,45,32
0,20,24,37
0,10,30,32
0,10,24,55
1,10,59,55
0,30,31,32
0,20,33,32
0,30,22,32
0,30,29,32
0,10,30,32
0,20,28,32
1,30,59,55
0,30,56,43
1,30,77,43
1,20,97,18
0,20,23,32
0,30,25,37
0,30,23,32
0,30,88,43
0,30,49,44
1,30,76,44
1,20,67,25
1,10,55,55
0,20,26,37
1,20,49,44
1,20,68,25
0,30,45,32
1,20,68,43
0,20,32,35
1,30,22,55
1,30,55,55
1,20,66,43
0,20,29,32
1,10,49,44
1,10,28,32
1,10,23,37
0,20,45,32
0,30,22,37
1,10,66,25
0,20,30,32
0,20,43,27
0,20,34,55
0,30,32,32
1,10,67,25
0,20,25,27
1,20,49,38
0,30,33,55
0,20,30,32
1,10,34,37
0,30,33,32
0,10,32,27
0,20,30,32
1,20,66,25
0,30,29,32
1,10,25,37
1,20,55,55
0,30,22,32
1,10,28,38
0,20,22,44
0,30,28,32
0,10,45,32
1,20,65,22
1,10,78,21
1,30,66,25
1,20,99,25
0,10,21,44
0,20,23,37
0,30,22,37
1,30,88,43
0,30,28,32
1,30,49,55
1,10,55,55
0,20,29,32
0,30,87,43
1,30,66,25
1,20,77,22
1,10,26,37
0,30,45,32
0,20,43,22
1,30,64,33
0,20,45,32
0,10,30,32
0,30,56,43
0,20,30,32
0,30,30,32
1,10,78,25
1,20,77,43
1,20,49,38
0,30,32,35
0,10,29,32
1,20,89,22
0,30,30,32
1,30,55,55
0,20,22,32
0,20,32,32
0,30,30,32
0,30,49,44
1,10,77,43
1,20,59,55
0,20,30,32
0,30,22,27
1,20,68,25
1,10,59,55
1,30,17,23
0,20,22,32
1,10,44,43
1,20,76,21
0,20,29,32
1,10,59,55
0,20,29,32
0,30,23,30
1,20,49,44
0,20,33,32
0,20,23,32
1,10,64,33
1,10,49,44
1,30,57,25
1,10,28,32
0,10,22,32
0,30,22,44
0,20,33,23
0,30,46,43
0,30,22,32
1,20,59,55
0,10,22,32
1,20,59,55
1,10,33,24
0,10,55,44
0,30,49,38
1,30,77,25
0,20,22,37
1,30,55,55
1,30,22,25
1,10,44,37
0,30,21,37
1,20,49,44
1,20,55,55
0,30,33,32
0,10,30,32
0,10,29,32
0,30,49,38
1,10,21,37
1,10,55,25
0,30,22,32
0,20,28,32
0,10,25,27
1,20,98,43
1,20,43,37
0,30,49,38
1,20,76,43
0,10,30,32
0,30,32,27
1,10,59,55
0,20,21,27
0,30,55,44
1,20,77,24
0,30,34,37
1,10,59,55
1,10,65,25
1,10,78,65
0,20,19,46
1,10,65,22
1,20,59,55
1,30,55,27
0,20,29,32
1,20,49,38
0,20,23,38
1,20,34,37
0,30,30,32
1,30,59,55
1,10,22,25
1,10,55,23
0,30,29,32
0,20,22,32
0,20,33,27
1,10,56,43
1,10,49,44
1,10,68,25
0,10,22,32
0,20,33,32
0,20,22,27
0,30,28,32
0,20,45,32
0,30,28,38
0,30,24,27
0,20,30,32
0,20,29,32
0,10,24,32
1,10,28,32
1,10,55,55
0,20,20,47
0,20,30,32
0,30,28,32
0,10,30,32
0,20,22,37
0,30,20,47
0,30,45,32
0,10,30,32
0,20,22,27
1,10,33,25
0,10,30,32
0,10,21,55
0,10,45,32
1,30,68,25
0,10,30,32
1,30,65,22
0,30,49,44
1,30,44,25
0,20,28,32
1,10,49,32
1,10,66,43
0,30,45,32
1,10,65,25
1,20,55,23
1,30,78,21
1,10,66,22
0,20,25,37
0,10,43,22
1,10,66,43
0,30,21,55
0,20,23,27
0,30,29,32
1,20,56,43
0,30,24,27
1,10,55,44
1,20,59,55
0,10,34,25
0,20,34,23
1,20,66,25
0,30,34,25
0,20,32,32
0,10,33,27
1,10,88,23
0,30,29,32
0,30,22,27
1,20,17,23
1,10,54,25
1,20,77,25
1,10,59,55
0,10,33,32
0,20,32,37
0,10,22,37
1,20,55,37
1,30,59,55
0,10,29,32
1,10,32,32
1,20,28,38
1,10,66,25
0,10,45,32
1,20,55,55
0,10,19,46
0,30,21,44
1,20,49,44
0,10,33,32
0,20,30,32
1,30,89,22
0,30,30,32
0,20,34,25
1,30,55,55
0,30,30,32
0,20,55,44
0,20,30,32
1,30,59,55
0,30,34,55
0,10,33,23
0,10,30,32
0,10,45,32
0,10,29,32
0,30,78,43
0,30,30,32
0,30,22,37
1,20,49,44
1,20,49,38
1,30,33,24
1,20,57,25
1,10,17,23
1,10,55,55
1,10,76,21
0,10,29,32
0,10,23,32
0,20,33,55
0,30,29,32
0,20,29,32
0,10,29,32
0,20,28,32
1,20,55,25
0,10,22,32
0,30,32,32
1,20,78,43
1,10,87,43
1,10,49,44
0,10,43,27
0,30,26,37
0,20,29,32
0,20,30,32
0,10,22,66
1,30,66,25
0,10,30,32
1,10,88,43
0,20,33,34
1,10,99,25
1,20,55,55
1,10,34,33
0,30,28,32
0,10,29,32
0,30,29,32
0,30,49,38
0,30,33,44
0,10,33,32
1,10,59,55
1,20,55,44
1,20,66,43
1,10,67,25
0,10,30,32
0,30,21,37
0,30,30,32
0,20,22,37
0,20,30,32
0,30,45,32
0,30,28,38
1,20,65,25
0,30,30,32
1,10,76,44
1,10,49,44
1,30,34,33
0,30,22,27
0,10,33,44
0,10,30,32
1,30,55,44
1,20,77,25
0,10,21,27
1,10,76,43
0,20,22,45
0,30,29,32
0,20,21,37
0,30,33,37
0,20,43,24
1,10,59,55
1,10,55,37
1,20,49,44
1,30,88,23
0,20,25,37
0,30,55,37
0,10,34,55
0,20,28,32
0,30,30,32
0,30,28,32
0,10,45,32
1,10,59,55
0,30,29,32
0,20,24,32
0,10,30,32
1,10,77,25
1,20,87,43
0,10,23,38
1,10,28,38
1,30,98,43
0,20,33,32
1,10,59,55
0,20,45,32
1,20,67,25
0,10,21,46
0,20,22,32
1,20,28,38
1,10,21,37
1,30,44,22
0,10,33,27
0,20,28,32
0,30,29,32
1,20,78,65
0,20,24,27
0,30,67,43
1,30,97,18
0,30,28,32
0,10,30,32
0,30,24,55
0,20,33,37
0,30,33,32
1,20,33,24
1,30,55,25
0,10,33,34
1,10,55,55
0,10,24,37
0,10,30,32
0,10,22,37
0,30,22,45
0,30,30,32
1,30,55,55
1,30,66,25
0,30,29,32
1,10,22,55
0,30,29,32
0,10,30,32
0,30,30,32
0,30,33,23
0,20,31,32
1,30,55,55
0,30,29,32
0,10,32,35
0,30,33,32
0,30,30,32
1,10,49,44
0,30,23,38
1,20,64,33
1,20,78,43
1,10,67,43
1,30,78,65
0,20,33,23
0,30,49,44
1,10,28,32
0,20,28,32
0,20,30,32
0,20,30,32
0,30,22,37
1,10,49,44
1,10,88,43
1,10,32,32
1,20,33,43
1,20,56,26
0,30,44,37
0,20,32,27
0,10,22,37
0,20,33,27
1,10,98,43
0,20,21,37
0,30,30,32
0,30,31,32
0,30,33,23
0,30,30,32
0,20,29,32
0,30,29,32
1,20,49,38
1,30,59,55
1,30,59,55
0,30,43,27
0,30,21,54
0,10,22,44
1,10,56,26
0,30,30,32
0,10,22,27
1,10,68,25
1,10,66,25
1,30,77,25
0,20,28,32
0,30,49,44
0,10,33,32
1,30,33,25
1,10,28,32
0,20,22,27
0,20,33,32
0,20,30,32
1,30,33,43
1,10,33,43
1,20,59,55
0,10,34,23
0,30,49,44
1,10,77,22
1,20,49,44
1,10,56,43
1,20,65,25
0,10,23,27
1,10,78,43
1,30,55,55
0,20,22,66
1,10,59,55
0,10,25,37
1,10,59,55
0,30,33,32
0,10,45,32
0,20,22,32
0,20,28,32
0,10,21,54
1,10,44,22
0,30,43,37
0,20,45,32
0,20,25,27
1,20,49,44
0,30,43,22
0,10,33,55
1,10,55,23
0,10,22,32
0,30,29,32
0,10,30,32
1,20,22,55
0,20,33,44
1,30,55,55
0,10,29,32
1,30,65,25
1,30,99,25
1,30,66,43
0,10,22,32
0,20,29,32
1,30,67,25
1,20,66,25
0,20,22,27
0,30,30,32
0,30,22,32
0,20,29,32
1,10,49,38
0,30,24,32
0,20,21,54
0,10,29,32
0,30,23,27
1,10,28,32
0,30,49,44
1,30,49,32
0,20,29,32
1,20,66,25
0,30,33,34
0,20,29,32
1,10,46,43
0,10,30,32
1,30,65,25
1,20,44,25
1,10,59,55
0,10,24,27
0,10,22,27
0,20,22,37
1,10,77,25
0,20,30,32
0,10,33,32
1,10,55,25
1,30,56,26
0,30,68,43
1,20,55,55
1,30,77,24
0,30,49,44
1,30,59,55
0,10,29,32
0,30,49,38
1,20,49,44
1,20,59,55
0,30,49,44
1,20,55,27
0,30,19,46
0,10,29,32
0,10,29,32
1,30,66,43
1,20,55,55
1,10,55,55
1,10,28,32
0,30,22,32
1,20,59,55
0,30,45,32
1,10,49,38
1,20,28,38
0,10,33,32
1,10,22,37
0,30,33,27
0,30,33,32
1,10,28,38
1,20,59,55
0,30,33,37
1,20,33,25
1,10,55,55
0,30,78,43
1,20,67,43
1,20,49,32
0,10,30,32
1,10,66,25
1,10,49,38
1,20,56,43
1,20,78,25
1,30,55,55
0,30,22,37
1,30,59,55
1,20,55,55
0,30,23,37
1,10,49,44
1,10,49,38
0,10,43,24
0,10,23,32
1,20,49,44
0,30,31,32
0,30,33,27
0,20,22,37
1,30,77,22
1,30,59,55
0,20,22,32
1,30,55,23
0,30,30,32
1,10,49,44
0,20,22,32
1,10,68,43
1,10,49,55
1,20,49,55
0,20,29,32
0,10,24,27
1,20,36,37
0,30,32,37
1,10,57,25
0,10,21,37
1,20,59,55
0,30,22,32
0,10,31,32
0,30,29,32
1,20,55,25
1,10,89,22
0,30,22,66
0,10,22,27
0,30,34,23
0,20,29,32
1,20,34,33
0,10,45,32
0,10,20,47
1,10,33,37
0,20,30,32
0,10,45,32
0,30,28,38
0,20,21,37
1,30,76,21
0,30,29,32
0,30,49,38
0,20,55,36
1,10,55,27
0,10,29,32
0,20,24,27
1,10,28,32
0,30,22,27
0,30,29,32
1,10,97,18
1,30,67,25
1,30,55,55
0,30,25,37
1,10,22,37
0,30,28,32
0,20,33,32
0,30,49,44
1,20,22,25
1,10,77,24
0,10,29,32
0,30,55,36
0,10,32,37
1,20,59,55
0,20,29,32
1,10,28,38
1,20,88,43
0,20,29,32
0,20,23,30
1,30,55,25
1,20,88,43
1,10,49,44
1,30,54,25
1,20,55,55
0,30,28,32
1,20,88,23
0,20,44,37
0,20,21,46
1,10,49,38
1,20,55,23
0,10,29,32
1,10,44,25
0,20,31,32
0,30,29,32
0,30,33,24
0,10,33,23
0,10,31,32
1,30,59,55
0,10,22,27
0,10,22,32
1,20,55,55
1,10,43,37
0,30,22,32
0,10,25,27
0,20,31,32
0,20,29,32
1,20,44,43
0,20,45,32
0,10,29,32
1,30,55,23
0,20,30,32
0,30,30,32
1,10,49,44
0,20,30,32
0,30,25,27
0,10,29,32
0,20,33,24
1,20,55,55
0,30,44,43
0,10,29,32
1,10,36,37
0,30,21,27
1,20,66,43
0,30,49,44
0,30,36,37
0,30,30,32
1,20,88,23
1,20,49,38
0,30,45,32
1,20,46,43
0,20,21,44
1,20,66,22
0,30,23,32
1,20,59,55
0,10,22,45
0,20,30,32
0,10,33,24
0,10,29,32
0,30,29,32
0,10,31,32
1,10,78,43
0,20,33,37
1,20,78,21
1,10,88,23
1,20,59,55
1,30,59,55
0,30,43,24
1,30,78,25
1,30,88,23
1,30,66,22
1,20,54,25
0,20,45,32
1,20,49,44
0,20,24,55
1,10,66,43
1,20,44,22
1,10,55,55
1,30,59,55
0,20,30,32
0,10,22,32
1,20,49,44
0,30,66,43
1,30,68,25
1,30,59,55
0,20,28,38
1,10,59,55
0,20,29,32
1,10,55,55
0,30,25,27
0,10,29,32
0,10,55,36
0,30,21,37
0,30,28,38
1 MYDEPV Price Income Age
2 1 10 33 37
3 0 20 21 55
4 1 30 59 55
5 1 20 76 44
6 0 30 24 37
7 0 20 22 32
8 1 10 28 32
9 1 10 49 38
10 0 30 76 43
11 1 20 59 55
12 0 30 45 32
13 0 30 21 46
14 0 30 49 44
15 0 10 23 30
16 1 10 55 55
17 0 20 29 32
18 1 10 49 44
19 0 20 45 32
20 0 20 24 37
21 0 10 30 32
22 0 10 24 55
23 1 10 59 55
24 0 30 31 32
25 0 20 33 32
26 0 30 22 32
27 0 30 29 32
28 0 10 30 32
29 0 20 28 32
30 1 30 59 55
31 0 30 56 43
32 1 30 77 43
33 1 20 97 18
34 0 20 23 32
35 0 30 25 37
36 0 30 23 32
37 0 30 88 43
38 0 30 49 44
39 1 30 76 44
40 1 20 67 25
41 1 10 55 55
42 0 20 26 37
43 1 20 49 44
44 1 20 68 25
45 0 30 45 32
46 1 20 68 43
47 0 20 32 35
48 1 30 22 55
49 1 30 55 55
50 1 20 66 43
51 0 20 29 32
52 1 10 49 44
53 1 10 28 32
54 1 10 23 37
55 0 20 45 32
56 0 30 22 37
57 1 10 66 25
58 0 20 30 32
59 0 20 43 27
60 0 20 34 55
61 0 30 32 32
62 1 10 67 25
63 0 20 25 27
64 1 20 49 38
65 0 30 33 55
66 0 20 30 32
67 1 10 34 37
68 0 30 33 32
69 0 10 32 27
70 0 20 30 32
71 1 20 66 25
72 0 30 29 32
73 1 10 25 37
74 1 20 55 55
75 0 30 22 32
76 1 10 28 38
77 0 20 22 44
78 0 30 28 32
79 0 10 45 32
80 1 20 65 22
81 1 10 78 21
82 1 30 66 25
83 1 20 99 25
84 0 10 21 44
85 0 20 23 37
86 0 30 22 37
87 1 30 88 43
88 0 30 28 32
89 1 30 49 55
90 1 10 55 55
91 0 20 29 32
92 0 30 87 43
93 1 30 66 25
94 1 20 77 22
95 1 10 26 37
96 0 30 45 32
97 0 20 43 22
98 1 30 64 33
99 0 20 45 32
100 0 10 30 32
101 0 30 56 43
102 0 20 30 32
103 0 30 30 32
104 1 10 78 25
105 1 20 77 43
106 1 20 49 38
107 0 30 32 35
108 0 10 29 32
109 1 20 89 22
110 0 30 30 32
111 1 30 55 55
112 0 20 22 32
113 0 20 32 32
114 0 30 30 32
115 0 30 49 44
116 1 10 77 43
117 1 20 59 55
118 0 20 30 32
119 0 30 22 27
120 1 20 68 25
121 1 10 59 55
122 1 30 17 23
123 0 20 22 32
124 1 10 44 43
125 1 20 76 21
126 0 20 29 32
127 1 10 59 55
128 0 20 29 32
129 0 30 23 30
130 1 20 49 44
131 0 20 33 32
132 0 20 23 32
133 1 10 64 33
134 1 10 49 44
135 1 30 57 25
136 1 10 28 32
137 0 10 22 32
138 0 30 22 44
139 0 20 33 23
140 0 30 46 43
141 0 30 22 32
142 1 20 59 55
143 0 10 22 32
144 1 20 59 55
145 1 10 33 24
146 0 10 55 44
147 0 30 49 38
148 1 30 77 25
149 0 20 22 37
150 1 30 55 55
151 1 30 22 25
152 1 10 44 37
153 0 30 21 37
154 1 20 49 44
155 1 20 55 55
156 0 30 33 32
157 0 10 30 32
158 0 10 29 32
159 0 30 49 38
160 1 10 21 37
161 1 10 55 25
162 0 30 22 32
163 0 20 28 32
164 0 10 25 27
165 1 20 98 43
166 1 20 43 37
167 0 30 49 38
168 1 20 76 43
169 0 10 30 32
170 0 30 32 27
171 1 10 59 55
172 0 20 21 27
173 0 30 55 44
174 1 20 77 24
175 0 30 34 37
176 1 10 59 55
177 1 10 65 25
178 1 10 78 65
179 0 20 19 46
180 1 10 65 22
181 1 20 59 55
182 1 30 55 27
183 0 20 29 32
184 1 20 49 38
185 0 20 23 38
186 1 20 34 37
187 0 30 30 32
188 1 30 59 55
189 1 10 22 25
190 1 10 55 23
191 0 30 29 32
192 0 20 22 32
193 0 20 33 27
194 1 10 56 43
195 1 10 49 44
196 1 10 68 25
197 0 10 22 32
198 0 20 33 32
199 0 20 22 27
200 0 30 28 32
201 0 20 45 32
202 0 30 28 38
203 0 30 24 27
204 0 20 30 32
205 0 20 29 32
206 0 10 24 32
207 1 10 28 32
208 1 10 55 55
209 0 20 20 47
210 0 20 30 32
211 0 30 28 32
212 0 10 30 32
213 0 20 22 37
214 0 30 20 47
215 0 30 45 32
216 0 10 30 32
217 0 20 22 27
218 1 10 33 25
219 0 10 30 32
220 0 10 21 55
221 0 10 45 32
222 1 30 68 25
223 0 10 30 32
224 1 30 65 22
225 0 30 49 44
226 1 30 44 25
227 0 20 28 32
228 1 10 49 32
229 1 10 66 43
230 0 30 45 32
231 1 10 65 25
232 1 20 55 23
233 1 30 78 21
234 1 10 66 22
235 0 20 25 37
236 0 10 43 22
237 1 10 66 43
238 0 30 21 55
239 0 20 23 27
240 0 30 29 32
241 1 20 56 43
242 0 30 24 27
243 1 10 55 44
244 1 20 59 55
245 0 10 34 25
246 0 20 34 23
247 1 20 66 25
248 0 30 34 25
249 0 20 32 32
250 0 10 33 27
251 1 10 88 23
252 0 30 29 32
253 0 30 22 27
254 1 20 17 23
255 1 10 54 25
256 1 20 77 25
257 1 10 59 55
258 0 10 33 32
259 0 20 32 37
260 0 10 22 37
261 1 20 55 37
262 1 30 59 55
263 0 10 29 32
264 1 10 32 32
265 1 20 28 38
266 1 10 66 25
267 0 10 45 32
268 1 20 55 55
269 0 10 19 46
270 0 30 21 44
271 1 20 49 44
272 0 10 33 32
273 0 20 30 32
274 1 30 89 22
275 0 30 30 32
276 0 20 34 25
277 1 30 55 55
278 0 30 30 32
279 0 20 55 44
280 0 20 30 32
281 1 30 59 55
282 0 30 34 55
283 0 10 33 23
284 0 10 30 32
285 0 10 45 32
286 0 10 29 32
287 0 30 78 43
288 0 30 30 32
289 0 30 22 37
290 1 20 49 44
291 1 20 49 38
292 1 30 33 24
293 1 20 57 25
294 1 10 17 23
295 1 10 55 55
296 1 10 76 21
297 0 10 29 32
298 0 10 23 32
299 0 20 33 55
300 0 30 29 32
301 0 20 29 32
302 0 10 29 32
303 0 20 28 32
304 1 20 55 25
305 0 10 22 32
306 0 30 32 32
307 1 20 78 43
308 1 10 87 43
309 1 10 49 44
310 0 10 43 27
311 0 30 26 37
312 0 20 29 32
313 0 20 30 32
314 0 10 22 66
315 1 30 66 25
316 0 10 30 32
317 1 10 88 43
318 0 20 33 34
319 1 10 99 25
320 1 20 55 55
321 1 10 34 33
322 0 30 28 32
323 0 10 29 32
324 0 30 29 32
325 0 30 49 38
326 0 30 33 44
327 0 10 33 32
328 1 10 59 55
329 1 20 55 44
330 1 20 66 43
331 1 10 67 25
332 0 10 30 32
333 0 30 21 37
334 0 30 30 32
335 0 20 22 37
336 0 20 30 32
337 0 30 45 32
338 0 30 28 38
339 1 20 65 25
340 0 30 30 32
341 1 10 76 44
342 1 10 49 44
343 1 30 34 33
344 0 30 22 27
345 0 10 33 44
346 0 10 30 32
347 1 30 55 44
348 1 20 77 25
349 0 10 21 27
350 1 10 76 43
351 0 20 22 45
352 0 30 29 32
353 0 20 21 37
354 0 30 33 37
355 0 20 43 24
356 1 10 59 55
357 1 10 55 37
358 1 20 49 44
359 1 30 88 23
360 0 20 25 37
361 0 30 55 37
362 0 10 34 55
363 0 20 28 32
364 0 30 30 32
365 0 30 28 32
366 0 10 45 32
367 1 10 59 55
368 0 30 29 32
369 0 20 24 32
370 0 10 30 32
371 1 10 77 25
372 1 20 87 43
373 0 10 23 38
374 1 10 28 38
375 1 30 98 43
376 0 20 33 32
377 1 10 59 55
378 0 20 45 32
379 1 20 67 25
380 0 10 21 46
381 0 20 22 32
382 1 20 28 38
383 1 10 21 37
384 1 30 44 22
385 0 10 33 27
386 0 20 28 32
387 0 30 29 32
388 1 20 78 65
389 0 20 24 27
390 0 30 67 43
391 1 30 97 18
392 0 30 28 32
393 0 10 30 32
394 0 30 24 55
395 0 20 33 37
396 0 30 33 32
397 1 20 33 24
398 1 30 55 25
399 0 10 33 34
400 1 10 55 55
401 0 10 24 37
402 0 10 30 32
403 0 10 22 37
404 0 30 22 45
405 0 30 30 32
406 1 30 55 55
407 1 30 66 25
408 0 30 29 32
409 1 10 22 55
410 0 30 29 32
411 0 10 30 32
412 0 30 30 32
413 0 30 33 23
414 0 20 31 32
415 1 30 55 55
416 0 30 29 32
417 0 10 32 35
418 0 30 33 32
419 0 30 30 32
420 1 10 49 44
421 0 30 23 38
422 1 20 64 33
423 1 20 78 43
424 1 10 67 43
425 1 30 78 65
426 0 20 33 23
427 0 30 49 44
428 1 10 28 32
429 0 20 28 32
430 0 20 30 32
431 0 20 30 32
432 0 30 22 37
433 1 10 49 44
434 1 10 88 43
435 1 10 32 32
436 1 20 33 43
437 1 20 56 26
438 0 30 44 37
439 0 20 32 27
440 0 10 22 37
441 0 20 33 27
442 1 10 98 43
443 0 20 21 37
444 0 30 30 32
445 0 30 31 32
446 0 30 33 23
447 0 30 30 32
448 0 20 29 32
449 0 30 29 32
450 1 20 49 38
451 1 30 59 55
452 1 30 59 55
453 0 30 43 27
454 0 30 21 54
455 0 10 22 44
456 1 10 56 26
457 0 30 30 32
458 0 10 22 27
459 1 10 68 25
460 1 10 66 25
461 1 30 77 25
462 0 20 28 32
463 0 30 49 44
464 0 10 33 32
465 1 30 33 25
466 1 10 28 32
467 0 20 22 27
468 0 20 33 32
469 0 20 30 32
470 1 30 33 43
471 1 10 33 43
472 1 20 59 55
473 0 10 34 23
474 0 30 49 44
475 1 10 77 22
476 1 20 49 44
477 1 10 56 43
478 1 20 65 25
479 0 10 23 27
480 1 10 78 43
481 1 30 55 55
482 0 20 22 66
483 1 10 59 55
484 0 10 25 37
485 1 10 59 55
486 0 30 33 32
487 0 10 45 32
488 0 20 22 32
489 0 20 28 32
490 0 10 21 54
491 1 10 44 22
492 0 30 43 37
493 0 20 45 32
494 0 20 25 27
495 1 20 49 44
496 0 30 43 22
497 0 10 33 55
498 1 10 55 23
499 0 10 22 32
500 0 30 29 32
501 0 10 30 32
502 1 20 22 55
503 0 20 33 44
504 1 30 55 55
505 0 10 29 32
506 1 30 65 25
507 1 30 99 25
508 1 30 66 43
509 0 10 22 32
510 0 20 29 32
511 1 30 67 25
512 1 20 66 25
513 0 20 22 27
514 0 30 30 32
515 0 30 22 32
516 0 20 29 32
517 1 10 49 38
518 0 30 24 32
519 0 20 21 54
520 0 10 29 32
521 0 30 23 27
522 1 10 28 32
523 0 30 49 44
524 1 30 49 32
525 0 20 29 32
526 1 20 66 25
527 0 30 33 34
528 0 20 29 32
529 1 10 46 43
530 0 10 30 32
531 1 30 65 25
532 1 20 44 25
533 1 10 59 55
534 0 10 24 27
535 0 10 22 27
536 0 20 22 37
537 1 10 77 25
538 0 20 30 32
539 0 10 33 32
540 1 10 55 25
541 1 30 56 26
542 0 30 68 43
543 1 20 55 55
544 1 30 77 24
545 0 30 49 44
546 1 30 59 55
547 0 10 29 32
548 0 30 49 38
549 1 20 49 44
550 1 20 59 55
551 0 30 49 44
552 1 20 55 27
553 0 30 19 46
554 0 10 29 32
555 0 10 29 32
556 1 30 66 43
557 1 20 55 55
558 1 10 55 55
559 1 10 28 32
560 0 30 22 32
561 1 20 59 55
562 0 30 45 32
563 1 10 49 38
564 1 20 28 38
565 0 10 33 32
566 1 10 22 37
567 0 30 33 27
568 0 30 33 32
569 1 10 28 38
570 1 20 59 55
571 0 30 33 37
572 1 20 33 25
573 1 10 55 55
574 0 30 78 43
575 1 20 67 43
576 1 20 49 32
577 0 10 30 32
578 1 10 66 25
579 1 10 49 38
580 1 20 56 43
581 1 20 78 25
582 1 30 55 55
583 0 30 22 37
584 1 30 59 55
585 1 20 55 55
586 0 30 23 37
587 1 10 49 44
588 1 10 49 38
589 0 10 43 24
590 0 10 23 32
591 1 20 49 44
592 0 30 31 32
593 0 30 33 27
594 0 20 22 37
595 1 30 77 22
596 1 30 59 55
597 0 20 22 32
598 1 30 55 23
599 0 30 30 32
600 1 10 49 44
601 0 20 22 32
602 1 10 68 43
603 1 10 49 55
604 1 20 49 55
605 0 20 29 32
606 0 10 24 27
607 1 20 36 37
608 0 30 32 37
609 1 10 57 25
610 0 10 21 37
611 1 20 59 55
612 0 30 22 32
613 0 10 31 32
614 0 30 29 32
615 1 20 55 25
616 1 10 89 22
617 0 30 22 66
618 0 10 22 27
619 0 30 34 23
620 0 20 29 32
621 1 20 34 33
622 0 10 45 32
623 0 10 20 47
624 1 10 33 37
625 0 20 30 32
626 0 10 45 32
627 0 30 28 38
628 0 20 21 37
629 1 30 76 21
630 0 30 29 32
631 0 30 49 38
632 0 20 55 36
633 1 10 55 27
634 0 10 29 32
635 0 20 24 27
636 1 10 28 32
637 0 30 22 27
638 0 30 29 32
639 1 10 97 18
640 1 30 67 25
641 1 30 55 55
642 0 30 25 37
643 1 10 22 37
644 0 30 28 32
645 0 20 33 32
646 0 30 49 44
647 1 20 22 25
648 1 10 77 24
649 0 10 29 32
650 0 30 55 36
651 0 10 32 37
652 1 20 59 55
653 0 20 29 32
654 1 10 28 38
655 1 20 88 43
656 0 20 29 32
657 0 20 23 30
658 1 30 55 25
659 1 20 88 43
660 1 10 49 44
661 1 30 54 25
662 1 20 55 55
663 0 30 28 32
664 1 20 88 23
665 0 20 44 37
666 0 20 21 46
667 1 10 49 38
668 1 20 55 23
669 0 10 29 32
670 1 10 44 25
671 0 20 31 32
672 0 30 29 32
673 0 30 33 24
674 0 10 33 23
675 0 10 31 32
676 1 30 59 55
677 0 10 22 27
678 0 10 22 32
679 1 20 55 55
680 1 10 43 37
681 0 30 22 32
682 0 10 25 27
683 0 20 31 32
684 0 20 29 32
685 1 20 44 43
686 0 20 45 32
687 0 10 29 32
688 1 30 55 23
689 0 20 30 32
690 0 30 30 32
691 1 10 49 44
692 0 20 30 32
693 0 30 25 27
694 0 10 29 32
695 0 20 33 24
696 1 20 55 55
697 0 30 44 43
698 0 10 29 32
699 1 10 36 37
700 0 30 21 27
701 1 20 66 43
702 0 30 49 44
703 0 30 36 37
704 0 30 30 32
705 1 20 88 23
706 1 20 49 38
707 0 30 45 32
708 1 20 46 43
709 0 20 21 44
710 1 20 66 22
711 0 30 23 32
712 1 20 59 55
713 0 10 22 45
714 0 20 30 32
715 0 10 33 24
716 0 10 29 32
717 0 30 29 32
718 0 10 31 32
719 1 10 78 43
720 0 20 33 37
721 1 20 78 21
722 1 10 88 23
723 1 20 59 55
724 1 30 59 55
725 0 30 43 24
726 1 30 78 25
727 1 30 88 23
728 1 30 66 22
729 1 20 54 25
730 0 20 45 32
731 1 20 49 44
732 0 20 24 55
733 1 10 66 43
734 1 20 44 22
735 1 10 55 55
736 1 30 59 55
737 0 20 30 32
738 0 10 22 32
739 1 20 49 44
740 0 30 66 43
741 1 30 68 25
742 1 30 59 55
743 0 20 28 38
744 1 10 59 55
745 0 20 29 32
746 1 10 55 55
747 0 30 25 27
748 0 10 29 32
749 0 10 55 36
750 0 30 21 37
751 0 30 28 38