diff --git a/ezyrb/ann.py b/ezyrb/ann.py
index 36641290..47635548 100755
--- a/ezyrb/ann.py
+++ b/ezyrb/ann.py
@@ -99,7 +99,7 @@ def _build_model(self, points, values):
         layers_torch.append(nn.Linear(layers[-2], layers[-1]))
         self.model = nn.Sequential(*layers_torch)

-    def fit(self, points, values):
+    def fit(self, points, values, optimizer = torch.optim.Adam, learning_rate = 0.001, frequency_print = 0):
         """
         Build the ANN given 'points' and 'values' and perform training.
@@ -119,14 +119,16 @@ def fit(self, points, values):
         :param numpy.ndarray points: the coordinates of the given (training) points.
         :param numpy.ndarray values: the (training) values in the points.
+        :param torch.optim.Optimizer optimizer: the optimizer class used to train the neural network.
+        :param float learning_rate: the learning rate passed to the optimizer.
+        :param int frequency_print: the number of epochs between printed loss values; 0 disables printing.
         """
         self._build_model(points, values)
-        self.optimizer = torch.optim.Adam(self.model.parameters(), lr = 0.01)
+        self.optimizer = optimizer(self.model.parameters(), lr = learning_rate)

         points = self._convert_numpy_to_torch(points)
         values = self._convert_numpy_to_torch(values)
-        print(points.shape, values.shape)
         n_epoch = 1
         flag = True
         while flag:
@@ -143,7 +145,9 @@ def fit(self, points, values):
             elif isinstance(criteria, float):  # stop criteria is float
                 if loss.item() < criteria:
                     flag = False
-            print(loss.item())
+            if frequency_print != 0:
+                if n_epoch % frequency_print == 1:
+                    print(loss.item())
             n_epoch += 1

     def predict(self, new_point):
@@ -157,10 +161,7 @@ def predict(self, new_point):
         new_point = self._convert_numpy_to_torch(np.array(new_point))
         y_new = self.model(new_point)
         return self._convert_torch_to_numpy(y_new)
-
-    def predict_tensor(self, new_point):
-
-        return self.model(new_point)
+
     def save_state(self, filename):
@@ -171,18 +172,13 @@ def save_state(self, filename):
             'model_class' : self.model.__class__
         }
-
-
         torch.save(checkpoint, filename)

     def load_state(self, filename, points, values):
         checkpoint = torch.load(filename)
-
-
         self._build_model(points, values)
-        print(self.model)
         self.optimizer = checkpoint['optimizer_class']
         self.model.load_state_dict(checkpoint['model_state'])
diff --git a/ezyrb/database.py b/ezyrb/database.py
index 7863c3c1..609c8f86 100644
--- a/ezyrb/database.py
+++ b/ezyrb/database.py
@@ -140,7 +140,6 @@ def add(self, parameters, snapshots, space=None):
             raise RuntimeError('No Spatial Value given')
         if (self._space is not None) or (space is not None):
-            print(space.shape, snapshots.shape)
             if len(space) != len(snapshots) or len(space[0]) != len(snapshots[0]):
                 raise RuntimeError(
                     'length of space and snapshots are different.')
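A quick illustration of the updated `fit` API: the optimizer class, its learning rate, and the print frequency are now chosen by the caller instead of being hard-coded. A minimal sketch, assuming the `ANN` constructor signature used elsewhere in this PR (layers, activation, stopping criteria, loss); the data and layer sizes below are illustrative:

```python
import numpy as np
import torch
import torch.nn as nn
from ezyrb.ann import ANN

# Illustrative 1D training data: f(x) = sin(x) sampled on [0, 5].
points = np.linspace(0, 5, 64).reshape(-1, 1)
values = np.sin(points)

# Layers, activation function, stopping criteria (epoch cap and loss
# tolerance), and loss function (None falls back to the default MSE).
net = ANN([20, 20], nn.Tanh(), [2000, 1e-6], None)

# The new keyword arguments: optimizer class, learning rate, and how
# often the loss is printed (0 keeps training silent).
net.fit(points, values,
        optimizer=torch.optim.Adam,
        learning_rate=0.001,
        frequency_print=100)
```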
diff --git a/ezyrb/nnspod.py b/ezyrb/nnspod.py
index ce5a278d..e3166b50 100644
--- a/ezyrb/nnspod.py
+++ b/ezyrb/nnspod.py
@@ -12,87 +12,67 @@ class NNsPOD(POD):
     def __init__(self, method = "svd", path = None):
         super().__init__(method)
         self.path = path
-
     def reshape2dto1d(self, x, y):
+        """
+        Reshape two n-by-n arrays into a single n^2-by-2 array of coordinates.
+
+        :param numpy.array x: the x values of the data.
+        :param numpy.array y: the y values of the data.
+        """
         x = x.reshape(-1,1)
         y = y.reshape(-1,1)
         coords = np.concatenate((x, y), axis = 1)
         coords = np.array(coords).reshape(-1,2)
-
-
+
         return coords

     def reshape1dto2d(self, snapshots):
-        print(len(snapshots), snapshots.shape)
+        """
+        Reshape a 1D array of data into a square 2D array.
+
+        :param array-like snapshots: the data to be reshaped.
+        """
         return snapshots.reshape(int(np.sqrt(len(snapshots))), int(np.sqrt(len(snapshots))))

+    def train_interpnet(self, ref_data, interp_layers, interp_function, interp_stop_training, interp_loss, retrain = False, frequency_print = 0):
+        """
+        Train the InterpNet on the given reference data (1D or 2D).
-    def train_InterpNet1d(self,ref_data, interp_layers, interp_function, interp_stop_training, interp_loss, retrain = False):
+        :param Database ref_data: the reference data that the rest of the data will be shifted to.
+        :param list interp_layers: list with the number of neurons in each layer.
+        :param torch.nn.modules.activation interp_function: the activation function for the InterpNet.
+        :param float interp_stop_training: the desired tolerance for the InterpNet training.
+        :param torch.nn.Module interp_loss: the loss function (MSE by default).
+        :param boolean retrain: True if the InterpNet should be retrained, False if it should be loaded from `self.path`.
+        :param int frequency_print: the number of epochs between printed loss values; 0 disables printing.
+        """
-        # print("loading")
-        self.interp_net = ANN(interp_layers, interp_function, interp_stop_training, interp_loss)
-        if not retrain:
-            try:
-                self.interp_net = self.interp_net.load_state(self.path, ref_data.space.reshape(-1,1), ref_data.snapshots.reshape(-1,1))
-                print("loaded")
-            except:
-                self.interp_net.fit(ref_data.space.reshape(-1,1), ref_data.snapshots.reshape(-1,1))
-                self.interp_net.save_state(self.path)
-                print(self.interp_net.load_state(self.path, ref_data.space.reshape(-1,1), ref_data.snapshots.reshape(-1,1)))
+        self.interp_net = ANN(interp_layers, interp_function, interp_stop_training, interp_loss)
+        if len(ref_data.space.shape) > 2:
+            space = ref_data.space.reshape(-1, 2)
         else:
-            self.interp_net.fit(ref_data.space.reshape(-1,1), ref_data.snapshots.reshape(-1,1))
-            self.interp_net.save_state(self.path)
-        #plt.plot(ref_data.space, ref_data.snapshots, "o")
-        xi = np.linspace(0,5,1000).reshape(-1,1)
-        yi = self.interp_net.predict(xi)
-        print(xi.shape, yi.shape)
-        #plt.plot(xi,yi, ".")
-        #plt.show()
-
-
-    def train_InterpNet2d(self,ref_data, interp_layers, interp_function, interp_stop_training, interp_loss, retrain = False):
-
-
-        self.interp_net = ANN(interp_layers, interp_function, interp_stop_training, interp_loss)
-        space = ref_data.space.reshape(-1, 2)
-        snapshots = ref_data.snapshots.reshape(-1, 1)
-
+            space = ref_data.space.reshape(-1,1)
+        snapshots = ref_data.snapshots.reshape(-1,1)
         if not retrain:
             try:
                 self.interp_net = self.interp_net.load_state(self.path, space, snapshots)
+                print("loaded interpnet")
             except:
-                self.interp_net.fit(space, snapshots)
+                self.interp_net.fit(space, snapshots, frequency_print = frequency_print)
                 self.interp_net.save_state(self.path)
         else:
-            self.interp_net.fit(space, snapshots)
+            self.interp_net.fit(space, snapshots, frequency_print = frequency_print)
             self.interp_net.save_state(self.path)
-        x = np.linspace(0, 5, 256)
-        y = np.linspace(0, 5, 256)
-        gridx, gridy = np.meshgrid(x, y)
-
-        plt.pcolor(gridx,gridy,ref_data.snapshots.reshape(256, 256))
-        plt.show()
-        res = 1000
-        x = np.linspace(0, 5, res)
-        y = np.linspace(0, 5, res)
-        gridx, gridy = np.meshgrid(x, y)
-        input = self.reshape2dto1d(gridx, gridy)
-        output = self.interp_net.predict(input)
-
-        toshow = self.reshape1dto2d(output)
-        plt.pcolor(gridx,gridy,toshow)
-        plt.show()
-
-
-
     def shift(self, x, y, shift_quantity):
+        """
+        Shift the x data by shift_quantity.
+        """
         return(x+shift_quantity, y)
-
+
     def pre_shift(self,x,y, ref_y):
+        """
+        Move the data so that the maxima of y and ref_y are at the same x coordinate.
+        """
         maxy = 0
         for i, n, in enumerate(y):
             if n > y[maxy]:
@@ -101,11 +81,13 @@ def pre_shift(self,x,y, ref_y):
         for i, n in enumerate(ref_y):
             if n > ref_y[maxref]:
                 maxref = i
-
-        print( x[maxref]-x[maxy], maxref, maxy)
+
         return self.shift(x, y, x[maxref]-x[maxy])[0]
-
+
+    def make_points(self, x, params):
+        """
+        Create the input points used to train and evaluate the ShiftNet:
+        each row contains the spatial coordinates of a point followed by the
+        parameter value.
+        """
         if len(x.shape)> 1:
             points = np.zeros((len(x),3))
             for j, s in enumerate(x):
@@ -118,11 +100,13 @@ def make_points(self, x, params):
                 points[j][0] = s
                 points[j][1] = params[0]
         return points
-
+
     def build_model(self, dim = 1):
+        """
+        Build the ShiftNet model based on the dimension of the input data.
+        """
         layers = self.layers.copy()
         layers.insert(0, dim + 1)
-        print(layers, "!!!!")
         layers.append(dim)
         layers_torch = []
         for i in range(len(layers) - 2):
@@ -131,13 +115,22 @@ def build_model(self, dim = 1):
         layers_torch.append(nn.Linear(layers[-2], layers[-1]))
         self.model = nn.Sequential(*layers_torch)

-    def train_ShiftNet1d(self, db, shift_layers, shift_function, shift_stop_training, ref_data, preshift = False):
-        # TODO:
-        # make sure neural net works no mater distance between data
-        # check and implement 2d functionality
-        # make code look better
+    def train_shiftnet(self, db, shift_layers, shift_function, shift_stop_training,
+                       ref_data, preshift = False,
+                       optimizer = torch.optim.Adam, learning_rate = 0.0001, frequency_print = 0):
+        """
+        Train the ShiftNet on the data in 'db' (1D or 2D) and return the shifted coordinates.
+
+        :param Database db: the data at a certain parameter value.
+        :param list shift_layers: ordered list with the number of neurons in each layer.
+        :param torch.nn.modules.activation shift_function: the activation function used by the ShiftNet.
+        :param int, float, or list shift_stop_training:
+            int: the number of epochs before stopping;
+            float: the desired tolerance before stopping training;
+            list: an int and a float, stopping when either the epoch count or the tolerance is reached.
+        :param Database ref_data: the data at the reference parameter value.
+        :param boolean preshift: True if the data should be pre-shifted before training, False otherwise.
+        :param torch.optim.Optimizer optimizer: the optimizer class used to train the ShiftNet.
+        :param float learning_rate: the learning rate passed to the optimizer.
+        :param int frequency_print: the number of epochs between printed loss values; 0 disables printing.
+        """
         self.layers = shift_layers
         self.function = shift_function
         self.loss_trend = []
         if preshift:
             x = self.pre_shift(db.space[0], db.snapshots[0], ref_data.snapshots[0])
         else:
             x = db.space[0]
-
+        if len(db.space.shape) > 2:
+            x_reshaped = x.reshape(-1,2)
+            self.build_model(dim = 2)
+        else:
+            self.build_model(dim = 1)
+            x_reshaped = x.reshape(-1,1)
+
+        values = db.snapshots.reshape(-1,1)
+
         self.stop_training = shift_stop_training
         points = self.make_points(x, db.parameters)
-        values = db.snapshots.reshape(-1,1)
-        self.build_model(dim = 1)
-        self.optimizer = torch.optim.Adam(self.model.parameters(), 0.0001)
+        self.optimizer = optimizer(self.model.parameters(), lr = learning_rate)
         self.loss = torch.nn.MSELoss()
         points = torch.from_numpy(points).float()
@@ -160,14 +159,11 @@ def train_shiftnet(self, db, shift_layers, shift_function, shift_stop_training,
         while flag:
             shift = self.model(points)
             x_shift, y = self.shift(
-                torch.from_numpy(x.reshape(-1,1)).float(),
-                torch.from_numpy(db.snapshots.reshape(-1,1)).float(),
+                torch.from_numpy(x_reshaped).float(),
+                torch.from_numpy(values).float(),
                 shift)
-            #print(x_shift,y)
-            ref_interp = self.interp_net.predict_tensor(x_shift)
-            #print(ref_interp)
+            ref_interp = self.interp_net.model(x_shift)
             loss = self.loss(ref_interp, y)
-            print(loss.item())
             loss.backward()
             self.optimizer.step()
             self.loss_trend.append(loss.item())
@@ -178,91 +174,17 @@ def train_shiftnet(self, db, shift_layers, shift_function, shift_stop_training,
             elif isinstance(criteria, float):  # stop criteria is float
                 if loss.item() < criteria:
                     flag = False
+            if frequency_print != 0:
+                if n_epoch % frequency_print == 1:
+                    print(loss.item())
             n_epoch += 1

         new_point = self.make_points(x, db.parameters)
         shift = self.model(torch.from_numpy(new_point).float())
         x_new = self.shift(
-            torch.from_numpy(x.reshape(-1,1)).float(),
-            torch.from_numpy(db.snapshots.reshape(-1,1)).float(),
+            torch.from_numpy(x_reshaped).float(),
+            torch.from_numpy(values).float(),
             shift)[0]
-
-        plt.plot(db.space, db.snapshots, "go")
-        plt.plot(x_new.detach().numpy(), db.snapshots.reshape(-1,1), ".")
-        return shift
+        x_ret = x_new.detach().numpy()
+        return x_ret

-    def train_ShiftNet2d(self, db, shift_layers, shift_function, shift_stop_training, ref_data, preshift = False):
-        # TODO:
-        # make sure neural net works no mater distance between data
-        # check and implement 2d functionality
-        # make code look better
-        # work on pre_shift for 2d data (iterate through all data until max is found)
-        # make sure shift works for 2d data(might only shift one part)
-        self.layers = shift_layers
-        self.function = shift_function
-        self.loss_trend = []
-        if preshift:
-            x = self.pre_shift(db.space[0], db.snapshots[0], ref_data.snapshots[0])
-        else:
-            x = db.space[0]
-
-        self.stop_training = shift_stop_training
-        points = self.make_points(x, db.parameters)
-        self.build_model(dim = 2)
-
-        self.optimizer = torch.optim.Adam(self.model.parameters(), 0.00001)
-
-        self.loss = torch.nn.MSELoss()
-        points = torch.from_numpy(points).float()
-        n_epoch = 1
-        flag = True
-        while flag:
-            shift = self.model(points)
-            x_shift, y = self.shift(
-                torch.from_numpy(x.reshape(-1,2)).float(),
-                torch.from_numpy(db.snapshots.reshape(-1,1)).float(),
-                shift)
-            #print(x_shift,y)
-            ref_interp = self.interp_net.predict_tensor(x_shift)
-            #print(ref_interp)
-            loss = self.loss(ref_interp, y)
-            print(loss.item())
-            loss.backward()
-            self.optimizer.step()
-            self.loss_trend.append(loss.item())
-            for criteria in self.stop_training:
-                if isinstance(criteria, int):  # stop criteria is an integer
-                    if n_epoch == criteria:
-                        flag = False
-                elif isinstance(criteria, float):  # stop criteria is float
-                    if loss.item() < criteria:
-                        flag = False
-            n_epoch += 1
-
-
-        x = np.linspace(0, 5, 256)
-        y = np.linspace(0, 5, 256)
-        gridx, gridy = np.meshgrid(x, y)
-
-        plt.pcolor(gridx,gridy,ref_data.snapshots.reshape(256, 256))
-        plt.show()
-        res = 256
-        x = np.linspace(0, 5, res)
-        y = np.linspace(0, 5, res)
-        gridx, gridy = np.meshgrid(x, y)
-        coords = self.reshape2dto1d(gridx, gridy)
-        new_point = self.make_points(coords, db.parameters)
-        shift = self.model(torch.from_numpy(new_point).float())
-        x_new = self.shift(
-            torch.from_numpy(coords.reshape(-1,2)).float(),
-            torch.from_numpy(db.snapshots.reshape(-1,1)).float(),
-            shift)[0]
-        print(x_new.shape)
-        x, y = np.hsplit(x_new.detach().numpy(), 2)
-        x = self.reshape1dto2d(x)
-        y = self.reshape1dto2d(y)
-        snapshots = self.reshape1dto2d(db.snapshots.reshape(-1,1))
-        print(x.shape, y.shape)
-        plt.pcolor(x,y,snapshots)
-        plt.show()
-        return shift
\ No newline at end of file
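With `train_ShiftNet2d` removed, 2D data now goes through the same `train_interpnet`/`train_shiftnet` entry points, selected by the shape of `Database.space`. A minimal sketch of the 2D path under that assumption; the grid resolution, layer sizes, and stopping criteria below are illustrative, not values from this PR:

```python
import numpy as np
import torch.nn as nn
from ezyrb import Database
from ezyrb.nnspod import NNsPOD

# Illustrative 2D snapshots: a Gaussian bump that moves with the parameter t.
res = 16
params = np.linspace(1, 4, 10).reshape(-1, 1)
grid = np.linspace(0, 5, res)
gridx, gridy = np.meshgrid(grid, grid)

nnspod = NNsPOD(path="interpnet2d.pth")
coords = nnspod.reshape2dto1d(gridx, gridy)   # shape (res*res, 2)
snaps = np.array([np.exp(-((gridx - t) ** 2 + (gridy - t) ** 2) / 0.02).reshape(-1)
                  for t in params.ravel()])   # shape (n, res*res)
space = np.array([coords for _ in params])    # shape (n, res*res, 2)
db = Database(space=space, snapshots=snaps, parameters=params)

# Because space has three axes, both methods pick their dim == 2 branch.
ref = 5
nnspod.train_interpnet(db[ref], [40, 40], nn.Sigmoid(), [1e-4], None, retrain=True)
x_new = nnspod.train_shiftnet(db[0], [40, 40], nn.Tanh(), [500, 1e-4], db[ref])
```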
diff --git a/tutorials/interpnet1d.pth b/tutorials/interpnet1d.pth
new file mode 100644
index 00000000..847f4f09
Binary files /dev/null and b/tutorials/interpnet1d.pth differ
diff --git a/tutorials/interpnet2d.pth b/tutorials/interpnet2d.pth
new file mode 100644
index 00000000..19bcd99f
Binary files /dev/null and b/tutorials/interpnet2d.pth differ
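The two `.pth` files are pretrained InterpNet checkpoints that let the tutorial below skip the (potentially slow) InterpNet training. A minimal sketch of restoring one directly, assuming `load_state` returns the restored network as it does when called inside `nnspod.py`; the placeholder `points`/`values` arrays only size the rebuilt model:

```python
import numpy as np
import torch.nn as nn
from ezyrb.ann import ANN

# Same architecture the 1D checkpoint was trained with in the tutorial:
# two hidden layers of 20 neurons and a Sigmoid activation.
net = ANN([20, 20], nn.Sigmoid(), [1e-6], None)

# load_state rebuilds the model from points/values, then loads the weights.
space = np.linspace(0, 5, 256).reshape(-1, 1)
snapshots = np.zeros_like(space)
net = net.load_state("tutorials/interpnet1d.pth", space, snapshots)
```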
diff --git a/tutorials/tutorial-3.ipynb b/tutorials/tutorial-3.ipynb
new file mode 100644
index 00000000..be3cad5b
--- /dev/null
+++ b/tutorials/tutorial-3.ipynb
@@ -0,0 +1,2211 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# EZyRB Tutorial 3\n",
+    "## Use NNsPOD to help with POD\n",
+    "\n",
+    "In this tutorial we show how to set up and use the NNsPOD class to align all of the data, allowing the use of POD."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To do this we will work through a simple example where the data is a moving Gaussian wave.\n",
+    "\n",
+    "The first step is to import the necessary packages."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "import matplotlib\n",
+    "import torch\n",
+    "import torch.nn as nn\n",
+    "from ezyrb.nnspod import NNsPOD\n",
+    "from ezyrb import Database\n",
+    "matplotlib.use('Qt5Agg')\n",
+    "import matplotlib.pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## 1d Data\n",
+    "\n",
+    "Now we create the data we will use: a simple Gaussian function, with which we populate the space, snapshots, and parameters of the database."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "n_params = 15\n",
+    "params = np.linspace(0.5, 4.5, n_params).reshape(-1, 1)  # actually the time steps\n",
+    "def gaussian(x, mu, sig):\n",
+    "    return np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.)))\n",
+    "def wave(t, res=256):\n",
+    "    x = np.linspace(0, 5, res)\n",
+    "    return x, gaussian(x, t, 0.1)\n",
+    "\n",
+    "db_array = np.array([wave(t)[1] for t in params])\n",
+    "space = wave(0)[0]\n",
+    "space_array = np.array([wave(t)[0] for t in params])\n",
+    "\n",
+    "database = Database(space = space_array, snapshots = db_array, parameters = params)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here we create the NNsPOD object. The only value to pass in is the path where you want to save the InterpNet, or where you want to load it from. This is especially useful with 2D data, where training can take hours depending on the size of the dataset."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "NNsPOD_tutorial = NNsPOD(path = \"interpnet1d.pth\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now we train the InterpNet. We pass in the reference database, the shape of the layers of the network, the activation function, the stop-training value (a float is a loss value to stop at, an int is an epoch to stop at, and a list with both stops at whichever is reached first), the loss function (MSE by default), whether to retrain the network or load a saved one, and `frequency_print`, the number of epochs between printed loss values when retraining."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "loaded interpnet\n"
+     ]
+    }
+   ],
+   "source": [
+    "ref_data = 5\n",
+    "NNsPOD_tutorial.train_interpnet(database[ref_data], [20,20], nn.Sigmoid(), [0.000001], None, retrain = False, frequency_print = 5)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now we can graph the original reference data as well as the data we get from the InterpNet after feeding it 1000 positional datapoints. The large points are the original data points, and the small points are the ones created by the InterpNet. It should be clear that the InterpNet accurately replicates the Gaussian."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.plot(database[ref_data].space, database[ref_data].snapshots, \"o\")\n",
+    "xi = np.linspace(0,5,1000).reshape(-1,1)\n",
+    "yi = NNsPOD_tutorial.interp_net.predict(xi)\n",
+    "plt.plot(xi,yi, \".\")\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now we train the ShiftNet on all of the data besides the reference. To do this we pass in the database at each parameter value, the shape of the network, the activation function, the stop-training value, the reference database, and whether the data should be pre-shifted. For the ShiftNet it is useful to give both an epoch limit and a loss tolerance, because there is a minimum level the loss can reach: with a tolerance below that level, training would never stop on its own.\n",
+    "\n",
+    "When `frequency_print` is nonzero, training the ShiftNet also prints the loss value as it trains; the (truncated) output below was recorded with printing enabled."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.03618120029568672\n",
+      "0.007488970644772053\n",
+      "0.03669120371341705\n",
+      "0.06321724504232407\n",
+      "0.05040336400270462\n",
+      "0.06794826686382294\n",
+      "0.05736231058835983\n",
+      "0.035289566963911057\n",
+      "...\n",
+      "0.03820668160915375\n"
+     ]
+    }
+   ],
+   "source": [
+    "i = 0\n",
+    "while i < 10:\n",
+    "    x_new = NNsPOD_tutorial.train_shiftnet(database[i], [20,20,20], nn.Tanh(), [1000, 0.00001], database[ref_data], preshift = True, frequency_print = 0, learning_rate = 0.01) \n",
+    "    db = database[i] \n",
+    "    plt.plot(db.space, db.snapshots, \"go\")\n",
+    "    plt.plot(x_new, db.snapshots.reshape(-1,1), \".\")\n",
+    "    i+=1\n",
+    "    if i == ref_data:\n",
+    "        i +=1"
+   ]
+  },
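+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Instead of printing the loss, we can inspect it after the fact: `train_shiftnet` stores the loss value of every epoch in `loss_trend`. A minimal sketch of plotting it (note that `loss_trend` only holds the history of the most recent `train_shiftnet` call):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "plt.plot(NNsPOD_tutorial.loss_trend)\n",
+    "plt.xlabel(\"epoch\")\n",
+    "plt.ylabel(\"shiftnet loss\")\n",
+    "plt.show()"
+   ]
+  },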
"0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.0006343760178424418\n", + "0.040905095636844635\n", + "0.048760831356048584\n", + "0.05438512563705444\n", + "0.06428015977144241\n", + "0.05451284348964691\n", + "0.0490446574985981\n", + "0.05337583273649216\n", + "0.05715584009885788\n", + "0.04461467266082764\n", + "0.039767101407051086\n", + "0.038730621337890625\n", + "0.03904196619987488\n", + "0.0353756807744503\n", + "0.035373978316783905\n", + "0.035365767776966095\n", + "0.03535913676023483\n", + "0.03535446152091026\n", + "0.03535158932209015\n", + "0.03534995764493942\n", + "0.0353490486741066\n", + "0.03534854203462601\n", + "0.03534824401140213\n", + "0.0353480726480484\n", + "0.03534799814224243\n", + "0.03534792736172676\n", + "0.03534790500998497\n", + "0.035347890108823776\n", + "0.03534787893295288\n", + "0.03534786403179169\n", + "0.03534785285592079\n", + 
"0.03534784913063049\n", + "0.035347841680049896\n", + "0.0353478379547596\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", 
+ "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.0640300065279007\n", + "0.04936844855546951\n", + "0.04293443262577057\n", + "0.06666997075080872\n", + "0.0670924037694931\n", + "0.0680939331650734\n", + "0.035303860902786255\n", + "0.03531253710389137\n", + "0.035361431539058685\n", + "0.03670534864068031\n", + "0.035369325429201126\n", + "0.03536026179790497\n", + "0.035354845225811005\n", + "0.03535172715783119\n", + "0.03534998744726181\n", + "0.03534902259707451\n", + "0.035348497331142426\n", + "0.035348206758499146\n", + "0.03534804657101631\n", + "0.03534796088933945\n", + "0.03534791246056557\n", + "0.035347890108823776\n", + "0.035347871482372284\n", + "0.035347867757081985\n", + "0.03534785658121109\n", + "0.035347841680049896\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + 
"0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.014654207974672318\n", + "0.04599158093333244\n", + "0.06999507546424866\n", + "0.003101724199950695\n", + "0.022733839228749275\n", + "0.0671105682849884\n", + "0.0025321519933640957\n", + "0.03800452500581741\n", + "0.03530562296509743\n", + "0.035327065736055374\n", + "0.03531361743807793\n", + "0.035295270383358\n", + "0.03529457747936249\n", + "0.035302042961120605\n", + "0.035315144807100296\n", + "0.035331305116415024\n", + "0.03534695506095886\n", + "0.035359516739845276\n", + "0.035368263721466064\n", + "0.03537369892001152\n", + "0.03537660837173462\n", + "0.03537774831056595\n", + "0.03537769615650177\n", + "0.03537685051560402\n", + "0.03537551313638687\n", + "0.035373881459236145\n", + "0.03537209331989288\n", + "0.03537025302648544\n", + "0.0353684239089489\n", + "0.03536663576960564\n", + "0.035364940762519836\n", + "0.0353633388876915\n", + "0.035361845046281815\n", + "0.03536047041416168\n", + "0.035359203815460205\n", + "0.03535805642604828\n", + "0.03535700589418411\n", + "0.035356055945158005\n", + "0.03535519912838936\n", + "0.03535442799329758\n", + "0.035353731364011765\n", + "0.03535310551524162\n", + "0.035352546721696854\n", + "0.03535205125808716\n", + "0.035351596772670746\n", + "0.035351190716028214\n", + "0.03535082936286926\n", + "0.0353504940867424\n", + "0.03535019978880882\n", + 
"0.03534993156790733\n", + "0.03534969687461853\n", + "0.03534948453307152\n", + "0.0353492870926857\n", + "0.03534911945462227\n", + "0.03534896299242973\n", + "0.035348836332559586\n", + "0.035348717123270035\n", + "0.035348616540431976\n", + "0.035348523408174515\n", + "0.03534844145178795\n", + "0.035348378121852875\n", + "0.0353483110666275\n", + "0.03534825146198273\n", + "0.035348210483789444\n", + "0.03534815460443497\n", + "0.03534811735153198\n", + "0.035348080098629\n", + "0.035348061472177505\n", + "0.03534804284572601\n", + "0.03534802049398422\n", + "0.03534800559282303\n", + "0.03534798324108124\n", + "0.035347964614629745\n", + "0.03534794971346855\n", + "0.03534793481230736\n", + "0.035347919911146164\n", + "0.035347916185855865\n", + "0.035347916185855865\n", + "0.03534790500998497\n", + "0.035347893834114075\n", + "0.035347890108823776\n", + "0.035347890108823776\n", + "0.035347890108823776\n", + "0.03534788265824318\n", + "0.03534788265824318\n", + "0.03534787893295288\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478379547596\n", + "0.0353478379547596\n", + "0.0353478379547596\n", + "0.0353478379547596\n", + "0.035347841680049896\n", + "0.03534784913063049\n", + "0.03534785658121109\n", + "0.03534786403179169\n", + "0.035347871482372284\n", + "0.035347871482372284\n", + "0.035347871482372284\n", + "0.03534787893295288\n", + "0.03534787893295288\n", + "0.03534787893295288\n", + "0.03534787893295288\n", + "0.035347871482372284\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.03534786403179169\n", + "0.03534786030650139\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785285592079\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347845405340195\n", + "0.035347841680049896\n", + "0.0353478342294693\n", + "0.035347830504179\n", + "0.035347830504179\n", + 
"0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.035347830504179\n", + "0.06685949116945267\n", + "0.010386582463979721\n", + "0.06603461503982544\n", + "0.0035528428852558136\n", + "0.0548289492726326\n", + "0.06674175709486008\n", + "0.032915644347667694\n", + "0.06800820678472519\n", + "0.03606117516756058\n", + "0.03660352900624275\n", + "0.03706599026918411\n", + "0.03736760467290878\n", + "0.037629660218954086\n", + "0.037848345935344696\n", + "0.03799767792224884\n", + "0.038088567554950714\n", + "0.03814070299267769\n", + "0.038169875741004944\n", + "0.038186050951480865\n", + "0.038194991648197174\n", + "0.03819993883371353\n", + "0.0382026769220829\n", + "0.03820420801639557\n", + "0.038205064833164215\n", + "0.03820556029677391\n", + "0.03820585086941719\n", + "0.03820601850748062\n", + "0.03820611163973808\n", + "0.03820617124438286\n", + "0.03820622339844704\n", + "0.03820626437664032\n", + "0.03820629417896271\n", + "0.03820633888244629\n", + "0.03820638358592987\n", + "0.03820640966296196\n", + "0.038206443190574646\n", + "0.038206472992897034\n", + "0.038206491619348526\n", + "0.03820651024580002\n", + "0.03820653259754181\n", + "0.038206543773412704\n", + "0.0382065549492836\n", + "0.038206566125154495\n", + "0.03820657730102539\n", + "0.03820658475160599\n", + "0.038206592202186584\n", + "0.03820660337805748\n", + "0.03820661082863808\n", + "0.038206614553928375\n", + "0.03820662572979927\n", + "0.03820662945508957\n", + "0.038206640630960464\n", + "0.03820664435625076\n", + "0.03820665180683136\n", + "0.03820665925741196\n", + "0.03820665925741196\n", + "0.038206662982702255\n", + "0.038206662982702255\n", + "0.038206666707992554\n", + "0.03820667415857315\n", + "0.03820667415857315\n", + "0.03820667415857315\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + 
"0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.004749283194541931\n", + "6.864473834866658e-05\n", + "0.054425131529569626\n", + "0.03624590486288071\n", + "0.04943655803799629\n", + "0.0635475367307663\n", + "0.060113489627838135\n", + "0.04980885237455368\n", + "0.04133860766887665\n", + "0.039592910557985306\n", + "0.03745774179697037\n", + "0.03536282479763031\n", + "0.035358987748622894\n", + "0.03535560518503189\n", + "0.03535333648324013\n", + "0.03535190597176552\n", + "0.035350870341062546\n", + "0.03535007685422897\n", + "0.03534946218132973\n", + "0.03534898906946182\n", + "0.03534865006804466\n", + "0.03534839674830437\n", + "0.035348210483789444\n", + "0.0353480689227581\n", + "0.03534799814224243\n", + "0.03534792736172676\n", + "0.03534790128469467\n", + "0.03534787893295288\n", + "0.035347871482372284\n", + "0.03534786030650139\n", + "0.03534785658121109\n", + "0.035347841680049896\n", + "0.035347841680049896\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + 
"0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + 
"0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.01937580108642578\n", + "0.0009189761476591229\n", + "0.0026972279883921146\n", + "0.06929903477430344\n", + "0.002275083214044571\n", + "0.004671498667448759\n", + "0.05172063037753105\n", + "0.044871266931295395\n", + "0.035369277000427246\n", + "0.03537744656205177\n", + "0.03537606820464134\n", + "0.03537062555551529\n", + "0.035365357995033264\n", + "0.0353611558675766\n", + "0.03535773232579231\n", + "0.035354964435100555\n", + "0.03535287454724312\n", + "0.03535136580467224\n", + "0.03535030782222748\n", + "0.035349585115909576\n", + "0.03534908965229988\n", + "0.035348743200302124\n", + "0.03534851223230362\n", + "0.035348355770111084\n", + "0.03534824028611183\n", + "0.03534814715385437\n", + "0.03534809127449989\n", + "0.035348061472177505\n", + "0.03534804657101631\n", + "0.035348016768693924\n", + "0.03534800559282303\n", + "0.03534800186753273\n", + "0.03534799814224243\n", + "0.03534799441695213\n", + "0.035347986966371536\n", + "0.03534797206521034\n", + "0.03534796088933945\n", + "0.035347942262887955\n", + "0.03534792736172676\n", + "0.035347916185855865\n", + "0.035347916185855865\n", + "0.03534790500998497\n", + "0.035347893834114075\n", + "0.035347893834114075\n", + "0.035347893834114075\n", + "0.03534788638353348\n", + "0.03534788265824318\n", + "0.03534787893295288\n", + "0.03534787893295288\n", + "0.03534787893295288\n", + "0.035347871482372284\n", + "0.035347871482372284\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.035347867757081985\n", + "0.03534786030650139\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534785658121109\n", + "0.03534784913063049\n", + "0.035347841680049896\n", + "0.035347841680049896\n", + "0.035347841680049896\n", + "0.035347841680049896\n", + "0.035347841680049896\n", + "0.035347841680049896\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + 
"0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.0353478342294693\n", + "0.02686026133596897\n", + "0.008267712779343128\n", + "0.015089419670403004\n", + "0.07033220678567886\n", + "0.0691613256931305\n", + "0.06700858473777771\n", + "0.05922491103410721\n", + "0.03635361045598984\n", + "0.0521370954811573\n", + "0.03656502813100815\n", + "0.03711673989892006\n", + "0.03748020529747009\n", + "0.037738773971796036\n", + "0.037912413477897644\n", + "0.038023676723241806\n", + "0.03809357434511185\n", + "0.03813707455992699\n", + "0.03816399723291397\n", + "0.03818058222532272\n", + "0.038190774619579315\n", + "0.03819701820611954\n", + "0.03820083662867546\n", + "0.0382031612098217\n", + "0.03820457309484482\n", + "0.038205426186323166\n", + "0.038205936551094055\n", + "0.03820624202489853\n", + "0.038206424564123154\n", + "0.03820652514696121\n", + "0.038206592202186584\n", + "0.03820662945508957\n", + "0.03820664808154106\n", + "0.038206662982702255\n", + "0.03820667415857315\n", + "0.03820667788386345\n", + "0.03820667788386345\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + 
"0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + 
"0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03602290153503418\n", + "0.0006493850378319621\n", + "0.06898123025894165\n", + "0.05555272474884987\n", + "0.0010742570739239454\n", + "0.07045412063598633\n", + "0.047709014266729355\n", + "0.03571415692567825\n", + "0.03631020337343216\n", + "0.036884158849716187\n", + "0.03735504299402237\n", + "0.03768881782889366\n", + "0.03789759427309036\n", + "0.03801729902625084\n", + "0.038085125386714935\n", + "0.03812903165817261\n", + "0.03815951570868492\n", + "0.038179002702236176\n", + "0.03819063678383827\n", + "0.03819741681218147\n", + "0.038201332092285156\n", + "0.03820359334349632\n", + "0.03820490092039108\n", + "0.038205645978450775\n", + "0.038206085562705994\n", + "0.03820633143186569\n", + "0.03820648044347763\n", + "0.0382065623998642\n", + "0.038206614553928375\n", + "0.038206640630960464\n", + "0.03820665553212166\n", + "0.038206666707992554\n", + "0.03820667788386345\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + 
"0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n", + "0.03820668160915375\n" + ] + } + ], + "source": [ + "i = 0\n", + "while i < 10:\n", + " x_new = NNsPOD_tutorial.train_shiftnet(database[i], [20,20,20], nn.Tanh(), [1000, 0.00001], database[ref_data], preshift = True, frequency_print = 0, learning_rate = 0.01) \n", + " db = database[i] \n", + " plt.plot(db.space, db.snapshots, \"go\")\n", + " plt.plot(x_new, db.snapshots.reshape(-1,1), \".\")\n", + " i+=1\n", + " if i == ref_data:\n", + " i +=1" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we plot and show all the data. The original positions is represented by green circles, the reference data is the blue plusmarks, and the different shifted data is represented by the smaller dots. 
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here we create the 2d gaussian and populate the database."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "n_params = 10\n",
+    "params = np.linspace(0.5, 4.5, n_params).reshape(-1, 1) # actually the time steps\n",
+    "\n",
+    "def gaussian(x, mu, sig):\n",
+    "    gaussx, gaussy = np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.))).T\n",
+    "    return gaussx * gaussy\n",
+    "\n",
+    "def wave(t, res=256):\n",
+    "    x = np.linspace(0, 5, res)\n",
+    "    return x, gaussian(x, t, 0.1)\n",
+    "\n",
+    "def wave2D(t, res=256):\n",
+    "    x = np.linspace(0, 5, res)\n",
+    "    y = np.linspace(0, 5, res)\n",
+    "    gridx, gridy = np.meshgrid(x, y)\n",
+    "    gridx, gridy = gridx.reshape(-1,1), gridy.reshape(-1,1)\n",
+    "    wave = gaussian(np.hstack([gridx, gridy]), t*np.array([1, 1]), 0.1)\n",
+    "    return gridx, gridy, wave\n",
+    "\n",
+    "db = np.array([wave2D(t)[2] for t in params])\n",
+    "db_array = db.reshape(n_params, -1, 1)\n",
+    "gridx, gridy = wave2D(0)[0:2]\n",
+    "space = reshape2dto1d(gridx, gridy)\n",
+    "space_array = np.array([space.copy() for t in params])\n",
+    "\n",
+    "database = Database(space = space_array, snapshots = db_array, parameters = params)\n"
+   ]
+  },
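+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "A brief, optional look at the shapes involved: each of the `n_params` snapshots is a flattened 256x256 field stored as a column, and the space array repeats the same list of (x, y) coordinates for every parameter."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# optional shape check: 10 snapshots, 256*256 = 65536 points each\n",
+    "print(db_array.shape)     # (10, 65536, 1)\n",
+    "print(space_array.shape)  # (10, 65536, 2)\n"
+   ]
+  },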
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "NNsPOD_tutorial = NNsPOD(path = \"interpnet2d.pth\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here we set the value for the reference data and train the interpnet. If you change the reference data, the interpnet will need to be retrained, which can take a long time for 2d data, especially if the stopping loss value is very low."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "loaded interpnet\n"
+     ]
+    }
+   ],
+   "source": [
+    "ref_data = 5\n",
+    "NNsPOD_tutorial.train_interpnet(database[ref_data], [40,40], nn.Sigmoid(), [10000000,0.000001], None, retrain = False, frequency_print = 5)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here we graph the reference data."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "x = np.linspace(0, 5, 256)\n",
+    "y = np.linspace(0, 5, 256)\n",
+    "gridx, gridy = np.meshgrid(x, y)\n",
+    "\n",
+    "plt.pcolor(gridx,gridy,database[ref_data].snapshots.reshape(256, 256))\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now we graph the interpolated data. It should be visible that we get the same function, but at a higher resolution."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "res = 1000\n",
+    "x = np.linspace(0, 5, res)\n",
+    "y = np.linspace(0, 5, res)\n",
+    "gridx, gridy = np.meshgrid(x, y)\n",
+    "input = NNsPOD_tutorial.reshape2dto1d(gridx, gridy)\n",
+    "output = NNsPOD_tutorial.interp_net.predict(input)\n",
+    "\n",
+    "toshow = NNsPOD_tutorial.reshape1dto2d(output)\n",
+    "plt.pcolor(gridx,gridy,toshow)\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now we use the shiftnet and graph the shifted data. For each parameter we first graph the reference data, then the input data; the shift itself takes some time, and finally the shifted data is graphed. The loss value is also printed every few epochs, as set by `frequency_print`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "i = 0\n",
+    "x = np.linspace(0, 5, 256)\n",
+    "y = np.linspace(0, 5, 256)\n",
+    "gridx, gridy = np.meshgrid(x, y)\n",
+    "while i < 10:\n",
+    "    db = database[i]\n",
+    "    plt.pcolor(gridx,gridy,database[ref_data].snapshots.reshape(256, 256))\n",
+    "    plt.show()\n",
+    "    plt.pcolor(gridx,gridy,database[i].snapshots.reshape(256, 256))\n",
+    "    plt.show()\n",
+    "    x_new = NNsPOD_tutorial.train_shiftnet(database[i], [20,20,20], nn.PReLU(), [0.001], database[ref_data], preshift = True, frequency_print = 5)\n",
+    "    x, y = np.hsplit(x_new, 2)\n",
+    "    x = NNsPOD_tutorial.reshape1dto2d(x)\n",
+    "    y = NNsPOD_tutorial.reshape1dto2d(y)\n",
+    "    snapshots = NNsPOD_tutorial.reshape1dto2d(db.snapshots.reshape(-1,1))\n",
+    "    plt.pcolor(x,y,snapshots)\n",
+    "    plt.show()\n",
+    "    i += 1\n",
+    "    if i == ref_data:\n",
+    "        i += 1\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3.10.4 64-bit",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.4"
+  },
+  "orig_nbformat": 4,
+  "vscode": {
+   "interpreter": {
+    "hash": "c3571620c9e7a2ef712c686809cf2d92a9d8fa44cb30698f5938f17078c44765"
+   }
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}